Compare commits

...

88 Commits

Author SHA1 Message Date
Bram Kragten
49086b2a76 2026.1.0 (#159957) 2026-01-07 18:38:10 +01:00
Bram Kragten
1f28fe9933 Bump version to 2026.1.0 2026-01-07 17:46:04 +01:00
Bram Kragten
4465aa264c Update frontend to 20260107.0 (#160434) 2026-01-07 17:45:41 +01:00
Robert Resch
2c1bc96161 Bump deebot-client to 17.0.1 (#160428) 2026-01-07 17:45:40 +01:00
Joost Lekkerkerker
7127159a5b Make Watts depend on the cloud integration (#160424) 2026-01-07 17:45:38 +01:00
Abílio Costa
9f0eb6f077 Support target triggers in automation relation extraction (#160369) 2026-01-07 17:45:37 +01:00
Paul Bottein
da19cc06e3 Fix hvac_mode validation in climate.hvac_mode_changed trigger (#160364) 2026-01-07 17:45:36 +01:00
Bram Kragten
fd92377cf2 Bump version to 2026.1.0b5 2026-01-07 14:53:13 +01:00
Robert Resch
c201938b8b Constraint aiomqtt>=2.5.0 to fix blocking call (#160410)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-07 14:51:49 +01:00
Luke Lashley
b3765204b1 Bump python-roborock to 4.2.1 (#160398) 2026-01-07 14:48:27 +01:00
Luke Lashley
786257e051 Remove q7 total cleaning time for Roborock (#160399) 2026-01-07 14:47:47 +01:00
Allen Porter
9559634151 Update roborock binary sensor tests with snapshots (#159981) 2026-01-07 14:47:41 +01:00
Allen Porter
cf12ed8f08 Improve roborock test accuracy/robustness (#160021) 2026-01-07 14:45:53 +01:00
Michael Hansen
e213f49c75 Bump intents to 2026.1.6 (#160389) 2026-01-07 14:42:00 +01:00
Raphael Hehl
09c7cc113a Bump uiprotect to 8.0.0 (#160384)
Co-authored-by: RaHehl <rahehl@users.noreply.github.com>
2026-01-07 14:41:59 +01:00
dontinelli
e1e7e039a9 Bump solarlog_cli to 0.7.0 (#160382) 2026-01-07 14:41:58 +01:00
Daniel Hjelseth Høyer
05a0f0d23f Bump pyTibber to 0.34.1 (#160380)
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-01-07 14:41:57 +01:00
Artem Draft
d3853019eb Add SSL support in Bravia TV (#160373)
Co-authored-by: Maciej Bieniek <bieniu@users.noreply.github.com>
2026-01-07 14:41:55 +01:00
hanwg
ccbaac55b3 Fix schema validation error in Telegram (#160367) 2026-01-07 14:41:54 +01:00
Xiangxuan Qu
771292ced9 Fix IndexError in Israel Rail sensor when no departures available (#160351)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-01-07 14:41:53 +01:00
TheJulianJES
5d4262e8b3 Bump ZHA to 0.0.83 (#160342) 2026-01-07 14:41:52 +01:00
Paul Tarjan
d96da9a639 Fix Ring integration log flooding for accounts without subscription (#158012)
Co-authored-by: Robert Resch <robert@resch.dev>
2026-01-07 14:41:51 +01:00
Bram Kragten
288a805d0f Bump version to 2026.1.0b4 2026-01-06 17:56:49 +01:00
Bram Kragten
8e55ceea77 Update frontend to 20251229.1 (#160372) 2026-01-06 17:55:34 +01:00
Artem Draft
14f1d9fbad Bump pybravia to 0.4.1 (#160368) 2026-01-06 17:55:32 +01:00
Bram Kragten
eb6582bc24 Fix number or entity choose schema (#160358) 2026-01-06 17:55:32 +01:00
tronikos
4afe67f33d Bump opower to 0.16.0 (#160348) 2026-01-06 17:55:30 +01:00
Mika
5d7b10f569 Fix missing state class to solaredge (#160336) 2026-01-06 17:55:30 +01:00
Daniel Hjelseth Høyer
340c2e48df Bump pyTibber to 0.34.0 (#160333) 2026-01-06 17:55:29 +01:00
J. Nick Koston
86257b1865 Require service_uuid and service_data_uuid to match hue ble (#160321) 2026-01-06 17:55:27 +01:00
Daniel Hjelseth Høyer
eea1adccfd Fix unit for Tibber sensor (#160319) 2026-01-06 17:55:26 +01:00
Frédéric
242be14f88 Add Resideo X2S Smart Thermostat to Matter fan-only mode list (#160260) 2026-01-06 17:55:25 +01:00
Xidorn Quan
7e013b723d Fix rain count sensors' state class of Ecowitt (#158204) 2026-01-06 17:55:24 +01:00
Bram Kragten
4d55939f53 Bump version to 2026.1.0b3 2026-01-05 16:53:53 +01:00
Bram Kragten
e5e7546d49 Fix humidifier trigger turned on icon (#160297) 2026-01-05 16:52:56 +01:00
Joakim Sørensen
e560795d04 Add connection check before registering cloudhook URL (#160284) 2026-01-05 16:52:55 +01:00
epenet
15b0342bd7 Fix Tuya light color data wrapper (#160280) 2026-01-05 16:52:54 +01:00
Jan-Philipp Benecke
8d05a5f3d4 Bump aiowebdav2 to 0.5.0 (#160233) 2026-01-05 16:52:53 +01:00
Samuel Xiao
358ad29b59 Switchbot Cloud: Fixed Robot Vacuum Cleaner S20 had two device_model name (#160230) 2026-01-05 16:52:52 +01:00
J. Nick Koston
5c4f99b828 Bump aiohttp 3.13.3 (#160206) 2026-01-05 16:52:03 +01:00
Erik Montnemery
b3f123c715 Await writes in shopping_list action handlers (#157420) 2026-01-05 16:51:30 +01:00
J. Nick Koston
85c2351af2 Ensure Brotli >= 1.2.0 (#160229) 2026-01-05 16:45:49 +01:00
Josef Zweck
ec19529c99 Remove referral link from fish_audio (#160193) 2026-01-05 16:40:46 +01:00
Vincent Courcelle
d5ebd02afe Bump python-roborock to 4.2.0 (#160184) 2026-01-05 16:40:45 +01:00
wollew
37d82ab795 bump pyvlx version to 0.2.27 (#160139) 2026-01-05 16:40:44 +01:00
mettolen
5d08481137 Bump pyairobotrest to 0.2.0 (#160125) 2026-01-05 16:40:43 +01:00
Maikel Punie
0861b7541d Bump velbusaio to 2026.1.1 (#160116) 2026-01-05 16:40:42 +01:00
Jan Bouwhuis
abf7078842 Fix reolink brightness scaling (#160106) 2026-01-05 16:40:41 +01:00
Michael Hansen
c4012fae4e Bump intents to 2026.1.1 (#160099) 2026-01-05 16:40:40 +01:00
Maikel Punie
d6082ab6c3 Bump velbusaio to 2026.1.0 (#160087) 2026-01-05 16:40:39 +01:00
Austin Mroczek
77367e415f Bump total_connect_client to 2025.12.2 (#160075) 2026-01-05 16:40:38 +01:00
Miguel Camba
6c006c68c1 Update voluptuous and voluptuous-openapi (#160073) 2026-01-05 16:40:37 +01:00
Pete Sage
026fdeb4ce Improve Sonos wait to unjoin timeout (#160011) 2026-01-05 16:40:36 +01:00
cdnninja
1034218e6e add description to string vesync (#160003) 2026-01-05 16:40:35 +01:00
Willem-Jan van Rootselaar
a21062f502 Add schema validation for set_hot_water_schedule service (#159990) 2026-01-05 16:40:34 +01:00
Maikel Punie
2e157f1bc6 Velbus Exception translations (#159627)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-05 16:40:32 +01:00
Paul Tarjan
a697e63b8c Fix Tesla update showing scheduled updates as installing (#158681) 2026-01-05 16:40:31 +01:00
Ben Wolstencroft
d28d55c7db Add support for health_overview API endpoint to Tractive integration (#157960)
Co-authored-by: Maciej Bieniek <bieniu@users.noreply.github.com>
2026-01-05 16:40:30 +01:00
Brett Adams
8863488286 Handle export options when enrolled to VPP in Teslemetry (#157665) 2026-01-05 16:40:29 +01:00
Daniel Hjelseth Høyer
53cfdef1ac Move Tibber to OAuth (#156690)
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-01-05 16:40:28 +01:00
Franck Nijhof
42ea7ecbd6 Bump version to 2026.1.0b2 2025-12-31 15:34:05 +00:00
tronikos
d58d08c350 Filter out duplicate voices without language code in Google Cloud (#160046) 2025-12-31 15:33:49 +00:00
Paul Tarjan
65a259b9df Fix Hikvision thread safety issue when calling async_write_ha_state (#160027) 2025-12-31 15:33:48 +00:00
Luke Lashley
cbfbfbee13 Don't prefer cache for Roborock device fetching (#160022) 2025-12-31 15:33:47 +00:00
David Knowles
e503b37ddc Use WATER device_class for Hydrawise sensors (#160018) 2025-12-31 15:33:45 +00:00
Simone Chemelli
217eef39f3 Bump aioamazondevices to 11.0.2 (#160016)
Co-authored-by: Franck Nijhof <git@frenck.dev>
2025-12-31 15:33:44 +00:00
Manu
dcdbce9b21 Convert store image URLs to https in Xbox media resolver (#160015) 2025-12-31 15:33:42 +00:00
Erwin Douna
71db8fe185 Bump portainer 1.0.19 (#160014) 2025-12-31 15:33:41 +00:00
Anders Melchiorsen
9b96cb66d5 Fix netgear_lte unloading (#160008) 2025-12-31 15:33:39 +00:00
Anders Melchiorsen
78bccbbbc2 Move async_setup_services to async_setup for netgear_lte (#160007) 2025-12-31 15:33:38 +00:00
Anders Melchiorsen
b0a8f9575c Bump eternalegypt to 0.0.18 (#160006) 2025-12-31 15:33:36 +00:00
Matthias Alphart
61104a9970 Update knx-frontend to 2025.12.30.151231 (#159999) 2025-12-31 15:33:35 +00:00
Franck Nijhof
8d13dbdd0c Bump version to 2026.1.0b1 2025-12-30 09:14:36 +00:00
Erwin Douna
9afb41004e Portainer fix stopped container for stats (#159964) 2025-12-30 09:14:24 +00:00
Luke Lashley
cdd542f6e6 Bump Python-Roborock to 4.1.0 (#159963) 2025-12-30 09:14:22 +00:00
Joost Lekkerkerker
f520686002 Small cleanup in Feedreader (#159962) 2025-12-30 09:14:20 +00:00
J. Nick Koston
e4d09bb615 Bump aioesphomeapi to 43.9.1 (#159960) 2025-12-30 09:14:19 +00:00
Matthias Alphart
10f6ccf6cc Fix KNX translation references (#159959) 2025-12-30 09:14:17 +00:00
Ernst Klamer
d9fa67b16f bump xiaomi-ble to 1.4.1 (#159954) 2025-12-30 09:14:15 +00:00
Joost Lekkerkerker
cf228ae02b Inject session in Switchbot cloud (#159942) 2025-12-30 09:14:14 +00:00
Joost Lekkerkerker
cb4d62ab9a Add integration_type device to ps4 (#159892) 2025-12-30 09:14:12 +00:00
Joost Lekkerkerker
d2f75aec04 Add integration_type hub to poolsense (#159881) 2025-12-30 09:14:11 +00:00
Joost Lekkerkerker
a609fbc07b Add integration_type hub to pooldose (#159880) 2025-12-30 09:14:09 +00:00
Joost Lekkerkerker
1b9c7ae0ac Add integration_type hub to permobil (#159872) 2025-12-30 09:14:07 +00:00
Joost Lekkerkerker
492f2117fb Add integration_type service to nuheat (#159845) 2025-12-30 09:14:06 +00:00
Joost Lekkerkerker
2346f83635 Add integration_type device to netgear (#159816) 2025-12-30 09:14:04 +00:00
Kamil Breguła
8925bfb182 Add translation of exceptions in met (#155765)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-12-30 09:12:18 +00:00
Franck Nijhof
8f2b1f0eff Bump version to 2026.1.0b0 2025-12-29 19:01:17 +00:00
146 changed files with 7398 additions and 1633 deletions

View File

@@ -13,5 +13,5 @@
"iot_class": "local_polling",
"loggers": ["pyairobotrest"],
"quality_scale": "silver",
"requirements": ["pyairobotrest==0.1.0"]
"requirements": ["pyairobotrest==0.2.0"]
}

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==10.0.0"]
"requirements": ["aioamazondevices==11.0.2"]
}

View File

@@ -7,7 +7,7 @@ import asyncio
from collections.abc import Callable, Mapping
from dataclasses import dataclass
import logging
from typing import Any, Protocol, cast
from typing import Any, Literal, Protocol, cast
from propcache.api import cached_property
import voluptuous as vol
@@ -16,7 +16,10 @@ from homeassistant.components import labs, websocket_api
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
from homeassistant.components.labs import async_listen as async_labs_listen
from homeassistant.const import (
ATTR_AREA_ID,
ATTR_ENTITY_ID,
ATTR_FLOOR_ID,
ATTR_LABEL_ID,
ATTR_MODE,
ATTR_NAME,
CONF_ACTIONS,
@@ -30,6 +33,7 @@ from homeassistant.const import (
CONF_OPTIONS,
CONF_PATH,
CONF_PLATFORM,
CONF_TARGET,
CONF_TRIGGERS,
CONF_VARIABLES,
CONF_ZONE,
@@ -588,20 +592,32 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
"""Return True if entity is on."""
return self._async_detach_triggers is not None or self._is_enabled
@property
@cached_property
def referenced_labels(self) -> set[str]:
"""Return a set of referenced labels."""
return self.action_script.referenced_labels
referenced = self.action_script.referenced_labels
@property
for conf in self._trigger_config:
referenced |= set(_get_targets_from_trigger_config(conf, ATTR_LABEL_ID))
return referenced
@cached_property
def referenced_floors(self) -> set[str]:
"""Return a set of referenced floors."""
return self.action_script.referenced_floors
referenced = self.action_script.referenced_floors
for conf in self._trigger_config:
referenced |= set(_get_targets_from_trigger_config(conf, ATTR_FLOOR_ID))
return referenced
@cached_property
def referenced_areas(self) -> set[str]:
"""Return a set of referenced areas."""
return self.action_script.referenced_areas
referenced = self.action_script.referenced_areas
for conf in self._trigger_config:
referenced |= set(_get_targets_from_trigger_config(conf, ATTR_AREA_ID))
return referenced
@property
def referenced_blueprint(self) -> str | None:
@@ -1209,6 +1225,9 @@ def _trigger_extract_devices(trigger_conf: dict) -> list[str]:
if trigger_conf[CONF_PLATFORM] == "tag" and CONF_DEVICE_ID in trigger_conf:
return trigger_conf[CONF_DEVICE_ID] # type: ignore[no-any-return]
if target_devices := _get_targets_from_trigger_config(trigger_conf, CONF_DEVICE_ID):
return target_devices
return []
@@ -1239,9 +1258,28 @@ def _trigger_extract_entities(trigger_conf: dict) -> list[str]:
):
return [trigger_conf[CONF_EVENT_DATA][CONF_ENTITY_ID]]
if target_entities := _get_targets_from_trigger_config(
trigger_conf, CONF_ENTITY_ID
):
return target_entities
return []
@callback
def _get_targets_from_trigger_config(
config: dict,
target: Literal["entity_id", "device_id", "area_id", "floor_id", "label_id"],
) -> list[str]:
"""Extract targets from a target config."""
if not (target_conf := config.get(CONF_TARGET)):
return []
if not (targets := target_conf.get(target)):
return []
return [targets] if isinstance(targets, str) else targets
@websocket_api.websocket_command({"type": "automation/config", "entity_id": str})
def websocket_config(
hass: HomeAssistant,
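A minimal sketch (not part of the diff) of how the new _get_targets_from_trigger_config helper behaves, using a hypothetical trigger configuration; it returns the IDs listed under the requested target key and normalises a single string to a list:

# Hypothetical trigger config with a target block.
trigger_conf = {
    "platform": "state",
    "target": {"entity_id": "light.kitchen", "area_id": ["living_room", "office"]},
}

assert _get_targets_from_trigger_config(trigger_conf, "entity_id") == ["light.kitchen"]
assert _get_targets_from_trigger_config(trigger_conf, "area_id") == ["living_room", "office"]
# Triggers without a target block contribute nothing to the extraction.
assert _get_targets_from_trigger_config({"platform": "time"}, "label_id") == []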

View File

@@ -11,6 +11,7 @@ from homeassistant.const import CONF_HOST, CONF_MAC, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from .const import CONF_USE_SSL
from .coordinator import BraviaTVConfigEntry, BraviaTVCoordinator
PLATFORMS: Final[list[Platform]] = [
@@ -26,11 +27,12 @@ async def async_setup_entry(
"""Set up a config entry."""
host = config_entry.data[CONF_HOST]
mac = config_entry.data[CONF_MAC]
ssl = config_entry.data.get(CONF_USE_SSL, False)
session = async_create_clientsession(
hass, cookie_jar=CookieJar(unsafe=True, quote_cookie=False)
)
client = BraviaClient(host, mac, session=session)
client = BraviaClient(host, mac, session=session, ssl=ssl)
coordinator = BraviaTVCoordinator(
hass=hass,
config_entry=config_entry,

View File

@@ -28,6 +28,7 @@ from .const import (
ATTR_MODEL,
CONF_NICKNAME,
CONF_USE_PSK,
CONF_USE_SSL,
DOMAIN,
NICKNAME_PREFIX,
)
@@ -46,11 +47,12 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN):
def create_client(self) -> None:
"""Create Bravia TV client from config."""
host = self.device_config[CONF_HOST]
ssl = self.device_config[CONF_USE_SSL]
session = async_create_clientsession(
self.hass,
cookie_jar=CookieJar(unsafe=True, quote_cookie=False),
)
self.client = BraviaClient(host=host, session=session)
self.client = BraviaClient(host=host, session=session, ssl=ssl)
async def gen_instance_ids(self) -> tuple[str, str]:
"""Generate client_id and nickname."""
@@ -123,10 +125,10 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle authorize step."""
self.create_client()
if user_input is not None:
self.device_config[CONF_USE_PSK] = user_input[CONF_USE_PSK]
self.device_config[CONF_USE_SSL] = user_input[CONF_USE_SSL]
self.create_client()
if user_input[CONF_USE_PSK]:
return await self.async_step_psk()
return await self.async_step_pin()
@@ -136,6 +138,7 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN):
data_schema=vol.Schema(
{
vol.Required(CONF_USE_PSK, default=False): bool,
vol.Required(CONF_USE_SSL, default=False): bool,
}
),
)

View File

@@ -12,6 +12,7 @@ ATTR_MODEL: Final = "model"
CONF_NICKNAME: Final = "nickname"
CONF_USE_PSK: Final = "use_psk"
CONF_USE_SSL: Final = "use_ssl"
DOMAIN: Final = "braviatv"
LEGACY_CLIENT_ID: Final = "HomeAssistant"

View File

@@ -7,7 +7,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pybravia"],
"requirements": ["pybravia==0.3.4"],
"requirements": ["pybravia==0.4.1"],
"ssdp": [
{
"manufacturer": "Sony Corporation",

View File

@@ -15,9 +15,10 @@
"step": {
"authorize": {
"data": {
"use_psk": "Use PSK authentication"
"use_psk": "Use PSK authentication",
"use_ssl": "Use SSL connection"
},
"description": "Make sure that «Control remotely» is enabled on your TV, go to: \nSettings -> Network -> Remote device settings -> Control remotely. \n\nThere are two authorization methods: PIN code or PSK (Pre-Shared Key). \nAuthorization via PSK is recommended as more stable.",
"description": "Make sure that «Control remotely» is enabled on your TV. Go to: \nSettings -> Network -> Remote device settings -> Control remotely. \n\nThere are two authorization methods: PIN code or PSK (Pre-Shared Key). \nAuthorization via PSK is recommended, as it is more stable. \n\nUse an SSL connection only if your TV supports this connection type.",
"title": "Authorize Sony Bravia TV"
},
"confirm": {

View File

@@ -7,11 +7,12 @@ import logging
from typing import TYPE_CHECKING
from bsblan import BSBLANError, DaySchedule, DHWSchedule, TimeSlot
import voluptuous as vol
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers import config_validation as cv, device_registry as dr
from .const import DOMAIN
@@ -33,28 +34,27 @@ ATTR_SUNDAY_SLOTS = "sunday_slots"
SERVICE_SET_HOT_WATER_SCHEDULE = "set_hot_water_schedule"
def _parse_time_value(value: time | str) -> time:
"""Parse a time value from either a time object or string.
# Schema for a single time slot
_SLOT_SCHEMA = vol.Schema(
{
vol.Required("start_time"): cv.time,
vol.Required("end_time"): cv.time,
}
)
Raises ServiceValidationError if the format is invalid.
"""
if isinstance(value, time):
return value
if isinstance(value, str):
try:
parts = value.split(":")
return time(int(parts[0]), int(parts[1]))
except (ValueError, IndexError):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_time_format",
) from None
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_time_format",
)
SERVICE_SET_HOT_WATER_SCHEDULE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_DEVICE_ID): cv.string,
vol.Optional(ATTR_MONDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
vol.Optional(ATTR_TUESDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
vol.Optional(ATTR_WEDNESDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
vol.Optional(ATTR_THURSDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
vol.Optional(ATTR_FRIDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
vol.Optional(ATTR_SATURDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
vol.Optional(ATTR_SUNDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
}
)
def _convert_time_slots_to_day_schedule(
@@ -62,8 +62,8 @@ def _convert_time_slots_to_day_schedule(
) -> DaySchedule | None:
"""Convert list of time slot dicts to a DaySchedule object.
Example: [{"start_time": "06:00", "end_time": "08:00"},
{"start_time": "17:00", "end_time": "21:00"}]
Example: [{"start_time": time(6, 0), "end_time": time(8, 0)},
{"start_time": time(17, 0), "end_time": time(21, 0)}]
becomes: DaySchedule with two TimeSlot objects
None returns None (don't modify this day).
@@ -77,31 +77,27 @@ def _convert_time_slots_to_day_schedule(
time_slots = []
for slot in slots:
start = slot.get("start_time")
end = slot.get("end_time")
start_time = slot["start_time"]
end_time = slot["end_time"]
if start and end:
start_time = _parse_time_value(start)
end_time = _parse_time_value(end)
# Validate that end time is after start time
if end_time <= start_time:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="end_time_before_start_time",
translation_placeholders={
"start_time": start_time.strftime("%H:%M"),
"end_time": end_time.strftime("%H:%M"),
},
)
time_slots.append(TimeSlot(start=start_time, end=end_time))
LOGGER.debug(
"Created time slot: %s-%s",
start_time.strftime("%H:%M"),
end_time.strftime("%H:%M"),
# Validate that end time is after start time
if end_time <= start_time:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="end_time_before_start_time",
translation_placeholders={
"start_time": start_time.strftime("%H:%M"),
"end_time": end_time.strftime("%H:%M"),
},
)
time_slots.append(TimeSlot(start=start_time, end=end_time))
LOGGER.debug(
"Created time slot: %s-%s",
start_time.strftime("%H:%M"),
end_time.strftime("%H:%M"),
)
LOGGER.debug("Created DaySchedule with %d slots", len(time_slots))
return DaySchedule(slots=time_slots)
@@ -214,4 +210,5 @@ def async_setup_services(hass: HomeAssistant) -> None:
DOMAIN,
SERVICE_SET_HOT_WATER_SCHEDULE,
set_hot_water_schedule,
schema=SERVICE_SET_HOT_WATER_SCHEDULE_SCHEMA,
)
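A hedged example of call data the new schema accepts (the device ID is made up, and ATTR_MONDAY_SLOTS is assumed to map to "monday_slots", following the "sunday_slots" pattern shown above); cv.time converts the "HH:MM" strings into datetime.time objects before the handler runs:

from datetime import time

call_data = {
    "device_id": "abc123",  # hypothetical device registry ID
    "monday_slots": [
        {"start_time": "06:00", "end_time": "08:00"},
        {"start_time": "17:00", "end_time": "21:00"},
    ],
}
validated = SERVICE_SET_HOT_WATER_SCHEDULE_SCHEMA(call_data)  # schema defined above
assert validated["monday_slots"][0]["start_time"] == time(6, 0)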

View File

@@ -33,7 +33,7 @@ HVAC_MODE_CHANGED_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST.extend
{
vol.Required(CONF_OPTIONS): {
vol.Required(CONF_HVAC_MODE): vol.All(
cv.ensure_list, vol.Length(min=1), [HVACMode]
cv.ensure_list, vol.Length(min=1), [vol.Coerce(HVACMode)]
),
},
}
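For context on why the validator changed: voluptuous treats a bare class as an isinstance check, so the plain strings carried by a trigger config were rejected, while vol.Coerce(HVACMode) converts them into enum members. A small sketch, assuming current voluptuous and climate HVACMode behaviour:

import voluptuous as vol
from homeassistant.components.climate import HVACMode

# Coerce converts the raw string from the trigger options into the enum member.
assert vol.Coerce(HVACMode)("cool") is HVACMode.COOL
# A plain string is not an HVACMode instance, so the old [HVACMode] item schema failed.
assert not isinstance("cool", HVACMode)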

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.5.0", "home-assistant-intents==2025.12.2"]
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.6"]
}

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.11", "deebot-client==17.0.0"]
"requirements": ["py-sucks==0.9.11", "deebot-client==17.0.1"]
}

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
import dataclasses
from datetime import datetime
import logging
from typing import Final
from aioecowitt import EcoWittSensor, EcoWittSensorTypes
@@ -39,6 +40,9 @@ from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM
from . import EcowittConfigEntry
from .entity import EcowittEntity
_LOGGER = logging.getLogger(__name__)
_METRIC: Final = (
EcoWittSensorTypes.TEMPERATURE_C,
EcoWittSensorTypes.RAIN_COUNT_MM,
@@ -57,6 +61,40 @@ _IMPERIAL: Final = (
)
_RAIN_COUNT_SENSORS_STATE_CLASS_MAPPING: Final = {
"eventrainin": SensorStateClass.TOTAL_INCREASING,
"hourlyrainin": None,
"totalrainin": SensorStateClass.TOTAL_INCREASING,
"dailyrainin": SensorStateClass.TOTAL_INCREASING,
"weeklyrainin": SensorStateClass.TOTAL_INCREASING,
"monthlyrainin": SensorStateClass.TOTAL_INCREASING,
"yearlyrainin": SensorStateClass.TOTAL_INCREASING,
"last24hrainin": None,
"eventrainmm": SensorStateClass.TOTAL_INCREASING,
"hourlyrainmm": None,
"totalrainmm": SensorStateClass.TOTAL_INCREASING,
"dailyrainmm": SensorStateClass.TOTAL_INCREASING,
"weeklyrainmm": SensorStateClass.TOTAL_INCREASING,
"monthlyrainmm": SensorStateClass.TOTAL_INCREASING,
"yearlyrainmm": SensorStateClass.TOTAL_INCREASING,
"last24hrainmm": None,
"erain_piezo": SensorStateClass.TOTAL_INCREASING,
"hrain_piezo": None,
"drain_piezo": SensorStateClass.TOTAL_INCREASING,
"wrain_piezo": SensorStateClass.TOTAL_INCREASING,
"mrain_piezo": SensorStateClass.TOTAL_INCREASING,
"yrain_piezo": SensorStateClass.TOTAL_INCREASING,
"last24hrain_piezo": None,
"erain_piezomm": SensorStateClass.TOTAL_INCREASING,
"hrain_piezomm": None,
"drain_piezomm": SensorStateClass.TOTAL_INCREASING,
"wrain_piezomm": SensorStateClass.TOTAL_INCREASING,
"mrain_piezomm": SensorStateClass.TOTAL_INCREASING,
"yrain_piezomm": SensorStateClass.TOTAL_INCREASING,
"last24hrain_piezomm": None,
}
ECOWITT_SENSORS_MAPPING: Final = {
EcoWittSensorTypes.HUMIDITY: SensorEntityDescription(
key="HUMIDITY",
@@ -285,15 +323,15 @@ async def async_setup_entry(
name=sensor.name,
)
# Only total rain needs state class for long-term statistics
if sensor.key in (
"totalrainin",
"totalrainmm",
if sensor.stype in (
EcoWittSensorTypes.RAIN_COUNT_INCHES,
EcoWittSensorTypes.RAIN_COUNT_MM,
):
description = dataclasses.replace(
description,
state_class=SensorStateClass.TOTAL_INCREASING,
)
if sensor.key not in _RAIN_COUNT_SENSORS_STATE_CLASS_MAPPING:
_LOGGER.warning("Unknown rain count sensor: %s", sensor.key)
return
state_class = _RAIN_COUNT_SENSORS_STATE_CLASS_MAPPING[sensor.key]
description = dataclasses.replace(description, state_class=state_class)
async_add_entities([EcowittSensorEntity(sensor, description)])
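As a side note on the replace step above, a minimal sketch of dataclasses.replace with a stand-in frozen dataclass: it returns a copy with only state_class swapped, so the shared entry in ECOWITT_SENSORS_MAPPING stays untouched.

from dataclasses import dataclass, replace

@dataclass(frozen=True)
class Desc:  # stand-in for SensorEntityDescription
    key: str
    state_class: str | None = None

base = Desc(key="RAIN_COUNT_MM")
per_sensor = replace(base, state_class="total_increasing")
assert base.state_class is None
assert per_sensor.state_class == "total_increasing"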

View File

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==43.9.0",
"aioesphomeapi==43.9.1",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.4.0"
],

View File

@@ -9,14 +9,12 @@ from homeassistant.util.hass_dict import HassKey
from .const import DOMAIN
from .coordinator import FeedReaderConfigEntry, FeedReaderCoordinator, StoredData
CONF_URLS = "urls"
MY_KEY: HassKey[StoredData] = HassKey(DOMAIN)
FEEDREADER_KEY: HassKey[StoredData] = HassKey(DOMAIN)
async def async_setup_entry(hass: HomeAssistant, entry: FeedReaderConfigEntry) -> bool:
"""Set up Feedreader from a config entry."""
storage = hass.data.setdefault(MY_KEY, StoredData(hass))
storage = hass.data.setdefault(FEEDREADER_KEY, StoredData(hass))
if not storage.is_initialized:
await storage.async_setup()
@@ -42,5 +40,5 @@ async def async_unload_entry(hass: HomeAssistant, entry: FeedReaderConfigEntry)
)
# if this is the last entry, remove the storage
if len(entries) == 1:
hass.data.pop(MY_KEY)
hass.data.pop(FEEDREADER_KEY)
return await hass.config_entries.async_unload_platforms(entry, [Platform.EVENT])

View File

@@ -42,16 +42,15 @@ class FeedReaderEvent(CoordinatorEntity[FeedReaderCoordinator], EventEntity):
_attr_event_types = [EVENT_FEEDREADER]
_attr_name = None
_attr_has_entity_name = True
_attr_translation_key = "latest_feed"
_unrecorded_attributes = frozenset(
{ATTR_CONTENT, ATTR_DESCRIPTION, ATTR_TITLE, ATTR_LINK}
)
coordinator: FeedReaderCoordinator
def __init__(self, coordinator: FeedReaderCoordinator) -> None:
"""Initialize the feedreader event."""
super().__init__(coordinator)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_latest_feed"
self._attr_translation_key = "latest_feed"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, coordinator.config_entry.entry_id)},
name=coordinator.config_entry.title,

View File

@@ -21,12 +21,6 @@
}
}
},
"issues": {
"import_yaml_error_url_error": {
"description": "Configuring the Feedreader using YAML is being removed but there was a connection error when trying to import the YAML configuration for `{url}`.\n\nPlease verify that the URL is reachable and accessible for Home Assistant and restart Home Assistant to try again or remove the Feedreader YAML configuration from your configuration.yaml file and continue to set up the integration manually.",
"title": "The Feedreader YAML configuration import failed"
}
},
"options": {
"step": {
"init": {

View File

@@ -35,6 +35,6 @@ BACKEND_MODELS = ["s1", "speech-1.5", "speech-1.6"]
SORT_BY_OPTIONS = ["task_count", "score", "created_at"]
LATENCY_OPTIONS = ["normal", "balanced"]
SIGNUP_URL = "https://fish.audio/?fpr=homeassistant" # codespell:ignore fpr
SIGNUP_URL = "https://fish.audio/"
BILLING_URL = "https://fish.audio/app/billing/"
API_KEYS_URL = "https://fish.audio/app/api-keys/"

View File

@@ -23,5 +23,5 @@
"winter_mode": {}
},
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20251229.0"]
"requirements": ["home-assistant-frontend==20260107.0"]
}

View File

@@ -48,6 +48,8 @@ async def async_tts_voices(
list_voices_response = await client.list_voices()
for voice in list_voices_response.voices:
language_code = voice.language_codes[0]
if not voice.name.startswith(language_code):
continue
if language_code not in voices:
voices[language_code] = []
voices[language_code].append(voice.name)
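A small, hypothetical illustration of the new guard (voice names are made up): only voices whose name starts with their first language code are kept, which per the commit title drops the duplicate listings.

voices: dict[str, list[str]] = {}
# Hypothetical (first language code, voice name) pairs from list_voices().
for language_code, name in [("en-US", "en-US-Standard-A"), ("en-US", "Zephyr")]:
    if not name.startswith(language_code):
        continue  # name lacks the language-code prefix, treated as a duplicate listing
    voices.setdefault(language_code, []).append(name)
assert voices == {"en-US": ["en-US-Standard-A"]}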

View File

@@ -24,7 +24,7 @@ from homeassistant.const import (
CONF_SSL,
CONF_USERNAME,
)
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.device_registry import DeviceInfo
@@ -227,7 +227,10 @@ class HikvisionBinarySensor(BinarySensorEntity):
# Register callback with pyhik
self._camera.add_update_callback(self._update_callback, self._callback_id)
@callback
def _update_callback(self, msg: str) -> None:
"""Update the sensor's state when callback is triggered."""
self.async_write_ha_state()
"""Update the sensor's state when callback is triggered.
This is called from pyhik's event stream thread, so we use
schedule_update_ha_state which is thread-safe.
"""
self.schedule_update_ha_state()
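For reference on the pattern (not part of the diff): entity state writes are only allowed from the event loop, and schedule_update_ha_state hands the write over to the loop, which is why it is the safe call from pyhik's event-stream thread. A sketch with a hypothetical callback-based library:

from homeassistant.components.binary_sensor import BinarySensorEntity

class ThreadedLibSensor(BinarySensorEntity):
    """Hypothetical sensor fed by a library that fires callbacks from its own thread."""

    def _on_lib_event(self, payload: dict) -> None:
        # Runs in the library's worker thread: schedule_update_ha_state is
        # thread-safe; async_write_ha_state must only be called from the event loop.
        self._attr_is_on = bool(payload.get("active"))
        self.schedule_update_ha_state()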

View File

@@ -4,7 +4,8 @@
"bluetooth": [
{
"connectable": true,
"service_data_uuid": "0000fe0f-0000-1000-8000-00805f9b34fb"
"service_data_uuid": "0000fe0f-0000-1000-8000-00805f9b34fb",
"service_uuid": "0000fe0f-0000-1000-8000-00805f9b34fb"
}
],
"codeowners": ["@flip-dots"],

View File

@@ -66,7 +66,7 @@
"trigger": "mdi:air-humidifier-off"
},
"turned_on": {
"trigger": "mdi:air-humidifier-on"
"trigger": "mdi:air-humidifier"
}
}
}

View File

@@ -67,21 +67,21 @@ FLOW_CONTROLLER_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] = (
HydrawiseSensorEntityDescription(
key="daily_total_water_use",
translation_key="daily_total_water_use",
device_class=SensorDeviceClass.VOLUME,
device_class=SensorDeviceClass.WATER,
suggested_display_precision=1,
value_fn=lambda sensor: _get_water_use(sensor).total_use,
),
HydrawiseSensorEntityDescription(
key="daily_active_water_use",
translation_key="daily_active_water_use",
device_class=SensorDeviceClass.VOLUME,
device_class=SensorDeviceClass.WATER,
suggested_display_precision=1,
value_fn=lambda sensor: _get_water_use(sensor).total_active_use,
),
HydrawiseSensorEntityDescription(
key="daily_inactive_water_use",
translation_key="daily_inactive_water_use",
device_class=SensorDeviceClass.VOLUME,
device_class=SensorDeviceClass.WATER,
suggested_display_precision=1,
value_fn=lambda sensor: _get_water_use(sensor).total_inactive_use,
),
@@ -91,7 +91,7 @@ FLOW_ZONE_SENSORS: tuple[SensorEntityDescription, ...] = (
HydrawiseSensorEntityDescription(
key="daily_active_water_use",
translation_key="daily_active_water_use",
device_class=SensorDeviceClass.VOLUME,
device_class=SensorDeviceClass.WATER,
suggested_display_precision=1,
value_fn=lambda sensor: float(
_get_water_use(sensor).active_use_by_zone_id.get(sensor.zone.id, 0.0)
@@ -204,7 +204,7 @@ class HydrawiseSensor(HydrawiseEntity, SensorEntity):
@property
def native_unit_of_measurement(self) -> str | None:
"""Return the unit_of_measurement of the sensor."""
if self.entity_description.device_class != SensorDeviceClass.VOLUME:
if self.entity_description.device_class != SensorDeviceClass.WATER:
return self.entity_description.native_unit_of_measurement
return (
UnitOfVolume.GALLONS
@@ -217,7 +217,7 @@ class HydrawiseSensor(HydrawiseEntity, SensorEntity):
"""Icon of the entity based on the value."""
if (
self.entity_description.key in FLOW_MEASUREMENT_KEYS
and self.entity_description.device_class == SensorDeviceClass.VOLUME
and self.entity_description.device_class == SensorDeviceClass.WATER
and round(self.state, 2) == 0.0
):
return "mdi:water-outline"

View File

@@ -116,6 +116,8 @@ class IsraelRailEntitySensor(
@property
def native_value(self) -> StateType | datetime:
"""Return the state of the sensor."""
if self.entity_description.index >= len(self.coordinator.data):
return None
return self.entity_description.value_fn(
self.coordinator.data[self.entity_description.index]
)

View File

@@ -13,7 +13,7 @@
"requirements": [
"xknx==3.13.0",
"xknxproject==3.8.2",
"knx-frontend==2025.12.28.215221"
"knx-frontend==2025.12.30.151231"
],
"single_config_entry": true
}

View File

@@ -154,6 +154,27 @@
}
},
"config_panel": {
"dashboard": {
"connection_flow": {
"description": "Reconfigure KNX connection or import a new KNX keyring file",
"title": "Connection settings"
},
"options_flow": {
"description": "Configure integration settings",
"title": "Integration options"
},
"project_upload": {
"description": "Import a KNX project file to help configure group addresses and datapoint types",
"title": "[%key:component::knx::config_panel::dialogs::project_upload::title%]"
}
},
"dialogs": {
"project_upload": {
"description": "Details such as group address names, datapoint types, devices and group objects are extracted from your project file. The ETS project file itself and its optional password are not stored.\n\n`.knxproj` files exported by ETS 4, 5 or 6 are supported.",
"file_upload_label": "ETS project file",
"title": "Import ETS project"
}
},
"dpt": {
"options": {
"5": "Generic 1-byte unsigned integer",
@@ -845,9 +866,9 @@
},
"mode": {
"description": "Select how the entity is displayed in Home Assistant.",
"label": "[%common::config_flow::data::mode%]",
"label": "[%key:common::config_flow::data::mode%]",
"options": {
"password": "[%common::config_flow::data::password%]",
"password": "[%key:common::config_flow::data::password%]",
"text": "[%key:component::text::entity_component::_::state_attributes::mode::state::text%]"
}
}

View File

@@ -80,8 +80,6 @@ async def register_panel(hass: HomeAssistant) -> None:
hass=hass,
frontend_url_path=DOMAIN,
webcomponent_name=knx_panel.webcomponent_name,
sidebar_title=DOMAIN.upper(),
sidebar_icon="mdi:bus-electric",
module_url=f"{URL_BASE}/{knx_panel.entrypoint_js}",
embed_iframe=True,
require_admin=True,

View File

@@ -154,6 +154,7 @@ SUPPORT_FAN_MODE_DEVICES: set[tuple[int, int]] = {
(0x1209, 0x8027),
(0x1209, 0x8028),
(0x1209, 0x8029),
(0x131A, 0x1000),
}
SystemModeEnum = clusters.Thermostat.Enums.SystemModeEnum

View File

@@ -116,8 +116,12 @@ class MetDataUpdateCoordinator(DataUpdateCoordinator[MetWeatherData]):
"""Fetch data from Met."""
try:
return await self.weather.fetch_data()
except Exception as err:
raise UpdateFailed(f"Update failed: {err}") from err
except CannotConnect as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_failed",
translation_placeholders={"error": str(err)},
) from err
def track_home(self) -> None:
"""Start tracking changes to HA home setting."""

View File

@@ -19,6 +19,11 @@
}
}
},
"exceptions": {
"update_failed": {
"message": "Update of data from the web site failed: {error}"
}
},
"options": {
"step": {
"init": {

View File

@@ -69,7 +69,7 @@ class RegistrationsView(HomeAssistantView):
webhook_id = secrets.token_hex()
if cloud.async_active_subscription(hass):
if cloud.async_active_subscription(hass) and cloud.async_is_connected(hass):
data[CONF_CLOUDHOOK_URL] = await async_create_cloud_hook(
hass, webhook_id, None
)

View File

@@ -4,6 +4,7 @@
"codeowners": ["@hacf-fr", "@Quentame", "@starkillerOG"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/netgear",
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["pynetgear"],
"requirements": ["pynetgear==0.10.10"],

View File

@@ -51,7 +51,6 @@ ALL_BINARY_SENSORS = [
PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.NOTIFY,
Platform.SENSOR,
]
@@ -61,6 +60,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up Netgear LTE component."""
hass.data[DATA_HASS_CONFIG] = config
async_setup_services(hass)
return True
@@ -96,19 +96,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: NetgearLTEConfigEntry) -
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
async_setup_services(hass)
await discovery.async_load_platform(
hass,
Platform.NOTIFY,
DOMAIN,
{CONF_NAME: entry.title, "modem": modem},
{CONF_NAME: entry.title, "modem": modem, "entry": entry},
hass.data[DATA_HASS_CONFIG],
)
await hass.config_entries.async_forward_entry_setups(
entry, [platform for platform in PLATFORMS if platform != Platform.NOTIFY]
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
@@ -118,7 +114,5 @@ async def async_unload_entry(hass: HomeAssistant, entry: NetgearLTEConfigEntry)
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if not hass.config_entries.async_loaded_entries(DOMAIN):
hass.data.pop(DOMAIN, None)
for service_name in hass.services.async_services()[DOMAIN]:
hass.services.async_remove(DOMAIN, service_name)
return unload_ok

View File

@@ -14,7 +14,7 @@ from homeassistant.const import CONF_HOST, CONF_PASSWORD
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from .const import DEFAULT_HOST, DOMAIN, LOGGER, MANUFACTURER
from .const import DEFAULT_HOST, DOMAIN, MANUFACTURER
class NetgearLTEFlowHandler(ConfigFlow, domain=DOMAIN):
@@ -72,9 +72,6 @@ class NetgearLTEFlowHandler(ConfigFlow, domain=DOMAIN):
info = await modem.information()
except Error as ex:
raise InputValidationError("cannot_connect") from ex
except Exception as ex:
LOGGER.exception("Unexpected exception")
raise InputValidationError("unknown") from ex
await modem.logout()
return info

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["eternalegypt"],
"requirements": ["eternalegypt==0.0.16"]
"requirements": ["eternalegypt==0.0.18"]
}

View File

@@ -38,6 +38,7 @@ class NetgearNotifyService(BaseNotificationService):
"""Initialize the service."""
self.config = config
self.modem: Modem = discovery_info["modem"]
discovery_info["entry"].async_on_unload(self.async_unregister_services)
async def async_send_message(self, message="", **kwargs):
"""Send a message to a user."""

View File

@@ -4,6 +4,7 @@ import voluptuous as vol
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv
from .const import (
@@ -14,7 +15,6 @@ from .const import (
AUTOCONNECT_MODES,
DOMAIN,
FAILOVER_MODES,
LOGGER,
)
from .coordinator import NetgearLTEConfigEntry
@@ -56,8 +56,11 @@ async def _service_handler(call: ServiceCall) -> None:
break
if not entry or not (modem := entry.runtime_data.modem).token:
LOGGER.error("%s: host %s unavailable", call.service, host)
return
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="config_entry_not_found",
translation_placeholders={"service": call.service},
)
if call.service == SERVICE_DELETE_SMS:
for sms_id in call.data[ATTR_SMS_ID]:

View File

@@ -71,6 +71,11 @@
}
}
},
"exceptions": {
"config_entry_not_found": {
"message": "Failed to perform action \"{service}\". Config entry for target not found"
}
},
"services": {
"connect_lte": {
"description": "Asks the modem to establish the LTE connection.",

View File

@@ -10,6 +10,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/nuheat",
"integration_type": "device",
"iot_class": "cloud_polling",
"loggers": ["nuheat"],
"requirements": ["nuheat==1.0.1"]

View File

@@ -9,5 +9,5 @@
"iot_class": "cloud_polling",
"loggers": ["opower"],
"quality_scale": "bronze",
"requirements": ["opower==0.15.9"]
"requirements": ["opower==0.16.0"]
}

View File

@@ -4,6 +4,7 @@
"codeowners": ["@IsakNyberg"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/permobil",
"integration_type": "device",
"iot_class": "cloud_polling",
"requirements": ["mypermobil==0.1.8"]
}

View File

@@ -9,6 +9,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/pooldose",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["python-pooldose==0.8.1"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@haemishkyd"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/poolsense",
"integration_type": "device",
"iot_class": "cloud_polling",
"loggers": ["poolsense"],
"requirements": ["poolsense==0.0.8"]

View File

@@ -15,6 +15,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import PortainerConfigEntry
from .const import CONTAINER_STATE_RUNNING
from .coordinator import PortainerContainerData, PortainerCoordinator
from .entity import (
PortainerContainerEntity,
@@ -41,7 +42,7 @@ CONTAINER_SENSORS: tuple[PortainerContainerBinarySensorEntityDescription, ...] =
PortainerContainerBinarySensorEntityDescription(
key="status",
translation_key="status",
state_fn=lambda data: data.container.state == "running",
state_fn=lambda data: data.container.state == CONTAINER_STATE_RUNNING,
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
),

View File

@@ -4,3 +4,5 @@ DOMAIN = "portainer"
DEFAULT_NAME = "Portainer"
ENDPOINT_STATUS_DOWN = 2
CONTAINER_STATE_RUNNING = "running"

View File

@@ -24,7 +24,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, ENDPOINT_STATUS_DOWN
from .const import CONTAINER_STATE_RUNNING, DOMAIN, ENDPOINT_STATUS_DOWN
type PortainerConfigEntry = ConfigEntry[PortainerCoordinator]
@@ -158,10 +158,11 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
),
)
for container in containers
if container.state == CONTAINER_STATE_RUNNING
]
container_stats_gather = await asyncio.gather(
*[task for _, task in container_stats_task],
*[task for _, task in container_stats_task]
)
for (container, _), container_stats in zip(
container_stats_task, container_stats_gather, strict=False

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["pyportainer==1.0.17"]
"requirements": ["pyportainer==1.0.19"]
}

View File

@@ -4,6 +4,7 @@
"codeowners": ["@ktnrg45"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/ps4",
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["pyps4_2ndscreen"],
"requirements": ["pyps4-2ndscreen==1.3.1"]

View File

@@ -19,6 +19,7 @@ from homeassistant.components.light import (
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import color as color_util
from .entity import (
ReolinkChannelCoordinatorEntity,
@@ -157,16 +158,16 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity):
@property
def brightness(self) -> int | None:
"""Return the brightness of this light between 0.255."""
"""Return the brightness of this light between 1.255."""
assert self.entity_description.get_brightness_fn is not None
bright_pct = self.entity_description.get_brightness_fn(
self._host.api, self._channel
)
if bright_pct is None:
if not bright_pct:
return None
return round(255 * bright_pct / 100.0)
return color_util.value_to_brightness((1, 100), bright_pct)
@property
def color_temp_kelvin(self) -> int | None:
@@ -189,7 +190,7 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity):
if (
brightness := kwargs.get(ATTR_BRIGHTNESS)
) is not None and self.entity_description.set_brightness_fn is not None:
brightness_pct = int(brightness / 255.0 * 100)
brightness_pct = round(color_util.brightness_to_value((1, 100), brightness))
await self.entity_description.set_brightness_fn(
self._host.api, self._channel, brightness_pct
)
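A quick sanity check on the helpers used above, assuming they scale linearly between the two ranges as the current homeassistant.util.color implementation does (low-end rounding may differ by a point):

from homeassistant.util import color as color_util

# 100 % maps to full brightness, and 255 maps back to 100 %.
assert color_util.value_to_brightness((1, 100), 100) == 255
assert round(color_util.brightness_to_value((1, 100), 255)) == 100
# Mid-scale values land roughly halfway, e.g. 50 % -> ~128.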

View File

@@ -128,8 +128,9 @@ class RingCam(RingEntity[RingDoorBell], Camera):
self._device = self._get_coordinator_data().get_video_device(
self._device.device_api_id
)
history_data = self._device.last_history
if history_data:
if history_data and self._device.has_subscription:
self._last_event = history_data[0]
# will call async_update to update the attributes and get the
# video url from the api
@@ -154,8 +155,16 @@ class RingCam(RingEntity[RingDoorBell], Camera):
self, width: int | None = None, height: int | None = None
) -> bytes | None:
"""Return a still image response from the camera."""
if self._video_url is None:
if not self._device.has_subscription:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="no_subscription",
)
return None
key = (width, height)
if not (image := self._images.get(key)) and self._video_url is not None:
if not (image := self._images.get(key)):
image = await ffmpeg.async_get_image(
self.hass,
self._video_url,

View File

@@ -151,6 +151,9 @@
"api_timeout": {
"message": "Timeout communicating with Ring API"
},
"no_subscription": {
"message": "Ring Protect subscription required for snapshots"
},
"sdp_m_line_index_required": {
"message": "Error negotiating stream for {device}"
}

View File

@@ -79,6 +79,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
map_scale=MAP_SCALE,
),
mqtt_session_unauthorized_hook=lambda: entry.async_start_reauth(hass),
prefer_cache=False,
)
except RoborockInvalidCredentials as err:
raise ConfigEntryAuthFailed(

View File

@@ -20,7 +20,7 @@
"loggers": ["roborock"],
"quality_scale": "silver",
"requirements": [
"python-roborock==3.21.0",
"python-roborock==4.2.1",
"vacuum-map-parser-roborock==0.1.4"
]
}

View File

@@ -391,15 +391,6 @@ Q7_B01_SENSOR_DESCRIPTIONS = [
translation_key="mop_life_time_left",
entity_category=EntityCategory.DIAGNOSTIC,
),
RoborockSensorDescriptionB01(
key="total_cleaning_time",
value_fn=lambda data: data.real_clean_time,
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.MINUTES,
suggested_unit_of_measurement=UnitOfTime.HOURS,
translation_key="total_cleaning_time",
entity_category=EntityCategory.DIAGNOSTIC,
),
]

View File

@@ -325,8 +325,7 @@ class ShoppingData:
)
return self.items
@callback
def async_reorder(
async def async_reorder(
self, item_ids: list[str], context: Context | None = None
) -> None:
"""Reorder items."""
@@ -351,7 +350,7 @@ class ShoppingData:
)
new_items.append(value)
self.items = new_items
self.hass.async_add_executor_job(self.save)
await self.hass.async_add_executor_job(self.save)
self._async_notify()
self.hass.bus.async_fire(
EVENT_SHOPPING_LIST_UPDATED,
@@ -388,7 +387,7 @@ class ShoppingData:
) -> None:
"""Sort items by name."""
self.items = sorted(self.items, key=lambda item: item["name"], reverse=reverse) # type: ignore[arg-type,return-value]
self.hass.async_add_executor_job(self.save)
await self.hass.async_add_executor_job(self.save)
self._async_notify()
self.hass.bus.async_fire(
EVENT_SHOPPING_LIST_UPDATED,
@@ -591,7 +590,8 @@ async def websocket_handle_clear(
vol.Required("item_ids"): [str],
}
)
def websocket_handle_reorder(
@websocket_api.async_response
async def websocket_handle_reorder(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
@@ -599,7 +599,9 @@ def websocket_handle_reorder(
"""Handle reordering shopping_list items."""
msg_id = msg.pop("id")
try:
hass.data[DOMAIN].async_reorder(msg.pop("item_ids"), connection.context(msg))
await hass.data[DOMAIN].async_reorder(
msg.pop("item_ids"), connection.context(msg)
)
except NoMatchingShoppingListItem:
connection.send_error(
msg_id,
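The idea behind awaiting the executor job, as a standalone sketch (file path and function names are illustrative): hass.async_add_executor_job wraps loop.run_in_executor, so a bare call only schedules the blocking save, while awaiting it guarantees the write has finished before listeners are notified.

import asyncio
import json

def blocking_save(items: list[dict]) -> None:
    with open("/tmp/shopping_list.json", "w") as fp:  # illustrative path
        json.dump(items, fp)

async def reorder_and_save(items: list[dict]) -> None:
    loop = asyncio.get_running_loop()
    # Awaiting keeps the coroutine suspended until the executor thread is done,
    # mirroring the change from async_add_executor_job(...) to await ... above.
    await loop.run_in_executor(None, blocking_save, items)
    # _async_notify() and the bus event would fire here, after the data is on disk.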

View File

@@ -46,7 +46,7 @@ SENSOR_TYPES = [
key="lifetime_energy",
json_key="lifeTimeData",
translation_key="lifetime_energy",
state_class=SensorStateClass.TOTAL,
state_class=SensorStateClass.TOTAL_INCREASING,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
),
@@ -55,6 +55,7 @@ SENSOR_TYPES = [
json_key="lastYearData",
translation_key="energy_this_year",
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL_INCREASING,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
),
@@ -63,6 +64,7 @@ SENSOR_TYPES = [
json_key="lastMonthData",
translation_key="energy_this_month",
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL_INCREASING,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
),
@@ -71,6 +73,7 @@ SENSOR_TYPES = [
json_key="lastDayData",
translation_key="energy_today",
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL_INCREASING,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
),
@@ -123,24 +126,32 @@ SENSOR_TYPES = [
json_key="LOAD",
translation_key="power_consumption",
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.POWER,
),
SolarEdgeSensorEntityDescription(
key="solar_power",
json_key="PV",
translation_key="solar_power",
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.POWER,
),
SolarEdgeSensorEntityDescription(
key="grid_power",
json_key="GRID",
translation_key="grid_power",
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.POWER,
),
SolarEdgeSensorEntityDescription(
key="storage_power",
json_key="STORAGE",
translation_key="storage_power",
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.POWER,
),
SolarEdgeSensorEntityDescription(
key="purchased_energy",
@@ -194,6 +205,7 @@ SENSOR_TYPES = [
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.BATTERY,
),
]

View File

@@ -7,5 +7,5 @@
"iot_class": "local_polling",
"loggers": ["solarlog_cli"],
"quality_scale": "platinum",
"requirements": ["solarlog_cli==0.6.1"]
"requirements": ["solarlog_cli==0.7.0"]
}

View File

@@ -958,6 +958,23 @@ class SonosSpeaker:
# as those "invisible" speakers will bypass the single speaker check
return
# Clear coordinator on speakers that are no longer in this group
old_members = set(self.sonos_group[1:])
new_members = set(sonos_group[1:])
removed_members = old_members - new_members
for removed_speaker in removed_members:
# Only clear if this speaker was coordinated by self and in the same group
if (
removed_speaker.coordinator == self
and removed_speaker.sonos_group is self.sonos_group
):
_LOGGER.debug(
"Zone %s Cleared coordinator [%s] (removed from group)",
removed_speaker.zone_name,
self.zone_name,
)
removed_speaker.clear_coordinator()
self.coordinator = None
self.sonos_group = sonos_group
self.sonos_group_entities = sonos_group_entities
@@ -990,6 +1007,19 @@ class SonosSpeaker:
return _async_handle_group_event(event)
@callback
def clear_coordinator(self) -> None:
"""Clear coordinator from speaker."""
self.coordinator = None
self.sonos_group = [self]
entity_registry = er.async_get(self.hass)
speaker_entity_id = cast(
str,
entity_registry.async_get_entity_id(MP_DOMAIN, DOMAIN, self.uid),
)
self.sonos_group_entities = [speaker_entity_id]
self.async_write_entity_states()
@soco_error()
def join(self, speakers: list[SonosSpeaker]) -> list[SonosSpeaker]:
"""Form a group with other players."""
@@ -1038,7 +1068,6 @@ class SonosSpeaker:
if self.sonos_group == [self]:
return
self.soco.unjoin()
self.coordinator = None
@staticmethod
async def unjoin_multi(

View File

@@ -20,6 +20,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_WEBHOOK_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN, ENTRY_TITLE
from .coordinator import SwitchBotCoordinator
@@ -170,6 +171,7 @@ async def make_device_data(
"K20+ Pro",
"Robot Vacuum Cleaner K10+ Pro Combo",
"Robot Vacuum Cleaner S10",
"Robot Vacuum Cleaner S20",
"S20",
"Robot Vacuum Cleaner K11 Plus",
]:
@@ -309,7 +311,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
token = entry.data[CONF_API_TOKEN]
secret = entry.data[CONF_API_KEY]
api = SwitchBotAPI(token=token, secret=secret)
api = SwitchBotAPI(
token=token, secret=secret, session=async_get_clientsession(hass)
)
try:
devices = await api.list_devices()
except SwitchBotAuthenticationError as ex:

View File

@@ -245,6 +245,9 @@ def _async_make_entity(
return SwitchBotCloudVacuumV2(api, device, coordinator)
if device.device_type == "Robot Vacuum Cleaner K10+ Pro Combo":
return SwitchBotCloudVacuumK10PlusProCombo(api, device, coordinator)
if device.device_type in VacuumCleanerV3Commands.get_supported_devices():
if (
device.device_type in VacuumCleanerV3Commands.get_supported_devices()
or device.device_type == "Robot Vacuum Cleaner S20"
):
return SwitchBotCloudVacuumV3(api, device, coordinator)
return SwitchBotCloudVacuum(api, device, coordinator)

View File

@@ -80,10 +80,6 @@ class TelegramNotificationService(BaseNotificationService):
def send_message(self, message="", **kwargs):
"""Send a message to a user."""
service_data = {ATTR_TARGET: kwargs.get(ATTR_TARGET, self._chat_id)}
if ATTR_TITLE in kwargs:
service_data.update({ATTR_TITLE: kwargs.get(ATTR_TITLE)})
if message:
service_data.update({ATTR_MESSAGE: message})
data = kwargs.get(ATTR_DATA)
# Set message tag
@@ -161,6 +157,12 @@ class TelegramNotificationService(BaseNotificationService):
)
# Send message
if ATTR_TITLE in kwargs:
service_data.update({ATTR_TITLE: kwargs.get(ATTR_TITLE)})
if message:
service_data.update({ATTR_MESSAGE: message})
_LOGGER.debug(
"TELEGRAM NOTIFIER calling %s.send_message with %s",
TELEGRAM_BOT_DOMAIN,

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
import time
from typing import Any
from tesla_fleet_api.const import Scope
@@ -24,6 +25,9 @@ SCHEDULED = "scheduled"
PARALLEL_UPDATES = 0
# Show scheduled update as installing if within this many seconds
SCHEDULED_THRESHOLD_SECONDS = 120
async def async_setup_entry(
hass: HomeAssistant,
@@ -69,12 +73,9 @@ class TeslaFleetUpdateEntity(TeslaFleetVehicleEntity, UpdateEntity):
def _async_update_attrs(self) -> None:
"""Update the attributes of the entity."""
# Supported Features
if self.scoped and self._value in (
AVAILABLE,
SCHEDULED,
):
# Only allow install when an update has been fully downloaded
# Supported Features - only show install button if update is available
# but not already scheduled
if self.scoped and self._value == AVAILABLE:
self._attr_supported_features = (
UpdateEntityFeature.PROGRESS | UpdateEntityFeature.INSTALL
)
@@ -87,13 +88,9 @@ class TeslaFleetUpdateEntity(TeslaFleetVehicleEntity, UpdateEntity):
# Remove build from version
self._attr_installed_version = self._attr_installed_version.split(" ")[0]
# Latest Version
if self._value in (
AVAILABLE,
SCHEDULED,
INSTALLING,
DOWNLOADING,
WIFI_WAIT,
# Latest Version - hide update if scheduled far in the future
if self._value in (AVAILABLE, INSTALLING, DOWNLOADING, WIFI_WAIT) or (
self._value == SCHEDULED and self._is_scheduled_soon()
):
self._attr_latest_version = self.coordinator.data[
"vehicle_state_software_update_version"
@@ -101,14 +98,24 @@ class TeslaFleetUpdateEntity(TeslaFleetVehicleEntity, UpdateEntity):
else:
self._attr_latest_version = self._attr_installed_version
# In Progress
if self._value in (
SCHEDULED,
INSTALLING,
):
# In Progress - only show as installing if actually installing or
# scheduled to start within 2 minutes
if self._value == INSTALLING:
self._attr_in_progress = True
if install_perc := self.get("vehicle_state_software_update_install_perc"):
self._attr_update_percentage = install_perc
elif self._value == SCHEDULED and self._is_scheduled_soon():
self._attr_in_progress = True
self._attr_update_percentage = None
else:
self._attr_in_progress = False
self._attr_update_percentage = None
def _is_scheduled_soon(self) -> bool:
"""Check if a scheduled update is within the threshold to start."""
scheduled_time_ms = self.get("vehicle_state_software_update_scheduled_time_ms")
if scheduled_time_ms is None:
return False
# Convert milliseconds to seconds and compare to current time
scheduled_time_sec = scheduled_time_ms / 1000
return scheduled_time_sec - time.time() < SCHEDULED_THRESHOLD_SECONDS
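A worked check of the threshold logic, mirroring the _is_scheduled_soon helper above and assuming (as the diff does) that the API reports the scheduled start as a Unix timestamp in milliseconds:

import time

SCHEDULED_THRESHOLD_SECONDS = 120

def is_scheduled_soon(scheduled_time_ms: int | None) -> bool:
    if scheduled_time_ms is None:
        return False
    return scheduled_time_ms / 1000 - time.time() < SCHEDULED_THRESHOLD_SECONDS

now_ms = time.time() * 1000
assert is_scheduled_soon(now_ms + 90_000)       # starts in 90 s: shown as installing
assert not is_scheduled_soon(now_ms + 600_000)  # starts in 10 min: not in progress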

View File

@@ -330,7 +330,9 @@ class TeslemetryOperationSelectEntity(TeslemetryEnergyInfoEntity, SelectEntity):
self.async_write_ha_state()
class TeslemetryExportRuleSelectEntity(TeslemetryEnergyInfoEntity, SelectEntity):
class TeslemetryExportRuleSelectEntity(
TeslemetryEnergyInfoEntity, SelectEntity, RestoreEntity
):
"""Select entity for export rules select entities."""
_attr_options: list[str] = [
@@ -348,9 +350,28 @@ class TeslemetryExportRuleSelectEntity(TeslemetryEnergyInfoEntity, SelectEntity)
self.scoped = Scope.ENERGY_CMDS in scopes
super().__init__(data, "components_customer_preferred_export_rule")
async def async_added_to_hass(self) -> None:
"""Handle entity which will be added."""
await super().async_added_to_hass()
# Restore state if it's not known
if self._attr_current_option is None:
if (state := await self.async_get_last_state()) is not None:
if state.state in self._attr_options:
self._attr_current_option = state.state
def _async_update_attrs(self) -> None:
"""Update the attributes of the entity."""
self._attr_current_option = self.get(self.key, EnergyExportMode.NEVER.value)
if value := self._value:
# Customer selected export option
self._attr_current_option = value
elif self.get("components_non_export_configured") is True:
# In VPP, Export is disabled
self._attr_current_option = EnergyExportMode.NEVER
elif self._attr_current_option == EnergyExportMode.NEVER:
# In VPP, Export is enabled, but our state shows it is disabled
self._attr_current_option = None # Unknown
# In VPP Mode, Export isn't disabled, so use last known state
async def async_select_option(self, option: str) -> None:
"""Change the selected option."""

View File

@@ -1,20 +1,36 @@
"""Support for Tibber."""
from __future__ import annotations
from dataclasses import dataclass, field
import logging
import aiohttp
from aiohttp.client_exceptions import ClientError, ClientResponseError
import tibber
from tibber import data_api as tibber_data_api
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, EVENT_HOMEASSISTANT_STOP, Platform
from homeassistant.core import Event, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import (
ImplementationUnavailableError,
OAuth2Session,
async_get_config_entry_implementation,
)
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util, ssl as ssl_util
from .const import DATA_HASS_CONFIG, DOMAIN
from .const import (
AUTH_IMPLEMENTATION,
CONF_LEGACY_ACCESS_TOKEN,
DATA_HASS_CONFIG,
DOMAIN,
TibberConfigEntry,
)
from .coordinator import TibberDataAPICoordinator
from .services import async_setup_services
PLATFORMS = [Platform.NOTIFY, Platform.SENSOR]
@@ -24,6 +40,33 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
_LOGGER = logging.getLogger(__name__)
@dataclass
class TibberRuntimeData:
"""Runtime data for Tibber API entries."""
tibber_connection: tibber.Tibber
session: OAuth2Session
data_api_coordinator: TibberDataAPICoordinator | None = field(default=None)
_client: tibber_data_api.TibberDataAPI | None = None
async def async_get_client(
self, hass: HomeAssistant
) -> tibber_data_api.TibberDataAPI:
"""Return an authenticated Tibber Data API client."""
await self.session.async_ensure_token_valid()
token = self.session.token
access_token = token.get(CONF_ACCESS_TOKEN)
if not access_token:
raise ConfigEntryAuthFailed("Access token missing from OAuth session")
if self._client is None:
self._client = tibber_data_api.TibberDataAPI(
access_token,
websession=async_get_clientsession(hass),
)
self._client.set_access_token(access_token)
return self._client
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Tibber component."""
@@ -34,16 +77,23 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: TibberConfigEntry) -> bool:
"""Set up a config entry."""
# Added in 2026.1 to migrate existing users to OAuth2 (Tibber Data API).
# Can be removed after 2026.7
if AUTH_IMPLEMENTATION not in entry.data:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="data_api_reauth_required",
)
tibber_connection = tibber.Tibber(
access_token=entry.data[CONF_ACCESS_TOKEN],
access_token=entry.data[CONF_LEGACY_ACCESS_TOKEN],
websession=async_get_clientsession(hass),
time_zone=dt_util.get_default_time_zone(),
ssl=ssl_util.get_default_context(),
)
hass.data[DOMAIN] = tibber_connection
async def _close(event: Event) -> None:
await tibber_connection.rt_disconnect()
@@ -52,7 +102,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
try:
await tibber_connection.update_info()
except (
TimeoutError,
aiohttp.ClientError,
@@ -65,17 +114,45 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
except tibber.FatalHttpExceptionError:
return False
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
try:
implementation = await async_get_config_entry_implementation(hass, entry)
except ImplementationUnavailableError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="oauth2_implementation_unavailable",
) from err
session = OAuth2Session(hass, entry, implementation)
try:
await session.async_ensure_token_valid()
except ClientResponseError as err:
if 400 <= err.status < 500:
raise ConfigEntryAuthFailed(
"OAuth session is not valid, reauthentication required"
) from err
raise ConfigEntryNotReady from err
except ClientError as err:
raise ConfigEntryNotReady from err
entry.runtime_data = TibberRuntimeData(
tibber_connection=tibber_connection,
session=session,
)
coordinator = TibberDataAPICoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data.data_api_coordinator = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
async def async_unload_entry(
hass: HomeAssistant, config_entry: TibberConfigEntry
) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(
if unload_ok := await hass.config_entries.async_unload_platforms(
config_entry, PLATFORMS
)
if unload_ok:
tibber_connection = hass.data[DOMAIN]
await tibber_connection.rt_disconnect()
):
await config_entry.runtime_data.tibber_connection.rt_disconnect()
return unload_ok
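
A hedged, self-contained sketch of the cached-client pattern in TibberRuntimeData.async_get_client: refresh the token first, build the API client once, then push the current token into the cached client on every call. DemoClient and the refresh callback are invented for this sketch; they are not pyTibber or Home Assistant APIs.

import asyncio

class DemoClient:
    """Illustrative stand-in for tibber.data_api.TibberDataAPI."""

    def __init__(self, access_token: str) -> None:
        self.access_token = access_token

    def set_access_token(self, access_token: str) -> None:
        self.access_token = access_token

class DemoRuntimeData:
    """Cache one client instance and keep its token fresh."""

    def __init__(self, refresh_token) -> None:
        self._refresh_token = refresh_token   # async callable returning a token
        self._client: DemoClient | None = None

    async def async_get_client(self) -> DemoClient:
        token = await self._refresh_token()   # ensure the token is valid first
        if not token:
            raise RuntimeError("Access token missing")  # ConfigEntryAuthFailed in HA
        if self._client is None:              # construct the client only once
            self._client = DemoClient(token)
        self._client.set_access_token(token)  # reuse it with the current token
        return self._client

async def main() -> None:
    async def refresh() -> str:
        return "fresh-token"

    runtime = DemoRuntimeData(refresh)
    client = await runtime.async_get_client()
    print(client.access_token)  # fresh-token

asyncio.run(main())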

View File

@@ -0,0 +1,15 @@
"""Application credentials platform for Tibber."""
from homeassistant.components.application_credentials import AuthorizationServer
from homeassistant.core import HomeAssistant
AUTHORIZE_URL = "https://thewall.tibber.com/connect/authorize"
TOKEN_URL = "https://thewall.tibber.com/connect/token"
async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
"""Return authorization server for Tibber Data API."""
return AuthorizationServer(
authorize_url=AUTHORIZE_URL,
token_url=TOKEN_URL,
)

View File

@@ -2,80 +2,164 @@
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
import aiohttp
import tibber
from tibber import data_api as tibber_data_api
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler
from .const import DOMAIN
from .const import CONF_LEGACY_ACCESS_TOKEN, DATA_API_DEFAULT_SCOPES, DOMAIN
DATA_SCHEMA = vol.Schema({vol.Required(CONF_ACCESS_TOKEN): str})
DATA_SCHEMA = vol.Schema({vol.Required(CONF_LEGACY_ACCESS_TOKEN): str})
ERR_TIMEOUT = "timeout"
ERR_CLIENT = "cannot_connect"
ERR_TOKEN = "invalid_access_token"
TOKEN_URL = "https://developer.tibber.com/settings/access-token"
_LOGGER = logging.getLogger(__name__)
class TibberConfigFlow(ConfigFlow, domain=DOMAIN):
class TibberConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
"""Handle a config flow for Tibber integration."""
VERSION = 1
DOMAIN = DOMAIN
def __init__(self) -> None:
"""Initialize the config flow."""
super().__init__()
self._access_token: str | None = None
self._title = ""
@property
def logger(self) -> logging.Logger:
"""Return the logger."""
return _LOGGER
@property
def extra_authorize_data(self) -> dict[str, Any]:
"""Extra data appended to the authorize URL."""
return {
**super().extra_authorize_data,
"scope": " ".join(DATA_API_DEFAULT_SCOPES),
}
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
self._async_abort_entries_match()
if user_input is not None:
access_token = user_input[CONF_ACCESS_TOKEN].replace(" ", "")
tibber_connection = tibber.Tibber(
access_token=access_token,
websession=async_get_clientsession(self.hass),
if user_input is None:
data_schema = self.add_suggested_values_to_schema(
DATA_SCHEMA, {CONF_LEGACY_ACCESS_TOKEN: self._access_token or ""}
)
errors = {}
return self.async_show_form(
step_id=SOURCE_USER,
data_schema=data_schema,
description_placeholders={"url": TOKEN_URL},
errors={},
)
try:
await tibber_connection.update_info()
except TimeoutError:
errors[CONF_ACCESS_TOKEN] = ERR_TIMEOUT
except tibber.InvalidLoginError:
errors[CONF_ACCESS_TOKEN] = ERR_TOKEN
except (
aiohttp.ClientError,
tibber.RetryableHttpExceptionError,
tibber.FatalHttpExceptionError,
):
errors[CONF_ACCESS_TOKEN] = ERR_CLIENT
self._access_token = user_input[CONF_LEGACY_ACCESS_TOKEN].replace(" ", "")
tibber_connection = tibber.Tibber(
access_token=self._access_token,
websession=async_get_clientsession(self.hass),
)
self._title = tibber_connection.name or "Tibber"
if errors:
return self.async_show_form(
step_id="user",
data_schema=DATA_SCHEMA,
description_placeholders={"url": TOKEN_URL},
errors=errors,
)
errors: dict[str, str] = {}
try:
await tibber_connection.update_info()
except TimeoutError:
errors[CONF_LEGACY_ACCESS_TOKEN] = ERR_TIMEOUT
except tibber.InvalidLoginError:
errors[CONF_LEGACY_ACCESS_TOKEN] = ERR_TOKEN
except (
aiohttp.ClientError,
tibber.RetryableHttpExceptionError,
tibber.FatalHttpExceptionError,
):
errors[CONF_LEGACY_ACCESS_TOKEN] = ERR_CLIENT
unique_id = tibber_connection.user_id
await self.async_set_unique_id(unique_id)
if errors:
data_schema = self.add_suggested_values_to_schema(
DATA_SCHEMA, {CONF_LEGACY_ACCESS_TOKEN: self._access_token or ""}
)
return self.async_show_form(
step_id=SOURCE_USER,
data_schema=data_schema,
description_placeholders={"url": TOKEN_URL},
errors=errors,
)
await self.async_set_unique_id(tibber_connection.user_id)
if self.source == SOURCE_REAUTH:
reauth_entry = self._get_reauth_entry()
self._abort_if_unique_id_mismatch(
reason="wrong_account",
description_placeholders={"title": reauth_entry.title},
)
else:
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=tibber_connection.name,
data={CONF_ACCESS_TOKEN: access_token},
return await self.async_step_pick_implementation()
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle a reauth flow."""
reauth_entry = self._get_reauth_entry()
self._access_token = reauth_entry.data.get(CONF_LEGACY_ACCESS_TOKEN)
self._title = reauth_entry.title
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauthentication by reusing the user step."""
reauth_entry = self._get_reauth_entry()
self._access_token = reauth_entry.data.get(CONF_LEGACY_ACCESS_TOKEN)
self._title = reauth_entry.title
if user_input is None:
return self.async_show_form(
step_id="reauth_confirm",
)
return await self.async_step_user()
async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
"""Finalize the OAuth flow and create the config entry."""
if self._access_token is None:
return self.async_abort(reason="missing_configuration")
data[CONF_LEGACY_ACCESS_TOKEN] = self._access_token
access_token = data[CONF_TOKEN][CONF_ACCESS_TOKEN]
data_api_client = tibber_data_api.TibberDataAPI(
access_token,
websession=async_get_clientsession(self.hass),
)
try:
await data_api_client.get_userinfo()
except (aiohttp.ClientError, TimeoutError):
return self.async_abort(reason="cannot_connect")
if self.source == SOURCE_REAUTH:
reauth_entry = self._get_reauth_entry()
return self.async_update_reload_and_abort(
reauth_entry,
data=data,
title=self._title,
)
return self.async_show_form(
step_id="user",
data_schema=DATA_SCHEMA,
description_placeholders={"url": TOKEN_URL},
errors={},
)
return self.async_create_entry(title=self._title, data=data)

View File

@@ -1,5 +1,34 @@
"""Constants for Tibber integration."""
from __future__ import annotations
from typing import TYPE_CHECKING
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN
if TYPE_CHECKING:
from . import TibberRuntimeData
type TibberConfigEntry = ConfigEntry[TibberRuntimeData]
CONF_LEGACY_ACCESS_TOKEN = CONF_ACCESS_TOKEN
AUTH_IMPLEMENTATION = "auth_implementation"
DATA_HASS_CONFIG = "tibber_hass_config"
DOMAIN = "tibber"
MANUFACTURER = "Tibber"
DATA_API_DEFAULT_SCOPES = [
"openid",
"profile",
"email",
"offline_access",
"data-api-user-read",
"data-api-chargers-read",
"data-api-energy-systems-read",
"data-api-homes-read",
"data-api-thermostats-read",
"data-api-vehicles-read",
"data-api-inverters-read",
]

View File

@@ -4,9 +4,11 @@ from __future__ import annotations
from datetime import timedelta
import logging
from typing import cast
from typing import TYPE_CHECKING, cast
from aiohttp.client_exceptions import ClientError
import tibber
from tibber.data_api import TibberDataAPI, TibberDevice
from homeassistant.components.recorder import get_instance
from homeassistant.components.recorder.models import (
@@ -19,15 +21,18 @@ from homeassistant.components.recorder.statistics import (
get_last_statistics,
statistics_during_period,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import UnitOfEnergy
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
from homeassistant.util.unit_conversion import EnergyConverter
from .const import DOMAIN
if TYPE_CHECKING:
from .const import TibberConfigEntry
FIVE_YEARS = 5 * 365 * 24
_LOGGER = logging.getLogger(__name__)
@@ -36,12 +41,12 @@ _LOGGER = logging.getLogger(__name__)
class TibberDataCoordinator(DataUpdateCoordinator[None]):
"""Handle Tibber data and insert statistics."""
config_entry: ConfigEntry
config_entry: TibberConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: TibberConfigEntry,
tibber_connection: tibber.Tibber,
) -> None:
"""Initialize the data handler."""
@@ -187,3 +192,64 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]):
unit_of_measurement=unit,
)
async_add_external_statistics(self.hass, metadata, statistics)
class TibberDataAPICoordinator(DataUpdateCoordinator[dict[str, TibberDevice]]):
"""Fetch and cache Tibber Data API device capabilities."""
config_entry: TibberConfigEntry
def __init__(
self,
hass: HomeAssistant,
entry: TibberConfigEntry,
) -> None:
"""Initialize the coordinator."""
super().__init__(
hass,
_LOGGER,
name=f"{DOMAIN} Data API",
update_interval=timedelta(minutes=1),
config_entry=entry,
)
self._runtime_data = entry.runtime_data
self.sensors_by_device: dict[str, dict[str, tibber.data_api.Sensor]] = {}
def _build_sensor_lookup(self, devices: dict[str, TibberDevice]) -> None:
"""Build sensor lookup dict for efficient access."""
self.sensors_by_device = {
device_id: {sensor.id: sensor for sensor in device.sensors}
for device_id, device in devices.items()
}
def get_sensor(
self, device_id: str, sensor_id: str
) -> tibber.data_api.Sensor | None:
"""Get a sensor by device and sensor ID."""
if device_sensors := self.sensors_by_device.get(device_id):
return device_sensors.get(sensor_id)
return None
async def _async_get_client(self) -> TibberDataAPI:
"""Get the Tibber Data API client with error handling."""
try:
return await self._runtime_data.async_get_client(self.hass)
except ConfigEntryAuthFailed:
raise
except (ClientError, TimeoutError, tibber.UserAgentMissingError) as err:
raise UpdateFailed(
f"Unable to create Tibber Data API client: {err}"
) from err
async def _async_setup(self) -> None:
"""Initial load of Tibber Data API devices."""
client = await self._async_get_client()
devices = await client.get_all_devices()
self._build_sensor_lookup(devices)
async def _async_update_data(self) -> dict[str, TibberDevice]:
"""Fetch the latest device capabilities from the Tibber Data API."""
client = await self._async_get_client()
devices: dict[str, TibberDevice] = await client.update_devices()
self._build_sensor_lookup(devices)
return devices
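
A hedged sketch of the nested lookup that _build_sensor_lookup creates and get_sensor reads: device id first, sensor id second, so each read is two dictionary lookups. DemoSensor and DemoDevice are tiny stand-ins for the tibber.data_api classes, and the sample values are invented.

from dataclasses import dataclass

@dataclass
class DemoSensor:      # stand-in for tibber.data_api.Sensor
    id: str
    value: float

@dataclass
class DemoDevice:      # stand-in for tibber.data_api.TibberDevice
    id: str
    sensors: list[DemoSensor]

def build_sensor_lookup(
    devices: dict[str, DemoDevice],
) -> dict[str, dict[str, DemoSensor]]:
    """Index sensors by device id, then by sensor id."""
    return {
        device_id: {sensor.id: sensor for sensor in device.sensors}
        for device_id, device in devices.items()
    }

devices = {"dev-1": DemoDevice("dev-1", [DemoSensor("storage.stateOfCharge", 83.0)])}
lookup = build_sensor_lookup(devices)
print(lookup["dev-1"]["storage.stateOfCharge"].value)  # 83.0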

View File

@@ -4,21 +4,18 @@ from __future__ import annotations
from typing import Any
import tibber
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from .const import DOMAIN
from .const import TibberConfigEntry
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry
hass: HomeAssistant, config_entry: TibberConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
tibber_connection: tibber.Tibber = hass.data[DOMAIN]
return {
runtime = config_entry.runtime_data
result: dict[str, Any] = {
"homes": [
{
"last_data_timestamp": home.last_data_timestamp,
@@ -27,6 +24,24 @@ async def async_get_config_entry_diagnostics(
"last_cons_data_timestamp": home.last_cons_data_timestamp,
"country": home.country,
}
for home in tibber_connection.get_homes(only_active=False)
for home in runtime.tibber_connection.get_homes(only_active=False)
]
}
devices = (
runtime.data_api_coordinator.data
if runtime.data_api_coordinator is not None
else {}
) or {}
result["devices"] = [
{
"id": device.id,
"name": device.name,
"brand": device.brand,
"model": device.model,
}
for device in devices.values()
]
return result

View File

@@ -3,9 +3,9 @@
"name": "Tibber",
"codeowners": ["@danielhiversen"],
"config_flow": true,
"dependencies": ["recorder"],
"dependencies": ["application_credentials", "recorder"],
"documentation": "https://www.home-assistant.io/integrations/tibber",
"iot_class": "cloud_polling",
"loggers": ["tibber"],
"requirements": ["pyTibber==0.32.2"]
"requirements": ["pyTibber==0.34.1"]
}

View File

@@ -2,28 +2,25 @@
from __future__ import annotations
from tibber import Tibber
from homeassistant.components.notify import (
ATTR_TITLE_DEFAULT,
NotifyEntity,
NotifyEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import DOMAIN
from .const import DOMAIN, TibberConfigEntry
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: TibberConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Tibber notification entity."""
async_add_entities([TibberNotificationEntity(entry.entry_id)])
async_add_entities([TibberNotificationEntity(entry)])
class TibberNotificationEntity(NotifyEntity):
@@ -33,13 +30,14 @@ class TibberNotificationEntity(NotifyEntity):
_attr_name = DOMAIN
_attr_icon = "mdi:message-flash"
def __init__(self, unique_id: str) -> None:
def __init__(self, entry: TibberConfigEntry) -> None:
"""Initialize Tibber notify entity."""
self._attr_unique_id = unique_id
self._attr_unique_id = entry.entry_id
self._entry = entry
async def async_send_message(self, message: str, title: str | None = None) -> None:
"""Send a message to Tibber devices."""
tibber_connection: Tibber = self.hass.data[DOMAIN]
tibber_connection = self._entry.runtime_data.tibber_connection
try:
await tibber_connection.send_notification(
title or ATTR_TITLE_DEFAULT, message

View File

@@ -10,7 +10,8 @@ from random import randrange
from typing import Any
import aiohttp
import tibber
from tibber import FatalHttpExceptionError, RetryableHttpExceptionError, TibberHome
from tibber.data_api import TibberDevice
from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -27,6 +28,7 @@ from homeassistant.const import (
UnitOfElectricCurrent,
UnitOfElectricPotential,
UnitOfEnergy,
UnitOfLength,
UnitOfPower,
)
from homeassistant.core import Event, HomeAssistant, callback
@@ -41,8 +43,8 @@ from homeassistant.helpers.update_coordinator import (
)
from homeassistant.util import Throttle, dt as dt_util
from .const import DOMAIN, MANUFACTURER
from .coordinator import TibberDataCoordinator
from .const import DOMAIN, MANUFACTURER, TibberConfigEntry
from .coordinator import TibberDataAPICoordinator, TibberDataCoordinator
_LOGGER = logging.getLogger(__name__)
@@ -260,14 +262,65 @@ SENSORS: tuple[SensorEntityDescription, ...] = (
)
DATA_API_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="storage.stateOfCharge",
translation_key="storage_state_of_charge",
device_class=SensorDeviceClass.BATTERY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="storage.targetStateOfCharge",
translation_key="storage_target_state_of_charge",
device_class=SensorDeviceClass.BATTERY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="range.remaining",
translation_key="range_remaining",
device_class=SensorDeviceClass.DISTANCE,
native_unit_of_measurement=UnitOfLength.METERS,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=1,
),
SensorEntityDescription(
key="charging.current.max",
translation_key="charging_current_max",
device_class=SensorDeviceClass.CURRENT,
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="charging.current.offlineFallback",
translation_key="charging_current_offline_fallback",
device_class=SensorDeviceClass.CURRENT,
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
state_class=SensorStateClass.MEASUREMENT,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: TibberConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Tibber sensor."""
tibber_connection = hass.data[DOMAIN]
_setup_data_api_sensors(entry, async_add_entities)
await _async_setup_graphql_sensors(hass, entry, async_add_entities)
async def _async_setup_graphql_sensors(
hass: HomeAssistant,
entry: TibberConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Tibber sensor."""
tibber_connection = entry.runtime_data.tibber_connection
entity_registry = er.async_get(hass)
device_registry = dr.async_get(hass)
@@ -280,7 +333,11 @@ async def async_setup_entry(
except TimeoutError as err:
_LOGGER.error("Timeout connecting to Tibber home: %s ", err)
raise PlatformNotReady from err
except (tibber.RetryableHttpExceptionError, aiohttp.ClientError) as err:
except (
RetryableHttpExceptionError,
FatalHttpExceptionError,
aiohttp.ClientError,
) as err:
_LOGGER.error("Error connecting to Tibber home: %s ", err)
raise PlatformNotReady from err
@@ -325,7 +382,67 @@ async def async_setup_entry(
device_entry.id, new_identifiers={(DOMAIN, home.home_id)}
)
async_add_entities(entities, True)
async_add_entities(entities)
def _setup_data_api_sensors(
entry: TibberConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up sensors backed by the Tibber Data API."""
coordinator = entry.runtime_data.data_api_coordinator
if coordinator is None:
return
entities: list[TibberDataAPISensor] = []
api_sensors = {sensor.key: sensor for sensor in DATA_API_SENSORS}
for device in coordinator.data.values():
for sensor in device.sensors:
description: SensorEntityDescription | None = api_sensors.get(sensor.id)
if description is None:
_LOGGER.debug(
"Sensor %s not found in DATA_API_SENSORS, skipping", sensor
)
continue
entities.append(TibberDataAPISensor(coordinator, device, description))
async_add_entities(entities)
class TibberDataAPISensor(CoordinatorEntity[TibberDataAPICoordinator], SensorEntity):
"""Representation of a Tibber Data API capability sensor."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: TibberDataAPICoordinator,
device: TibberDevice,
entity_description: SensorEntityDescription,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator)
self._device_id: str = device.id
self.entity_description = entity_description
self._attr_translation_key = entity_description.translation_key
self._attr_unique_id = f"{device.external_id}_{self.entity_description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, device.external_id)},
name=device.name,
manufacturer=device.brand,
model=device.model,
)
@property
def native_value(self) -> StateType:
"""Return the value reported by the device."""
sensors = self.coordinator.sensors_by_device.get(self._device_id, {})
sensor = sensors.get(self.entity_description.key)
return sensor.value if sensor else None
class TibberSensor(SensorEntity):
@@ -333,9 +450,7 @@ class TibberSensor(SensorEntity):
_attr_has_entity_name = True
def __init__(
self, *args: Any, tibber_home: tibber.TibberHome, **kwargs: Any
) -> None:
def __init__(self, *args: Any, tibber_home: TibberHome, **kwargs: Any) -> None:
"""Initialize the sensor."""
super().__init__(*args, **kwargs)
self._tibber_home = tibber_home
@@ -366,7 +481,7 @@ class TibberSensorElPrice(TibberSensor):
_attr_state_class = SensorStateClass.MEASUREMENT
_attr_translation_key = "electricity_price"
def __init__(self, tibber_home: tibber.TibberHome) -> None:
def __init__(self, tibber_home: TibberHome) -> None:
"""Initialize the sensor."""
super().__init__(tibber_home=tibber_home)
self._last_updated: datetime.datetime | None = None
@@ -443,7 +558,7 @@ class TibberDataSensor(TibberSensor, CoordinatorEntity[TibberDataCoordinator]):
def __init__(
self,
tibber_home: tibber.TibberHome,
tibber_home: TibberHome,
coordinator: TibberDataCoordinator,
entity_description: SensorEntityDescription,
) -> None:
@@ -470,7 +585,7 @@ class TibberSensorRT(TibberSensor, CoordinatorEntity["TibberRtDataCoordinator"])
def __init__(
self,
tibber_home: tibber.TibberHome,
tibber_home: TibberHome,
description: SensorEntityDescription,
initial_state: float,
coordinator: TibberRtDataCoordinator,
@@ -532,7 +647,7 @@ class TibberRtEntityCreator:
def __init__(
self,
async_add_entities: AddConfigEntryEntitiesCallback,
tibber_home: tibber.TibberHome,
tibber_home: TibberHome,
entity_registry: er.EntityRegistry,
) -> None:
"""Initialize the data handler."""
@@ -618,7 +733,7 @@ class TibberRtDataCoordinator(DataUpdateCoordinator): # pylint: disable=hass-en
hass: HomeAssistant,
config_entry: ConfigEntry,
add_sensor_callback: Callable[[TibberRtDataCoordinator, Any], None],
tibber_home: tibber.TibberHome,
tibber_home: TibberHome,
) -> None:
"""Initialize the data handler."""
self._add_sensor_callback = add_sensor_callback

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
import datetime as dt
from datetime import datetime
from typing import Any, Final
from typing import TYPE_CHECKING, Any, Final
import voluptuous as vol
@@ -20,6 +20,9 @@ from homeassistant.util import dt as dt_util
from .const import DOMAIN
if TYPE_CHECKING:
from .const import TibberConfigEntry
PRICE_SERVICE_NAME = "get_prices"
ATTR_START: Final = "start"
ATTR_END: Final = "end"
@@ -33,7 +36,13 @@ SERVICE_SCHEMA: Final = vol.Schema(
async def __get_prices(call: ServiceCall) -> ServiceResponse:
tibber_connection = call.hass.data[DOMAIN]
entries: list[TibberConfigEntry] = call.hass.config_entries.async_entries(DOMAIN)
if not entries:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="no_config_entry",
)
tibber_connection = entries[0].runtime_data.tibber_connection
start = __get_date(call.data.get(ATTR_START), "start")
end = __get_date(call.data.get(ATTR_END), "end")
@@ -57,7 +66,7 @@ async def __get_prices(call: ServiceCall) -> ServiceResponse:
selected_data = [
price
for price in price_data
if start <= dt.datetime.fromisoformat(price["start_time"]) < end
if start <= dt.datetime.fromisoformat(str(price["start_time"])) < end
]
tibber_prices[home_nickname] = selected_data
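
A hedged sketch of the time-window filter used by get_prices; the price entries are invented, and the str() call mirrors the change in the diff so the comparison also tolerates values that are not plain strings.

import datetime as dt

price_data = [
    {"start_time": "2026-01-07T00:00:00+01:00", "price": 0.41},
    {"start_time": "2026-01-07T01:00:00+01:00", "price": 0.38},
]
start = dt.datetime.fromisoformat("2026-01-07T00:30:00+01:00")
end = dt.datetime.fromisoformat("2026-01-07T02:00:00+01:00")

selected_data = [
    price
    for price in price_data
    if start <= dt.datetime.fromisoformat(str(price["start_time"])) < end
]
print(selected_data)  # only the 01:00 entry survives the filter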

View File

@@ -1,7 +1,11 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
"missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"wrong_account": "The connected account does not match {title}. Sign in with the same Tibber account and try again."
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@@ -9,6 +13,10 @@
"timeout": "[%key:common::config_flow::error::timeout_connect%]"
},
"step": {
"reauth_confirm": {
"description": "Reconnect your Tibber account to refresh access.",
"title": "[%key:common::config_flow::title::reauth%]"
},
"user": {
"data": {
"access_token": "[%key:common::config_flow::data::access_token%]"
@@ -40,6 +48,12 @@
"average_power": {
"name": "Average power"
},
"charging_current_max": {
"name": "Maximum allowed charge current"
},
"charging_current_offline_fallback": {
"name": "Fallback current if charger goes offline"
},
"current_l1": {
"name": "Current L1"
},
@@ -88,9 +102,18 @@
"power_production": {
"name": "Power production"
},
"range_remaining": {
"name": "Estimated remaining driving range"
},
"signal_strength": {
"name": "Signal strength"
},
"storage_state_of_charge": {
"name": "State of charge"
},
"storage_target_state_of_charge": {
"name": "Target state of charge"
},
"voltage_phase1": {
"name": "Voltage phase1"
},
@@ -103,9 +126,18 @@
}
},
"exceptions": {
"data_api_reauth_required": {
"message": "Reconnect Tibber so Home Assistant can enable the new Tibber Data API features."
},
"invalid_date": {
"message": "Invalid datetime provided {date}"
},
"no_config_entry": {
"message": "No Tibber integration configured"
},
"oauth2_implementation_unavailable": {
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
},
"send_message_timeout": {
"message": "Timeout sending message with Tibber"
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/totalconnect",
"iot_class": "cloud_polling",
"loggers": ["total_connect_client"],
"requirements": ["total-connect-client==2025.5"]
"requirements": ["total-connect-client==2025.12.2"]
}

View File

@@ -39,6 +39,7 @@ from .const import (
SERVER_UNAVAILABLE,
SWITCH_KEY_MAP,
TRACKER_HARDWARE_STATUS_UPDATED,
TRACKER_HEALTH_OVERVIEW_UPDATED,
TRACKER_POSITION_UPDATED,
TRACKER_SWITCH_STATUS_UPDATED,
TRACKER_WELLNESS_STATUS_UPDATED,
@@ -64,6 +65,7 @@ class Trackables:
tracker_details: dict
hw_info: dict
pos_report: dict
health_overview: dict
@dataclass(slots=True)
@@ -114,6 +116,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TractiveConfigEntry) ->
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
# Send initial health overview data to sensors after platforms are set up
for item in filtered_trackables:
if item.health_overview:
tractive.send_health_overview_update(item.health_overview)
async def cancel_listen_task(_: Event) -> None:
await tractive.unsubscribe()
@@ -144,9 +151,13 @@ async def _generate_trackables(
return None
tracker = client.tracker(trackable["device_id"])
trackable_pet = client.trackable_object(trackable["_id"])
tracker_details, hw_info, pos_report = await asyncio.gather(
tracker.details(), tracker.hw_info(), tracker.pos_report()
tracker_details, hw_info, pos_report, health_overview = await asyncio.gather(
tracker.details(),
tracker.hw_info(),
tracker.pos_report(),
trackable_pet.health_overview(),
)
if not tracker_details.get("_id"):
@@ -154,7 +165,9 @@ async def _generate_trackables(
f"Tractive API returns incomplete data for tracker {trackable['device_id']}",
)
return Trackables(tracker, trackable, tracker_details, hw_info, pos_report)
return Trackables(
tracker, trackable, tracker_details, hw_info, pos_report, health_overview
)
async def async_unload_entry(hass: HomeAssistant, entry: TractiveConfigEntry) -> bool:
@@ -226,6 +239,9 @@ class TractiveClient:
if server_was_unavailable:
_LOGGER.debug("Tractive is back online")
server_was_unavailable = False
if event["message"] == "health_overview":
self.send_health_overview_update(event)
continue
if event["message"] == "wellness_overview":
self._send_wellness_update(event)
continue
@@ -316,6 +332,27 @@ class TractiveClient:
TRACKER_WELLNESS_STATUS_UPDATED, event["pet_id"], payload
)
def send_health_overview_update(self, event: dict[str, Any]) -> None:
"""Handle health_overview events from Tractive API."""
# The health_overview response can be at root level or wrapped in 'content'
# Handle both structures for compatibility
data = event.get("content", event)
activity = data.get("activity", {})
sleep = data.get("sleep", {})
payload = {
ATTR_DAILY_GOAL: activity.get("minutesGoal"),
ATTR_MINUTES_ACTIVE: activity.get("minutesActive"),
ATTR_MINUTES_DAY_SLEEP: sleep.get("minutesDaySleep"),
ATTR_MINUTES_NIGHT_SLEEP: sleep.get("minutesNightSleep"),
# Calm minutes can be used as rest indicator
ATTR_MINUTES_REST: sleep.get("minutesCalm"),
}
self._dispatch_tracker_event(
TRACKER_HEALTH_OVERVIEW_UPDATED, data["petId"], payload
)
def _send_position_update(self, event: dict[str, Any]) -> None:
payload = {
"latitude": event["position"]["latlong"][0],

View File

@@ -28,6 +28,7 @@ TRACKER_HARDWARE_STATUS_UPDATED = f"{DOMAIN}_tracker_hardware_status_updated"
TRACKER_POSITION_UPDATED = f"{DOMAIN}_tracker_position_updated"
TRACKER_SWITCH_STATUS_UPDATED = f"{DOMAIN}_tracker_switch_updated"
TRACKER_WELLNESS_STATUS_UPDATED = f"{DOMAIN}_tracker_wellness_updated"
TRACKER_HEALTH_OVERVIEW_UPDATED = f"{DOMAIN}_tracker_health_overview_updated"
SERVER_UNAVAILABLE = f"{DOMAIN}_server_unavailable"

View File

@@ -35,6 +35,7 @@ from .const import (
ATTR_SLEEP_LABEL,
ATTR_TRACKER_STATE,
TRACKER_HARDWARE_STATUS_UPDATED,
TRACKER_HEALTH_OVERVIEW_UPDATED,
TRACKER_WELLNESS_STATUS_UPDATED,
)
from .entity import TractiveEntity
@@ -115,14 +116,14 @@ SENSOR_TYPES: tuple[TractiveSensorEntityDescription, ...] = (
key=ATTR_MINUTES_ACTIVE,
translation_key="activity_time",
native_unit_of_measurement=UnitOfTime.MINUTES,
signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED,
signal_prefix=TRACKER_HEALTH_OVERVIEW_UPDATED,
state_class=SensorStateClass.TOTAL,
),
TractiveSensorEntityDescription(
key=ATTR_MINUTES_REST,
translation_key="rest_time",
native_unit_of_measurement=UnitOfTime.MINUTES,
signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED,
signal_prefix=TRACKER_HEALTH_OVERVIEW_UPDATED,
state_class=SensorStateClass.TOTAL,
),
TractiveSensorEntityDescription(
@@ -136,20 +137,20 @@ SENSOR_TYPES: tuple[TractiveSensorEntityDescription, ...] = (
key=ATTR_DAILY_GOAL,
translation_key="daily_goal",
native_unit_of_measurement=UnitOfTime.MINUTES,
signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED,
signal_prefix=TRACKER_HEALTH_OVERVIEW_UPDATED,
),
TractiveSensorEntityDescription(
key=ATTR_MINUTES_DAY_SLEEP,
translation_key="minutes_day_sleep",
native_unit_of_measurement=UnitOfTime.MINUTES,
signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED,
signal_prefix=TRACKER_HEALTH_OVERVIEW_UPDATED,
state_class=SensorStateClass.TOTAL,
),
TractiveSensorEntityDescription(
key=ATTR_MINUTES_NIGHT_SLEEP,
translation_key="minutes_night_sleep",
native_unit_of_measurement=UnitOfTime.MINUTES,
signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED,
signal_prefix=TRACKER_HEALTH_OVERVIEW_UPDATED,
state_class=SensorStateClass.TOTAL,
),
TractiveSensorEntityDescription(

View File

@@ -186,14 +186,14 @@ class _ColorDataWrapper(DPCodeJsonWrapper):
)
def _convert_value_to_raw_value(
self, device: CustomerDevice, value: tuple[tuple[float, float], float]
self, device: CustomerDevice, value: tuple[float, float, float]
) -> Any:
"""Convert a Home Assistant color/brightness pair back to a raw device value."""
color, brightness = value
"""Convert a Home Assistant tuple (H, S, V) back to a raw device value."""
hue, saturation, brightness = value
return json.dumps(
{
"h": round(self.h_type.remap_value_from(color[0])),
"s": round(self.s_type.remap_value_from(color[1])),
"h": round(self.h_type.remap_value_from(hue)),
"s": round(self.s_type.remap_value_from(saturation)),
"v": round(self.v_type.remap_value_from(brightness)),
}
)
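
A hedged sketch of the HSV-to-raw conversion above. The real code delegates scaling to the Tuya type helpers' remap_value_from; here a plain linear remap stands in for it, and the target ranges (0-360 for hue, 0-1000 for saturation and value) are illustrative assumptions, not guaranteed device limits.

import json

def remap(value: float, src_min: float, src_max: float, dst_min: float, dst_max: float) -> float:
    """Linearly map value from the source range onto the destination range."""
    return dst_min + (value - src_min) * (dst_max - dst_min) / (src_max - src_min)

hue, saturation, brightness = 30.0, 50.0, 128.0  # H (0-360), S (0-100), brightness (0-255)
raw_value = json.dumps(
    {
        "h": round(remap(hue, 0, 360, 0, 360)),
        "s": round(remap(saturation, 0, 100, 0, 1000)),
        "v": round(remap(brightness, 0, 255, 0, 1000)),
    }
)
print(raw_value)  # {"h": 30, "s": 500, "v": 502}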

View File

@@ -41,7 +41,7 @@
"iot_class": "local_push",
"loggers": ["uiprotect", "unifi_discovery"],
"quality_scale": "platinum",
"requirements": ["uiprotect==7.33.3", "unifi-discovery==1.2.0"],
"requirements": ["uiprotect==8.0.0", "unifi-discovery==1.2.0"],
"ssdp": [
{
"manufacturer": "Ubiquiti Networks",

View File

@@ -105,7 +105,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: VelbusConfigEntry) -> bo
try:
await controller.connect()
except VelbusConnectionFailed as error:
raise ConfigEntryNotReady("Cannot connect to Velbus") from error
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="connection_failed",
) from error
task = hass.async_create_task(velbus_scan_task(controller, hass, entry.entry_id))
entry.runtime_data = VelbusData(controller=controller, scan_task=task)

View File

@@ -65,7 +65,7 @@ class VelbusClimate(VelbusEntity, ClimateEntity):
)
@property
def current_temperature(self) -> int | None:
def current_temperature(self) -> float | None:
"""Return the current temperature."""
return self._channel.get_state()

View File

@@ -66,6 +66,7 @@ class VelbusEntity(Entity):
self._channel.remove_on_status_update(self._on_update)
async def _on_update(self) -> None:
"""Handle status updates from the channel."""
self.async_write_ha_state()
@@ -80,8 +81,13 @@ def api_call[_T: VelbusEntity, **_P](
try:
await func(self, *args, **kwargs)
except OSError as exc:
entity_name = self.name if isinstance(self.name, str) else "Unknown"
raise HomeAssistantError(
f"Could not execute {func.__name__} service for {self.name}"
translation_domain=DOMAIN,
translation_key="api_call_failed",
translation_placeholders={
"entity": entity_name,
},
) from exc
return cmd_wrapper
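
A hedged, self-contained sketch of the api_call decorator pattern shown above: wrap an async entity method, catch OSError, and re-raise a domain error that carries the entity name. ApiCallError and DemoEntity are invented here; in Home Assistant the raised error is a HomeAssistantError with translation keys.

import asyncio
import functools

class ApiCallError(Exception):
    """Stand-in for HomeAssistantError with a translated message."""

def api_call(func):
    """Convert a low-level OSError into a user-facing domain error."""

    @functools.wraps(func)
    async def cmd_wrapper(self, *args, **kwargs):
        try:
            await func(self, *args, **kwargs)
        except OSError as exc:
            entity_name = self.name if isinstance(self.name, str) else "Unknown"
            raise ApiCallError(f"Action {func.__name__} for {entity_name} failed") from exc

    return cmd_wrapper

class DemoEntity:
    name = "Kitchen relay"

    @api_call
    async def async_turn_on(self) -> None:
        raise OSError("bus not connected")  # simulate a failing Velbus command

async def main() -> None:
    try:
        await DemoEntity().async_turn_on()
    except ApiCallError as err:
        print(err)  # Action async_turn_on for Kitchen relay failed

asyncio.run(main())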

View File

@@ -14,7 +14,7 @@
"velbus-protocol"
],
"quality_scale": "bronze",
"requirements": ["velbus-aio==2025.12.0"],
"requirements": ["velbus-aio==2026.1.1"],
"usb": [
{
"pid": "0B1B",

View File

@@ -56,7 +56,7 @@ rules:
entity-device-class: todo
entity-disabled-by-default: done
entity-translations: todo
exception-translations: todo
exception-translations: done
icon-translations: todo
reconfiguration-flow: todo
repair-issues:

View File

@@ -57,8 +57,14 @@
}
},
"exceptions": {
"api_call_failed": {
"message": "Action execute for {entity} failed."
},
"clear_cache_failed": {
"message": "Could not cleat the Velbus cache: {error}"
"message": "Could not clear the Velbus cache: {error}"
},
"connection_failed": {
"message": "Could not connect to Velbus."
},
"integration_not_found": {
"message": "Integration \"{target}\" not found in registry."

View File

@@ -13,5 +13,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["pyvlx"],
"requirements": ["pyvlx==0.2.26"]
"requirements": ["pyvlx==0.2.27"]
}

View File

@@ -5,9 +5,7 @@ rules:
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow:
status: todo
comment: Missing data descriptions
config-flow: done
dependency-transparency: done
docs-actions: done
docs-high-level-description: done

View File

@@ -15,6 +15,10 @@
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::email%]"
},
"data_description": {
"password": "[%key:component::vesync::config::step::user::data_description::password%]",
"username": "[%key:component::vesync::config::step::user::data_description::username%]"
},
"description": "The VeSync integration needs to re-authenticate your account",
"title": "[%key:common::config_flow::title::reauth%]"
},
@@ -23,6 +27,11 @@
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::email%]"
},
"data_description": {
"password": "Password associated with your VeSync account",
"username": "Email address associated with your VeSync account"
},
"description": "Enter the account used in the vesync app. 2FA is not supported and must be disabled.",
"title": "Enter username and password"
}
}

View File

@@ -3,7 +3,7 @@
"name": "Watts Vision +",
"codeowners": ["@theobld-ww", "@devender-verma-ww", "@ssi-spyro"],
"config_flow": true,
"dependencies": ["application_credentials"],
"dependencies": ["application_credentials", "cloud"],
"documentation": "https://www.home-assistant.io/integrations/watts",
"iot_class": "cloud_polling",
"quality_scale": "bronze",

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aiowebdav2"],
"quality_scale": "bronze",
"requirements": ["aiowebdav2==0.4.6"]
"requirements": ["aiowebdav2==0.5.0"]
}

View File

@@ -209,7 +209,7 @@ class XboxSource(MediaSource):
if images is not None:
try:
return PlayMedia(
images[int(identifier.media_id)].url,
to_https(images[int(identifier.media_id)].url),
MIME_TYPE_MAP[ATTR_SCREENSHOTS],
)
except (ValueError, IndexError):

View File

@@ -25,5 +25,5 @@
"documentation": "https://www.home-assistant.io/integrations/xiaomi_ble",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["xiaomi-ble==1.2.0"]
"requirements": ["xiaomi-ble==1.4.1"]
}

View File

@@ -23,7 +23,7 @@
"universal_silabs_flasher",
"serialx"
],
"requirements": ["zha==0.0.82", "serialx==0.5.0"],
"requirements": ["zha==0.0.83", "serialx==0.5.0"],
"usb": [
{
"description": "*2652*",

View File

@@ -17,7 +17,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2026
MINOR_VERSION: Final = 1
PATCH_VERSION: Final = "0.dev0"
PATCH_VERSION: Final = "0"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)

View File

@@ -39,6 +39,7 @@ APPLICATION_CREDENTIALS = [
"spotify",
"tesla_fleet",
"teslemetry",
"tibber",
"twitch",
"volvo",
"watts",

View File

@@ -323,6 +323,7 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [
"connectable": True,
"domain": "hue_ble",
"service_data_uuid": "0000fe0f-0000-1000-8000-00805f9b34fb",
"service_uuid": "0000fe0f-0000-1000-8000-00805f9b34fb",
},
{
"connectable": True,

View File

@@ -4584,7 +4584,7 @@
},
"nuheat": {
"name": "NuHeat",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -5014,7 +5014,7 @@
},
"permobil": {
"name": "MyPermobil",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -5152,13 +5152,13 @@
},
"pooldose": {
"name": "SEKO PoolDose",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_polling"
},
"poolsense": {
"name": "PoolSense",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "cloud_polling"
},

View File

@@ -537,7 +537,7 @@ def _validate_range[_T: dict[str, Any]](
_NUMBER_OR_ENTITY_CHOOSE_SCHEMA = vol.Schema(
{
vol.Required("chosen_selector"): vol.In(["number", "entity"]),
vol.Required("active_choice"): vol.In(["number", "entity"]),
vol.Optional("entity"): cv.entity_id,
vol.Optional("number"): vol.Coerce(float),
}
@@ -548,7 +548,7 @@ def _validate_number_or_entity(value: dict | float | str) -> float | str:
"""Validate number or entity selector result."""
if isinstance(value, dict):
_NUMBER_OR_ENTITY_CHOOSE_SCHEMA(value)
return value[value["chosen_selector"]] # type: ignore[no-any-return]
return value[value["active_choice"]] # type: ignore[no-any-return]
return value
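
A hedged, standalone sketch of the corrected selector validation (requires the voluptuous package; cv.entity_id is replaced by plain str to keep it self-contained): the "active_choice" key names the branch, and the value stored under that key is returned.

import voluptuous as vol

NUMBER_OR_ENTITY_CHOOSE_SCHEMA = vol.Schema(
    {
        vol.Required("active_choice"): vol.In(["number", "entity"]),
        vol.Optional("entity"): str,          # cv.entity_id in Home Assistant
        vol.Optional("number"): vol.Coerce(float),
    }
)

def validate_number_or_entity(value):
    """Return the number or entity id picked by active_choice."""
    if isinstance(value, dict):
        NUMBER_OR_ENTITY_CHOOSE_SCHEMA(value)     # raises vol.Invalid on bad input
        return value[value["active_choice"]]
    return value

print(validate_number_or_entity({"active_choice": "number", "number": 21.5}))  # 21.5
print(validate_number_or_entity("sensor.outdoor_temperature"))  # passed through unchanged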

Some files were not shown because too many files have changed in this diff.