forked from home-assistant/core
Compare commits
136 Commits
2025.3.0b1 ... 2025.3.1
| SHA1 |
|---|
| 4e89948b5c |
| 9f95383201 |
| 7e452521c8 |
| 991de6f1d0 |
| be32e3fe8f |
| d6eb61e9ec |
| e74fe69d65 |
| 208406123e |
| 8bcd135f3d |
| e7ea0e435e |
| b15b680cfe |
| 5e26d98bdf |
| 9f94ee280a |
| efa98539fa |
| 113cd4bfcc |
| ccbaf76e44 |
| 5d9d93d3a1 |
| c2c5274aac |
| 89756394c9 |
| 352aa88e79 |
| 714962bd7a |
| fb4c50b5dc |
| b4794b2029 |
| 3a8c8accfe |
| 844adfc590 |
| a279e23fb5 |
| af9bbd0585 |
| 1304194f09 |
| e909417a3f |
| 02706c116d |
| 3af6b5cb4c |
| 35c1bb1ec5 |
| 97cc3984c5 |
| 98e317dd55 |
| ed088aa72f |
| 51162320cb |
| b88eab8ba3 |
| 6c080ee650 |
| 8056b0df2b |
| 3f94b7a61c |
| 1484e46317 |
| 2812c8a993 |
| 5043e2ad10 |
| 2c2fd76270 |
| 7001f8daaf |
| b41fc932c5 |
| 0872243297 |
| bba889975a |
| 01e8ca6495 |
| 7d82375f81 |
| 47033e587b |
| e73b08b269 |
| a195a9107b |
| 185949cc18 |
| c129f27c95 |
| 6a5a66e2f9 |
| db63d9fcbf |
| 5b3d798eca |
| a0dde2a7d6 |
| 1bdc33d52d |
| f1d332da5a |
| 304c13261a |
| c58cbfd6f4 |
| b890d3e15a |
| 2c9b8b6835 |
| 73cc1f51ca |
| dca77e8232 |
| 03cb177e7c |
| ad04b53615 |
| 46bcb307f6 |
| b816625028 |
| 0940fc7806 |
| 50aefc3653 |
| c0dc83cbc0 |
| 8382663be4 |
| 7e1309d874 |
| 1d0cba1a43 |
| 7d9a6ceb6b |
| 6abdb28a03 |
| 3690e03951 |
| 4fe4d14f16 |
| 74e8ffa555 |
| c257b228f1 |
| 6ff0f67d03 |
| 8fdff9ca37 |
| 9055dff9bd |
| e766d681b5 |
| 511e57d0b3 |
| 74be49d00d |
| 684c3aac6b |
| a718b6ebff |
| f17274d417 |
| 1530139a61 |
| f56d65b2ec |
| 21277a81d3 |
| e1ce5b8c69 |
| 0323a9c4e6 |
| c7d89398a0 |
| 8cc587d3a7 |
| 5ad156767a |
| f54b3f4de2 |
| 6f0c62dc9d |
| dce8bca103 |
| 22af8af132 |
| 8a62b882bf |
| 708f22fe6f |
| a4e71e2055 |
| 61a3cc37e0 |
| a0668e5a5b |
| b4b7142b55 |
| 108b71d33c |
| 2636a47333 |
| 17116fcd6c |
| 17c16144d1 |
| 178d509d56 |
| 09c129de40 |
| 07128ba063 |
| a786ff53ff |
| d2e19c829d |
| 94b342f26a |
| 9e3e6b3f43 |
| 4300900322 |
| 342e04974d |
| fdb4c0a81f |
| 6de878ffe4 |
| c63aaec09e |
| d8bf47c101 |
| 736ff8828d |
| b501999a4c |
| 3985f1c6c8 |
| 46ec3987a8 |
| df4e5a54e3 |
| d8a259044f |
| 0891669aee |
| 83c0351338 |
| c5e5fe555d |
40  .github/workflows/wheels.yml  (vendored)
@@ -218,15 +218,7 @@ jobs:
         sed -i "/uv/d" requirements.txt
         sed -i "/uv/d" requirements_diff.txt
 
-      - name: Split requirements all
-        run: |
-          # We split requirements all into multiple files.
-          # This is to prevent the build from running out of memory when
-          # resolving packages on 32-bits systems (like armhf, armv7).
-
-          split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
-
-      - name: Build wheels (part 1)
+      - name: Build wheels
         uses: home-assistant/wheels@2024.11.0
         with:
           abi: ${{ matrix.abi }}
@@ -238,32 +230,4 @@ jobs:
           skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtaa"
-
-      - name: Build wheels (part 2)
-        uses: home-assistant/wheels@2024.11.0
-        with:
-          abi: ${{ matrix.abi }}
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
-          wheels-key: ${{ secrets.WHEELS_KEY }}
-          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
-          constraints: "homeassistant/package_constraints.txt"
-          requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtab"
-
-      - name: Build wheels (part 3)
-        uses: home-assistant/wheels@2024.11.0
-        with:
-          abi: ${{ matrix.abi }}
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
-          wheels-key: ${{ secrets.WHEELS_KEY }}
-          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
-          constraints: "homeassistant/package_constraints.txt"
-          requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtac"
+          requirements: "requirements_all.txt"

@@ -14,7 +14,7 @@ from homeassistant.components.notify import (
 )
 from homeassistant.const import STATE_IDLE, STATE_OFF, STATE_ON
 from homeassistant.core import Event, EventStateChangedData, HassJob, HomeAssistant
-from homeassistant.exceptions import ServiceNotFound
+from homeassistant.exceptions import ServiceNotFound, ServiceValidationError
 from homeassistant.helpers.entity import Entity
 from homeassistant.helpers.event import (
     async_track_point_in_time,
@@ -195,7 +195,8 @@ class AlertEntity(Entity):
 
     async def async_turn_off(self, **kwargs: Any) -> None:
         """Async Acknowledge alert."""
         LOGGER.debug("Acknowledged Alert: %s", self._attr_name)
+        if not self._can_ack:
+            raise ServiceValidationError("This alert cannot be acknowledged")
         self._ack = True
         self.async_write_ha_state()

1  homeassistant/components/apollo_automation/__init__.py  Normal file
@@ -0,0 +1 @@
+"""Virtual integration: Apollo Automation."""

6  homeassistant/components/apollo_automation/manifest.json  Normal file
@@ -0,0 +1,6 @@
+{
+  "domain": "apollo_automation",
+  "name": "Apollo Automation",
+  "integration_type": "virtual",
+  "supported_by": "esphome"
+}

@@ -14,6 +14,7 @@ from itertools import chain
 import json
 from pathlib import Path, PurePath
 import shutil
+import sys
 import tarfile
 import time
 from typing import IO, TYPE_CHECKING, Any, Protocol, TypedDict, cast
@@ -308,6 +309,12 @@ class DecryptOnDowloadNotSupported(BackupManagerError):
     _message = "On-the-fly decryption is not supported for this backup."
 
 
+class BackupManagerExceptionGroup(BackupManagerError, ExceptionGroup):
+    """Raised when multiple exceptions occur."""
+
+    error_code = "multiple_errors"
+
+
 class BackupManager:
     """Define the format that backup managers can have."""
 
@@ -1605,10 +1612,24 @@ class CoreBackupReaderWriter(BackupReaderWriter):
             )
         finally:
             # Inform integrations the backup is done
+            # If there's an unhandled exception, we keep it so we can rethrow it in case
+            # the post backup actions also fail.
+            unhandled_exc = sys.exception()
-            try:
-                await manager.async_post_backup_actions()
-            except BackupManagerError as err:
-                raise BackupReaderWriterError(str(err)) from err
+            try:
+                await manager.async_post_backup_actions()
+            except BackupManagerError as err:
+                raise BackupReaderWriterError(str(err)) from err
+            except Exception as err:
+                if not unhandled_exc:
+                    raise
+                # If there's an unhandled exception, we wrap both that and the exception
+                # from the post backup actions in an ExceptionGroup so the caller is
+                # aware of both exceptions.
+                raise BackupManagerExceptionGroup(
+                    f"Multiple errors when creating backup: {unhandled_exc}, {err}",
+                    [unhandled_exc, err],
+                ) from None
 
     def _mkdir_and_generate_backup_contents(
         self,

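The `finally` rework above follows a general Python 3.11+ pattern: capture the in-flight exception with `sys.exception()`, run the post-actions, and if both fail, raise the pair as an `ExceptionGroup` instead of masking one error with the other. A minimal standalone sketch of that pattern (illustrative names, not the Home Assistant code):

```python
import sys


def post_actions() -> None:
    raise RuntimeError("post-backup actions failed")


def create_backup() -> None:
    try:
        raise ValueError("backup failed")
    finally:
        # The exception currently propagating, if any (Python 3.11+).
        unhandled = sys.exception()
        try:
            post_actions()
        except Exception as err:
            if not unhandled:
                raise  # only the cleanup failed; let it propagate as-is
            # Both failed: surface the pair so neither error is lost.
            raise ExceptionGroup("multiple errors", [unhandled, err]) from None


try:
    create_backup()
except* (ValueError, RuntimeError) as group:
    print([type(exc).__name__ for exc in group.exceptions])  # both appear
```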
@@ -153,6 +153,27 @@ def _has_min_duration(
     return validate
 
 
+def _has_positive_interval(
+    start_key: str, end_key: str, duration_key: str
+) -> Callable[[dict[str, Any]], dict[str, Any]]:
+    """Verify that the time span between start and end is greater than zero."""
+
+    def validate(obj: dict[str, Any]) -> dict[str, Any]:
+        if (duration := obj.get(duration_key)) is not None:
+            if duration <= datetime.timedelta(seconds=0):
+                raise vol.Invalid(f"Expected positive duration ({duration})")
+            return obj
+
+        if (start := obj.get(start_key)) and (end := obj.get(end_key)):
+            if start >= end:
+                raise vol.Invalid(
+                    f"Expected end time to be after start time ({start}, {end})"
+                )
+        return obj
+
+    return validate
+
+
 def _has_same_type(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
     """Verify that all values are of the same type."""
 
@@ -281,6 +302,7 @@ SERVICE_GET_EVENTS_SCHEMA: Final = vol.All(
             ),
         }
     ),
+    _has_positive_interval(EVENT_START_DATETIME, EVENT_END_DATETIME, EVENT_DURATION),
 )
 
 
@@ -870,6 +892,7 @@ async def async_get_events_service(
         end = start + service_call.data[EVENT_DURATION]
     else:
         end = service_call.data[EVENT_END_DATETIME]
+
     calendar_event_list = await calendar.async_get_events(
         calendar.hass, dt_util.as_local(start), dt_util.as_local(end)
     )

@@ -68,7 +68,6 @@ from .const import (  # noqa: F401
     FAN_ON,
     FAN_TOP,
     HVAC_MODES,
-    INTENT_GET_TEMPERATURE,
     INTENT_SET_TEMPERATURE,
     PRESET_ACTIVITY,
     PRESET_AWAY,
@@ -126,7 +126,6 @@ DEFAULT_MAX_HUMIDITY = 99
 
 DOMAIN = "climate"
 
-INTENT_GET_TEMPERATURE = "HassClimateGetTemperature"
 INTENT_SET_TEMPERATURE = "HassClimateSetTemperature"
 
 SERVICE_SET_AUX_HEAT = "set_aux_heat"
@@ -1,4 +1,4 @@
-"""Intents for the client integration."""
+"""Intents for the climate integration."""
 
 from __future__ import annotations
 
@@ -11,7 +11,6 @@ from homeassistant.helpers import config_validation as cv, intent
 from . import (
     ATTR_TEMPERATURE,
     DOMAIN,
-    INTENT_GET_TEMPERATURE,
     INTENT_SET_TEMPERATURE,
     SERVICE_SET_TEMPERATURE,
     ClimateEntityFeature,
@@ -20,49 +19,9 @@ from . import (
 
 async def async_setup_intents(hass: HomeAssistant) -> None:
     """Set up the climate intents."""
-    intent.async_register(hass, GetTemperatureIntent())
     intent.async_register(hass, SetTemperatureIntent())
 
 
-class GetTemperatureIntent(intent.IntentHandler):
-    """Handle GetTemperature intents."""
-
-    intent_type = INTENT_GET_TEMPERATURE
-    description = "Gets the current temperature of a climate device or entity"
-    slot_schema = {
-        vol.Optional("area"): intent.non_empty_string,
-        vol.Optional("name"): intent.non_empty_string,
-    }
-    platforms = {DOMAIN}
-
-    async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse:
-        """Handle the intent."""
-        hass = intent_obj.hass
-        slots = self.async_validate_slots(intent_obj.slots)
-
-        name: str | None = None
-        if "name" in slots:
-            name = slots["name"]["value"]
-
-        area: str | None = None
-        if "area" in slots:
-            area = slots["area"]["value"]
-
-        match_constraints = intent.MatchTargetsConstraints(
-            name=name, area_name=area, domains=[DOMAIN], assistant=intent_obj.assistant
-        )
-        match_result = intent.async_match_targets(hass, match_constraints)
-        if not match_result.is_match:
-            raise intent.MatchFailedError(
-                result=match_result, constraints=match_constraints
-            )
-
-        response = intent_obj.create_response()
-        response.response_type = intent.IntentResponseType.QUERY_ANSWER
-        response.async_set_states(matched_states=match_result.states)
-        return response
-
-
 class SetTemperatureIntent(intent.IntentHandler):
     """Handle SetTemperature intents."""
 

@@ -49,7 +49,11 @@ def async_get_chat_log(
         raise RuntimeError(
             "Cannot attach chat log delta listener unless initial caller"
         )
-    if user_input is not None:
+    if user_input is not None and (
+        (content := chat_log.content[-1]).role != "user"
+        # MyPy doesn't understand that content is a UserContent here
+        or content.content != user_input.text  # type: ignore[union-attr]
+    ):
         chat_log.async_add_user_content(UserContent(content=user_input.text))
 
     yield chat_log

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.2.26"]
+  "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.3.5"]
 }

@@ -24,7 +24,14 @@ from homeassistant.const import (
     STATE_UNKNOWN,
     UnitOfTime,
 )
-from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback
+from homeassistant.core import (
+    Event,
+    EventStateChangedData,
+    EventStateReportedData,
+    HomeAssistant,
+    State,
+    callback,
+)
 from homeassistant.helpers import config_validation as cv, entity_registry as er
 from homeassistant.helpers.device import async_device_info_to_link_from_entity
 from homeassistant.helpers.device_registry import DeviceInfo
@@ -32,7 +39,10 @@ from homeassistant.helpers.entity_platform import (
     AddConfigEntryEntitiesCallback,
     AddEntitiesCallback,
 )
-from homeassistant.helpers.event import async_track_state_change_event
+from homeassistant.helpers.event import (
+    async_track_state_change_event,
+    async_track_state_report_event,
+)
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 
 from .const import (
@@ -200,13 +210,33 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
                 _LOGGER.warning("Could not restore last state: %s", err)
 
         @callback
-        def calc_derivative(event: Event[EventStateChangedData]) -> None:
+        def on_state_reported(event: Event[EventStateReportedData]) -> None:
+            """Handle constant sensor state."""
+            if self._attr_native_value == Decimal(0):
+                # If the derivative is zero, and the source sensor hasn't
+                # changed state, then we know it will still be zero.
+                return
+            new_state = event.data["new_state"]
+            if new_state is not None:
+                calc_derivative(
+                    new_state, new_state.state, event.data["old_last_reported"]
+                )
+
+        @callback
+        def on_state_changed(event: Event[EventStateChangedData]) -> None:
+            """Handle changed sensor state."""
+            new_state = event.data["new_state"]
+            old_state = event.data["old_state"]
+            if new_state is not None and old_state is not None:
+                calc_derivative(new_state, old_state.state, old_state.last_reported)
+
+        def calc_derivative(
+            new_state: State, old_value: str, old_last_reported: datetime
+        ) -> None:
             """Handle the sensor state changes."""
-            if (
-                (old_state := event.data["old_state"]) is None
-                or old_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE)
-                or (new_state := event.data["new_state"]) is None
-                or new_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE)
+            if old_value in (STATE_UNKNOWN, STATE_UNAVAILABLE) or new_state.state in (
+                STATE_UNKNOWN,
+                STATE_UNAVAILABLE,
             ):
                 return
@@ -220,15 +250,15 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
             self._state_list = [
                 (time_start, time_end, state)
                 for time_start, time_end, state in self._state_list
-                if (new_state.last_updated - time_end).total_seconds()
+                if (new_state.last_reported - time_end).total_seconds()
                 < self._time_window
             ]
 
             try:
                 elapsed_time = (
-                    new_state.last_updated - old_state.last_updated
+                    new_state.last_reported - old_last_reported
                 ).total_seconds()
-                delta_value = Decimal(new_state.state) - Decimal(old_state.state)
+                delta_value = Decimal(new_state.state) - Decimal(old_value)
                 new_derivative = (
                     delta_value
                     / Decimal(elapsed_time)
@@ -240,7 +270,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
                 _LOGGER.warning("While calculating derivative: %s", err)
             except DecimalException as err:
                 _LOGGER.warning(
-                    "Invalid state (%s > %s): %s", old_state.state, new_state.state, err
+                    "Invalid state (%s > %s): %s", old_value, new_state.state, err
                 )
             except AssertionError as err:
                 _LOGGER.error("Could not calculate derivative: %s", err)
@@ -257,7 +287,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
 
             # add latest derivative to the window list
             self._state_list.append(
-                (old_state.last_updated, new_state.last_updated, new_derivative)
+                (old_last_reported, new_state.last_reported, new_derivative)
             )
 
         def calculate_weight(
@@ -277,13 +307,19 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
             else:
                 derivative = Decimal("0.00")
                 for start, end, value in self._state_list:
-                    weight = calculate_weight(start, end, new_state.last_updated)
+                    weight = calculate_weight(start, end, new_state.last_reported)
                     derivative = derivative + (value * Decimal(weight))
             self._attr_native_value = round(derivative, self._round_digits)
             self.async_write_ha_state()
 
         self.async_on_remove(
             async_track_state_change_event(
-                self.hass, self._sensor_source_id, calc_derivative
+                self.hass, self._sensor_source_id, on_state_changed
             )
         )
 
+        self.async_on_remove(
+            async_track_state_report_event(
+                self.hass, self._sensor_source_id, on_state_reported
+            )
+        )

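For context on the `last_reported` window math above: the derivative sensor keeps a list of (start, end, value) segments and weights each one by how much of the look-back window it covers, so old readings fade out smoothly. A simplified standalone sketch of that weighting (toy types and values, not the integration's exact code):

```python
from decimal import Decimal


def window_average(
    segments: list[tuple[float, float, Decimal]],  # (start, end, derivative)
    now: float,
    time_window: float,
) -> Decimal:
    """Average segment values, weighted by the window fraction each covers."""
    total = Decimal(0)
    window_start = now - time_window
    for start, end, value in segments:
        # Clamp the segment to the window, then weight by the covered share.
        covered = max(0.0, min(end, now) - max(start, window_start))
        total += value * Decimal(covered / time_window)
    return total


# Two half-window segments with derivatives 2 and 4 average to 3.
print(window_average([(0.0, 30.0, Decimal(2)), (30.0, 60.0, Decimal(4))], 60.0, 60.0))
```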
@@ -8,6 +8,7 @@ from devolo_plc_api.device_api import (
|
||||
WifiGuestAccessGet,
|
||||
)
|
||||
from devolo_plc_api.plcnet_api import DataRate, LogicalNetwork
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.const import ATTR_CONNECTIONS
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
@@ -43,7 +44,7 @@ class DevoloEntity(Entity):
|
||||
self.entry = entry
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
configuration_url=f"http://{self.device.ip}",
|
||||
configuration_url=URL.build(scheme="http", host=self.device.ip),
|
||||
identifiers={(DOMAIN, str(self.device.serial_number))},
|
||||
manufacturer="devolo",
|
||||
model=self.device.product,
|
||||
|
||||
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ecovacs",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.10", "deebot-client==12.2.0"]
+  "requirements": ["py-sucks==0.9.10", "deebot-client==12.3.1"]
 }

@@ -6,5 +6,5 @@
   "dependencies": ["webhook"],
   "documentation": "https://www.home-assistant.io/integrations/ecowitt",
   "iot_class": "local_push",
-  "requirements": ["aioecowitt==2024.2.1"]
+  "requirements": ["aioecowitt==2025.3.1"]
 }

@@ -105,6 +105,7 @@ class ElkArea(ElkAttachedEntity, AlarmControlPanelEntity, RestoreEntity):
         AlarmControlPanelEntityFeature.ARM_HOME
         | AlarmControlPanelEntityFeature.ARM_AWAY
         | AlarmControlPanelEntityFeature.ARM_NIGHT
+        | AlarmControlPanelEntityFeature.ARM_VACATION
     )
     _element: Area
 
@@ -204,7 +205,7 @@ class ElkArea(ElkAttachedEntity, AlarmControlPanelEntity, RestoreEntity):
             ArmedStatus.ARMED_STAY_INSTANT: AlarmControlPanelState.ARMED_HOME,
             ArmedStatus.ARMED_TO_NIGHT: AlarmControlPanelState.ARMED_NIGHT,
             ArmedStatus.ARMED_TO_NIGHT_INSTANT: AlarmControlPanelState.ARMED_NIGHT,
-            ArmedStatus.ARMED_TO_VACATION: AlarmControlPanelState.ARMED_AWAY,
+            ArmedStatus.ARMED_TO_VACATION: AlarmControlPanelState.ARMED_VACATION,
         }
 
         if self._element.alarm_state is None:

@@ -6,5 +6,5 @@
   "iot_class": "local_push",
   "loggers": ["sense_energy"],
   "quality_scale": "internal",
-  "requirements": ["sense-energy==0.13.5"]
+  "requirements": ["sense-energy==0.13.6"]
 }

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/environment_canada",
   "iot_class": "cloud_polling",
   "loggers": ["env_canada"],
-  "requirements": ["env-canada==0.7.2"]
+  "requirements": ["env-canada==0.8.0"]
 }

@@ -3,6 +3,7 @@
 from __future__ import annotations
 
 from functools import partial
+from math import isfinite
 from typing import Any, cast
 
 from aioesphomeapi import (
@@ -238,9 +239,13 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
     @esphome_state_property
     def current_humidity(self) -> int | None:
         """Return the current humidity."""
-        if not self._static_info.supports_current_humidity:
+        if (
+            not self._static_info.supports_current_humidity
+            or (val := self._state.current_humidity) is None
+            or not isfinite(val)
+        ):
             return None
-        return round(self._state.current_humidity)
+        return round(val)
 
     @property
     @esphome_float_state_property

@@ -13,11 +13,13 @@ DEFAULT_ALLOW_SERVICE_CALLS = True
 DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS = False
 
 
-STABLE_BLE_VERSION_STR = "2025.2.1"
+STABLE_BLE_VERSION_STR = "2025.2.2"
 STABLE_BLE_VERSION = AwesomeVersion(STABLE_BLE_VERSION_STR)
 PROJECT_URLS = {
     "esphome.bluetooth-proxy": "https://esphome.github.io/bluetooth-proxies/",
 }
-DEFAULT_URL = f"https://esphome.io/changelog/{STABLE_BLE_VERSION_STR}.html"
+# ESPHome always uses .0 for the changelog URL
+STABLE_BLE_URL_VERSION = f"{STABLE_BLE_VERSION.major}.{STABLE_BLE_VERSION.minor}.0"
+DEFAULT_URL = f"https://esphome.io/changelog/{STABLE_BLE_URL_VERSION}.html"
 
 DATA_FFMPEG_PROXY = f"{DOMAIN}.ffmpeg_proxy"

@@ -11,6 +11,7 @@ from typing import Any
 import evohomeasync as ec1
 import evohomeasync2 as ec2
 from evohomeasync2.const import (
+    SZ_DHW,
     SZ_GATEWAY_ID,
     SZ_GATEWAY_INFO,
     SZ_GATEWAYS,
@@ -19,8 +20,9 @@ from evohomeasync2.const import (
     SZ_TEMPERATURE_CONTROL_SYSTEMS,
     SZ_TIME_ZONE,
     SZ_USE_DAYLIGHT_SAVE_SWITCHING,
+    SZ_ZONES,
 )
-from evohomeasync2.schemas.typedefs import EvoLocStatusResponseT
+from evohomeasync2.schemas.typedefs import EvoLocStatusResponseT, EvoTcsConfigResponseT
 
 from homeassistant.const import CONF_SCAN_INTERVAL
 from homeassistant.core import HomeAssistant
@@ -113,17 +115,19 @@ class EvoDataUpdateCoordinator(DataUpdateCoordinator):
                 SZ_USE_DAYLIGHT_SAVE_SWITCHING
             ],
         }
+        tcs_info: EvoTcsConfigResponseT = self.tcs.config  # type: ignore[assignment]
+        tcs_info[SZ_ZONES] = [zone.config for zone in self.tcs.zones]
+        if self.tcs.hotwater:
+            tcs_info[SZ_DHW] = self.tcs.hotwater.config
         gwy_info = {
             SZ_GATEWAY_ID: self.loc.gateways[0].id,
-            SZ_TEMPERATURE_CONTROL_SYSTEMS: [
-                self.loc.gateways[0].systems[0].config
-            ],
+            SZ_TEMPERATURE_CONTROL_SYSTEMS: [tcs_info],
         }
         config = {
             SZ_LOCATION_INFO: loc_info,
             SZ_GATEWAYS: [{SZ_GATEWAY_INFO: gwy_info}],
         }
-        self.logger.debug("Config = %s", config)
+        self.logger.debug("Config = %s", [config])
 
     async def call_client_api(
         self,
@@ -203,10 +207,18 @@ class EvoDataUpdateCoordinator(DataUpdateCoordinator):
 
     async def _update_v2_schedules(self) -> None:
         for zone in self.tcs.zones:
-            await zone.get_schedule()
+            try:
+                await zone.get_schedule()
+            except ec2.InvalidScheduleError as err:
+                self.logger.warning(
+                    "Zone '%s' has an invalid/missing schedule: %r", zone.name, err
+                )
 
         if dhw := self.tcs.hotwater:
-            await dhw.get_schedule()
+            try:
+                await dhw.get_schedule()
+            except ec2.InvalidScheduleError as err:
+                self.logger.warning("DHW has an invalid/missing schedule: %r", err)
 
     async def _async_update_data(self) -> EvoLocStatusResponseT:  # type: ignore[override]
         """Fetch the latest state of an entire TCC Location.

@@ -6,6 +6,7 @@ import logging
 from typing import Any
 
 import evohomeasync2 as evo
+from evohomeasync2.schemas.typedefs import DayOfWeekDhwT
 
 from homeassistant.core import callback
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
@@ -102,7 +103,7 @@ class EvoChild(EvoEntity):
 
         self._evo_tcs = evo_device.tcs
 
-        self._schedule: dict[str, Any] | None = None
+        self._schedule: list[DayOfWeekDhwT] | None = None
         self._setpoints: dict[str, Any] = {}
 
     @property
@@ -123,6 +124,9 @@ class EvoChild(EvoEntity):
         Only Zones & DHW controllers (but not the TCS) can have schedules.
         """
 
+        if not self._schedule:
+            return self._setpoints
+
         this_sp_dtm, this_sp_val = self._evo_device.this_switchpoint
         next_sp_dtm, next_sp_val = self._evo_device.next_switchpoint
 
@@ -152,10 +156,10 @@ class EvoChild(EvoEntity):
                 self._evo_device,
                 err,
             )
-            self._schedule = {}
+            self._schedule = []
             return
         else:
-            self._schedule = schedule or {}  # mypy hint
+            self._schedule = schedule  # type: ignore[assignment]
 
         _LOGGER.debug("Schedule['%s'] = %s", self.name, schedule)
 

@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20250227.0"]
+  "requirements": ["home-assistant-frontend==20250306.0"]
 }

@@ -65,9 +65,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
 
         prompt_parts = [call.data[CONF_PROMPT]]
 
-        config_entry: GoogleGenerativeAIConfigEntry = hass.config_entries.async_entries(
-            DOMAIN
-        )[0]
+        config_entry: GoogleGenerativeAIConfigEntry = (
+            hass.config_entries.async_loaded_entries(DOMAIN)[0]
+        )
 
         client = config_entry.runtime_data
 

@@ -64,28 +64,18 @@ async def async_setup_entry(
 
 
 SUPPORTED_SCHEMA_KEYS = {
-    "min_items",
-    "example",
-    "property_ordering",
-    "pattern",
-    "minimum",
-    "default",
-    "any_of",
-    "max_length",
-    "title",
-    "min_properties",
-    "min_length",
-    "max_items",
-    "maximum",
-    "nullable",
-    "max_properties",
+    # Gemini API does not support all of the OpenAPI schema
+    # SoT: https://ai.google.dev/api/caching#Schema
     "type",
-    "description",
-    "enum",
     "format",
-    "items",
+    "description",
+    "nullable",
+    "enum",
+    "max_items",
+    "min_items",
     "properties",
     "required",
+    "items",
 }
@@ -109,11 +99,20 @@ def _format_schema(schema: dict[str, Any]) -> Schema:
         key = _camel_to_snake(key)
         if key not in SUPPORTED_SCHEMA_KEYS:
             continue
-        if key == "any_of":
-            val = [_format_schema(subschema) for subschema in val]
         if key == "type":
             val = val.upper()
-        if key == "items":
+        elif key == "format":
+            # Gemini API does not support all formats, see: https://ai.google.dev/api/caching#Schema
+            # formats that are not supported are ignored
+            if schema.get("type") == "string" and val not in ("enum", "date-time"):
+                continue
+            if schema.get("type") == "number" and val not in ("float", "double"):
+                continue
+            if schema.get("type") == "integer" and val not in ("int32", "int64"):
+                continue
+            if schema.get("type") not in ("string", "number", "integer"):
+                continue
+        elif key == "items":
             val = _format_schema(val)
         elif key == "properties":
             val = {k: _format_schema(v) for k, v in val.items()}

@@ -20,3 +20,4 @@ MAX_ERRORS = 2
 TARGET_TEMPERATURE_STEP = 1
 
 UPDATE_INTERVAL = 60
+MAX_EXPECTED_RESPONSE_TIME_INTERVAL = UPDATE_INTERVAL * 2

@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+import copy
 from datetime import datetime, timedelta
 import logging
 from typing import Any
@@ -24,6 +25,7 @@ from .const import (
     DISPATCH_DEVICE_DISCOVERED,
     DOMAIN,
     MAX_ERRORS,
+    MAX_EXPECTED_RESPONSE_TIME_INTERVAL,
     UPDATE_INTERVAL,
 )
 
@@ -48,7 +50,6 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
             always_update=False,
         )
         self.device = device
         self.device.add_handler(Response.DATA, self.device_state_updated)
-        self.device.add_handler(Response.RESULT, self.device_state_updated)
 
         self._error_count: int = 0
@@ -88,7 +89,9 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
         # raise update failed if time for more than MAX_ERRORS has passed since last update
         now = utcnow()
         elapsed_success = now - self._last_response_time
-        if self.update_interval and elapsed_success >= self.update_interval:
+        if self.update_interval and elapsed_success >= timedelta(
+            seconds=MAX_EXPECTED_RESPONSE_TIME_INTERVAL
+        ):
             if not self._last_error_time or (
                 (now - self.update_interval) >= self._last_error_time
             ):
@@ -96,16 +99,19 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
                 self._error_count += 1
 
                 _LOGGER.warning(
-                    "Device %s is unresponsive for %s seconds",
+                    "Device %s took an unusually long time to respond, %s seconds",
                     self.name,
                     elapsed_success,
                 )
+        else:
+            self._error_count = 0
         if self.last_update_success and self._error_count >= MAX_ERRORS:
             raise UpdateFailed(
                 f"Device {self.name} is unresponsive for too long and now unavailable"
             )
 
-        return self.device.raw_properties
+        self._last_response_time = utcnow()
+        return copy.deepcopy(self.device.raw_properties)
 
     async def push_state_update(self):
         """Send state updates to the physical device."""

@@ -26,6 +26,7 @@ TOTAL_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
         api_key="todayEnergy",
         native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
         device_class=SensorDeviceClass.ENERGY,
+        state_class=SensorStateClass.TOTAL_INCREASING,
     ),
     GrowattSensorEntityDescription(
         key="total_output_power",
@@ -33,6 +34,7 @@ TOTAL_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
         api_key="invTodayPpv",
         native_unit_of_measurement=UnitOfPower.WATT,
         device_class=SensorDeviceClass.POWER,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     GrowattSensorEntityDescription(
         key="total_energy_output",

@@ -11,7 +11,6 @@ from hko import HKO, HKOError
 from homeassistant.components.weather import (
     ATTR_CONDITION_CLOUDY,
     ATTR_CONDITION_FOG,
-    ATTR_CONDITION_HAIL,
     ATTR_CONDITION_LIGHTNING_RAINY,
     ATTR_CONDITION_PARTLYCLOUDY,
     ATTR_CONDITION_POURING,
@@ -145,7 +144,7 @@ class HKOUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
         """Return the condition corresponding to the weather info."""
         info = info.lower()
         if WEATHER_INFO_RAIN in info:
-            return ATTR_CONDITION_HAIL
+            return ATTR_CONDITION_RAINY
         if WEATHER_INFO_SNOW in info and WEATHER_INFO_RAIN in info:
             return ATTR_CONDITION_SNOWY_RAINY
         if WEATHER_INFO_SNOW in info:

@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/holiday",
   "iot_class": "local_polling",
-  "requirements": ["holidays==0.67", "babel==2.15.0"]
+  "requirements": ["holidays==0.68", "babel==2.15.0"]
 }

@@ -47,8 +47,6 @@ _LOGGER = logging.getLogger(__name__)
 
 type HomeConnectConfigEntry = ConfigEntry[HomeConnectCoordinator]
 
-EVENT_STREAM_RECONNECT_DELAY = 30
-
 
 @dataclass(frozen=True, kw_only=True)
 class HomeConnectApplianceData:
@@ -100,6 +98,7 @@ class HomeConnectCoordinator(
             CALLBACK_TYPE, tuple[CALLBACK_TYPE, tuple[EventKey, ...]]
         ] = {}
         self.device_registry = dr.async_get(self.hass)
+        self.data = {}
 
     @cached_property
     def context_listeners(self) -> dict[tuple[str, EventKey], list[CALLBACK_TYPE]]:
@@ -157,10 +156,20 @@
 
     async def _event_listener(self) -> None:
         """Match event with listener for event type."""
+        retry_time = 10
         while True:
             try:
                 async for event_message in self.client.stream_all_events():
+                    retry_time = 10
                     event_message_ha_id = event_message.ha_id
+                    if (
+                        event_message_ha_id in self.data
+                        and not self.data[event_message_ha_id].info.connected
+                    ):
+                        self.data[event_message_ha_id].info.connected = True
+                        self._call_all_event_listeners_for_appliance(
+                            event_message_ha_id
+                        )
                     match event_message.type:
                         case EventType.STATUS:
                             statuses = self.data[event_message_ha_id].status
@@ -256,20 +265,18 @@
             except (EventStreamInterruptedError, HomeConnectRequestError) as error:
                 _LOGGER.debug(
                     "Non-breaking error (%s) while listening for events,"
-                    " continuing in 30 seconds",
+                    " continuing in %s seconds",
                     type(error).__name__,
+                    retry_time,
                 )
-                await asyncio.sleep(EVENT_STREAM_RECONNECT_DELAY)
+                await asyncio.sleep(retry_time)
+                retry_time = min(retry_time * 2, 3600)
             except HomeConnectApiError as error:
                 _LOGGER.error("Error while listening for events: %s", error)
                 self.hass.config_entries.async_schedule_reload(
                     self.config_entry.entry_id
                 )
                 break
-            # if there was a non-breaking error, we continue listening
-            # but we need to refresh the data to get the possible changes
-            # that happened while the event stream was interrupted
-            await self.async_refresh()
 
     @callback
     def _call_event_listener(self, event_message: EventMessage) -> None:
@@ -297,6 +304,8 @@
                 translation_placeholders=get_dict_from_home_connect_error(error),
             ) from error
         except HomeConnectError as error:
+            for appliance_data in self.data.values():
+                appliance_data.info.connected = False
             raise UpdateFailed(
                 translation_domain=DOMAIN,
                 translation_key="fetch_api_error",
@@ -305,7 +314,7 @@
 
         return {
             appliance.ha_id: await self._get_appliance_data(
-                appliance, self.data.get(appliance.ha_id) if self.data else None
+                appliance, self.data.get(appliance.ha_id)
             )
             for appliance in appliances.homeappliances
         }

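The event-stream change above swaps a fixed 30-second reconnect delay for doubling backoff, reset whenever an event arrives and capped at one hour. The policy in isolation (a hedged sketch with a stand-in `stream` callable, not the integration's API):

```python
import asyncio


async def listen(stream, max_delay: int = 3600) -> None:
    """Consume an async event stream, backing off exponentially on errors."""
    retry_time = 10
    while True:
        try:
            async for _event in stream():
                retry_time = 10  # any successful event resets the delay
        except ConnectionError:
            await asyncio.sleep(retry_time)
            retry_time = min(retry_time * 2, max_delay)  # 10, 20, 40, ... 3600
```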
@@ -8,6 +8,7 @@ from typing import cast
 from aiohomeconnect.model import EventKey, OptionKey
 from aiohomeconnect.model.error import ActiveProgramNotSetError, HomeConnectError
 
+from homeassistant.const import STATE_UNAVAILABLE
 from homeassistant.core import callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.device_registry import DeviceInfo
@@ -51,8 +52,10 @@ class HomeConnectEntity(CoordinatorEntity[HomeConnectCoordinator]):
     def _handle_coordinator_update(self) -> None:
         """Handle updated data from the coordinator."""
         self.update_native_value()
+        available = self._attr_available = self.appliance.info.connected
         self.async_write_ha_state()
-        _LOGGER.debug("Updated %s, new state: %s", self.entity_id, self.state)
+        state = STATE_UNAVAILABLE if not available else self.state
+        _LOGGER.debug("Updated %s, new state: %s", self.entity_id, state)
 
     @property
     def bsh_key(self) -> str:
@@ -61,10 +64,13 @@ class HomeConnectEntity(CoordinatorEntity[HomeConnectCoordinator]):
 
     @property
     def available(self) -> bool:
-        """Return True if entity is available."""
-        return (
-            self.appliance.info.connected and self._attr_available and super().available
-        )
+        """Return True if entity is available.
+
+        Do not use self.last_update_success for available state
+        as event updates should take precedence over the coordinator
+        refresh.
+        """
+        return self._attr_available
 
 
 class HomeConnectOptionEntity(HomeConnectEntity):

@@ -7,6 +7,6 @@
   "documentation": "https://www.home-assistant.io/integrations/home_connect",
   "iot_class": "cloud_push",
   "loggers": ["aiohomeconnect"],
-  "requirements": ["aiohomeconnect==0.15.0"],
+  "requirements": ["aiohomeconnect==0.16.3"],
   "single_config_entry": true
 }

@@ -386,6 +386,13 @@ class HomeConnectProgramSensor(HomeConnectSensor):
 
     def update_native_value(self) -> None:
         """Update the program sensor's status."""
+        self.program_running = (
+            status := self.appliance.status.get(StatusKey.BSH_COMMON_OPERATION_STATE)
+        ) is not None and status.value in [
+            BSH_OPERATION_STATE_RUN,
+            BSH_OPERATION_STATE_PAUSE,
+            BSH_OPERATION_STATE_FINISHED,
+        ]
         event = self.appliance.events.get(cast(EventKey, self.bsh_key))
         if event:
             self._update_native_value(event.value)

@@ -1,6 +1,12 @@
 {
   "entity": {
     "sensor": {
+      "brightness": {
+        "default": "mdi:brightness-5"
+      },
+      "brightness_instance": {
+        "default": "mdi:brightness-5"
+      },
       "link_quality": {
         "default": "mdi:signal"
       },
@@ -9,7 +15,7 @@
       }
     },
     "switch": {
-      "watchdog_on_off": {
+      "watchdog": {
         "default": "mdi:dog"
       },
       "manual_operation": {

@@ -40,10 +40,22 @@ def get_window_value(attribute: HomeeAttribute) -> str | None:
     return vals.get(attribute.current_value)
 
 
+def get_brightness_device_class(
+    attribute: HomeeAttribute, device_class: SensorDeviceClass | None
+) -> SensorDeviceClass | None:
+    """Return the device class for a brightness sensor."""
+    if attribute.unit == "%":
+        return None
+    return device_class
+
+
 @dataclass(frozen=True, kw_only=True)
 class HomeeSensorEntityDescription(SensorEntityDescription):
     """A class that describes Homee sensor entities."""
 
+    device_class_fn: Callable[
+        [HomeeAttribute, SensorDeviceClass | None], SensorDeviceClass | None
+    ] = lambda attribute, device_class: device_class
     value_fn: Callable[[HomeeAttribute], str | float | None] = (
         lambda value: value.current_value
     )
@@ -67,6 +79,7 @@ SENSOR_DESCRIPTIONS: dict[AttributeType, HomeeSensorEntityDescription] = {
     AttributeType.BRIGHTNESS: HomeeSensorEntityDescription(
         key="brightness",
         device_class=SensorDeviceClass.ILLUMINANCE,
+        device_class_fn=get_brightness_device_class,
         state_class=SensorStateClass.MEASUREMENT,
         value_fn=(
             lambda attribute: attribute.current_value * 1000
@@ -303,6 +316,9 @@ class HomeeSensor(HomeeEntity, SensorEntity):
         if attribute.instance > 0:
             self._attr_translation_key = f"{self._attr_translation_key}_instance"
             self._attr_translation_placeholders = {"instance": str(attribute.instance)}
+        self._attr_device_class = description.device_class_fn(
+            attribute, description.device_class
+        )
 
     @property
     def native_value(self) -> float | str | None:

@@ -111,6 +111,9 @@
       }
     },
     "sensor": {
+      "brightness": {
+        "name": "Illuminance"
+      },
+      "brightness_instance": {
+        "name": "Illuminance {instance}"
+      },

@@ -154,7 +154,6 @@ class HKDevice:
         self._pending_subscribes: set[tuple[int, int]] = set()
         self._subscribe_timer: CALLBACK_TYPE | None = None
         self._load_platforms_lock = asyncio.Lock()
-        self._full_update_requested: bool = False
 
     @property
     def entity_map(self) -> Accessories:
@@ -841,48 +840,11 @@ class HKDevice:
 
     async def async_request_update(self, now: datetime | None = None) -> None:
         """Request an debounced update from the accessory."""
-        self._full_update_requested = True
         await self._debounced_update.async_call()
 
     async def async_update(self, now: datetime | None = None) -> None:
         """Poll state of all entities attached to this bridge/accessory."""
         to_poll = self.pollable_characteristics
-        accessories = self.entity_map.accessories
-
-        if (
-            not self._full_update_requested
-            and len(accessories) == 1
-            and self.available
-            and not (to_poll - self.watchable_characteristics)
-            and self.pairing.is_available
-            and await self.pairing.controller.async_reachable(
-                self.unique_id, timeout=5.0
-            )
-        ):
-            # If its a single accessory and all chars are watchable,
-            # only poll the firmware version to keep the connection alive
-            # https://github.com/home-assistant/core/issues/123412
-            #
-            # Firmware revision is used here since iOS does this to keep camera
-            # connections alive, and the goal is to not regress
-            # https://github.com/home-assistant/core/issues/116143
-            # by polling characteristics that are not normally polled frequently
-            # and may not be tested by the device vendor.
-            #
-            _LOGGER.debug(
-                "Accessory is reachable, limiting poll to firmware version: %s",
-                self.unique_id,
-            )
-            first_accessory = accessories[0]
-            accessory_info = first_accessory.services.first(
-                service_type=ServicesTypes.ACCESSORY_INFORMATION
-            )
-            assert accessory_info is not None
-            firmware_iid = accessory_info[CharacteristicsTypes.FIRMWARE_REVISION].iid
-            to_poll = {(first_accessory.aid, firmware_iid)}
-
-        self._full_update_requested = False
 
         if not to_poll:
             self.async_update_available_state()
             _LOGGER.debug(

@@ -14,6 +14,6 @@
   "documentation": "https://www.home-assistant.io/integrations/homekit_controller",
   "iot_class": "local_push",
   "loggers": ["aiohomekit", "commentjson"],
-  "requirements": ["aiohomekit==3.2.7"],
+  "requirements": ["aiohomekit==3.2.8"],
   "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
 }

@@ -3,6 +3,7 @@
     "step": {
       "init": {
         "title": "Pick Homematic IP access point",
+        "description": "If you are about to register a **Homematic IP HCU1**, please press the button on top of the device before you continue.\n\nThe registration process must be completed within 5 minutes.",
         "data": {
           "hapid": "Access point ID (SGTIN)",
           "pin": "[%key:common::config_flow::data::pin%]",

@@ -94,7 +94,12 @@ async def async_setup_devices(bridge: HueBridge):
         add_device(hue_resource)
 
     # create/update all current devices found in controllers
-    known_devices = [add_device(hue_device) for hue_device in dev_controller]
+    # sort the devices to ensure bridges are added first
+    hue_devices = list(dev_controller)
+    hue_devices.sort(
+        key=lambda dev: dev.metadata.archetype != DeviceArchetypes.BRIDGE_V2
+    )
+    known_devices = [add_device(hue_device) for hue_device in hue_devices]
     known_devices += [add_device(hue_room) for hue_room in api.groups.room]
     known_devices += [add_device(hue_zone) for hue_zone in api.groups.zone]
 

@@ -280,7 +280,7 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
         if self.custom_event_template is not None:
             try:
                 data["custom"] = self.custom_event_template.async_render(
-                    data, parse_result=True
+                    data | {"text": message.text}, parse_result=True
                 )
                 _LOGGER.debug(
                     "IMAP custom template (%s) for msguid %s (%s) rendered to: %s, initial: %s",

@@ -28,5 +28,5 @@
   "dependencies": ["bluetooth_adapters"],
   "documentation": "https://www.home-assistant.io/integrations/inkbird",
   "iot_class": "local_push",
-  "requirements": ["inkbird-ble==0.7.0"]
+  "requirements": ["inkbird-ble==0.7.1"]
 }

@@ -9,6 +9,7 @@ from aiohttp import web
 import voluptuous as vol
 
 from homeassistant.components import http
+from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
 from homeassistant.components.cover import (
     ATTR_POSITION,
     DOMAIN as COVER_DOMAIN,
@@ -140,6 +141,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     intent.async_register(hass, GetCurrentDateIntentHandler())
     intent.async_register(hass, GetCurrentTimeIntentHandler())
     intent.async_register(hass, RespondIntentHandler())
+    intent.async_register(hass, GetTemperatureIntent())
 
     return True
 
@@ -444,6 +446,48 @@ class RespondIntentHandler(intent.IntentHandler):
         return response
 
 
+class GetTemperatureIntent(intent.IntentHandler):
+    """Handle GetTemperature intents."""
+
+    intent_type = intent.INTENT_GET_TEMPERATURE
+    description = "Gets the current temperature of a climate device or entity"
+    slot_schema = {
+        vol.Optional("area"): intent.non_empty_string,
+        vol.Optional("name"): intent.non_empty_string,
+    }
+    platforms = {CLIMATE_DOMAIN}
+
+    async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse:
+        """Handle the intent."""
+        hass = intent_obj.hass
+        slots = self.async_validate_slots(intent_obj.slots)
+
+        name: str | None = None
+        if "name" in slots:
+            name = slots["name"]["value"]
+
+        area: str | None = None
+        if "area" in slots:
+            area = slots["area"]["value"]
+
+        match_constraints = intent.MatchTargetsConstraints(
+            name=name,
+            area_name=area,
+            domains=[CLIMATE_DOMAIN],
+            assistant=intent_obj.assistant,
+        )
+        match_result = intent.async_match_targets(hass, match_constraints)
+        if not match_result.is_match:
+            raise intent.MatchFailedError(
+                result=match_result, constraints=match_constraints
+            )
+
+        response = intent_obj.create_response()
+        response.response_type = intent.IntentResponseType.QUERY_ANSWER
+        response.async_set_states(matched_states=match_result.states)
+        return response
+
+
 async def _async_process_intent(
     hass: HomeAssistant, domain: str, platform: IntentPlatformProtocol
 ) -> None:

@@ -110,7 +110,9 @@ class ThinQClimateEntity(ThinQEntity, ClimateEntity):
         self._attr_hvac_modes = [HVACMode.OFF]
         self._attr_hvac_mode = HVACMode.OFF
         self._attr_preset_modes = []
-        self._attr_temperature_unit = UnitOfTemperature.CELSIUS
+        self._attr_temperature_unit = (
+            self._get_unit_of_measurement(self.data.unit) or UnitOfTemperature.CELSIUS
+        )
         self._requested_hvac_mode: str | None = None
 
         # Set up HVAC modes.
@@ -182,6 +184,11 @@ class ThinQClimateEntity(ThinQEntity, ClimateEntity):
         self._attr_target_temperature_high = self.data.target_temp_high
         self._attr_target_temperature_low = self.data.target_temp_low
 
+        # Update unit.
+        self._attr_temperature_unit = (
+            self._get_unit_of_measurement(self.data.unit) or UnitOfTemperature.CELSIUS
+        )
+
         _LOGGER.debug(
             "[%s:%s] update status: c:%s, t:%s, l:%s, h:%s, hvac:%s, unit:%s, step:%s",
             self.coordinator.device_name,

@@ -3,6 +3,8 @@
 from datetime import timedelta
 from typing import Final
 
+from homeassistant.const import UnitOfTemperature
+
 # Config flow
 DOMAIN = "lg_thinq"
 COMPANY = "LGE"
@@ -18,3 +20,10 @@ MQTT_SUBSCRIPTION_INTERVAL: Final = timedelta(days=1)
 # MQTT: Message types
 DEVICE_PUSH_MESSAGE: Final = "DEVICE_PUSH"
 DEVICE_STATUS_MESSAGE: Final = "DEVICE_STATUS"
+
+# Unit conversion map
+DEVICE_UNIT_TO_HA: dict[str, str] = {
+    "F": UnitOfTemperature.FAHRENHEIT,
+    "C": UnitOfTemperature.CELSIUS,
+}
+REVERSE_DEVICE_UNIT_TO_HA = {v: k for k, v in DEVICE_UNIT_TO_HA.items()}

@@ -2,19 +2,21 @@
 
 from __future__ import annotations
 
+from collections.abc import Mapping
 import logging
 from typing import TYPE_CHECKING, Any
 
 from thinqconnect import ThinQAPIException
 from thinqconnect.integration import HABridge
 
-from homeassistant.core import HomeAssistant
+from homeassistant.const import EVENT_CORE_CONFIG_UPDATE
+from homeassistant.core import Event, HomeAssistant, callback
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 
 if TYPE_CHECKING:
     from . import ThinqConfigEntry
 
-from .const import DOMAIN
+from .const import DOMAIN, REVERSE_DEVICE_UNIT_TO_HA
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -54,6 +56,40 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
             f"{self.device_id}_{self.sub_id}" if self.sub_id else self.device_id
         )
 
+        # Set your preferred temperature unit. This will allow us to retrieve
+        # temperature values from the API in a converted value corresponding to
+        # preferred unit.
+        self._update_preferred_temperature_unit()
+
+        # Add a callback to handle core config update.
+        self.unit_system: str | None = None
+        self.hass.bus.async_listen(
+            event_type=EVENT_CORE_CONFIG_UPDATE,
+            listener=self._handle_update_config,
+            event_filter=self.async_config_update_filter,
+        )
+
+    async def _handle_update_config(self, _: Event) -> None:
+        """Handle update core config."""
+        self._update_preferred_temperature_unit()
+
+        await self.async_refresh()
+
+    @callback
+    def async_config_update_filter(self, event_data: Mapping[str, Any]) -> bool:
+        """Filter out unwanted events."""
+        if (unit_system := event_data.get("unit_system")) != self.unit_system:
+            self.unit_system = unit_system
+            return True
+
+        return False
+
+    def _update_preferred_temperature_unit(self) -> None:
+        """Update preferred temperature unit."""
+        self.api.set_preferred_temperature_unit(
+            REVERSE_DEVICE_UNIT_TO_HA.get(self.hass.config.units.temperature_unit)
+        )
+
     async def _async_update_data(self) -> dict[str, Any]:
         """Request to the server to update the status from full response data."""
         try:

@@ -10,25 +10,19 @@ from thinqconnect import ThinQAPIException
 from thinqconnect.devices.const import Location
 from thinqconnect.integration import PropertyState
 
-from homeassistant.const import UnitOfTemperature
 from homeassistant.core import callback
 from homeassistant.exceptions import ServiceValidationError
 from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.entity import EntityDescription
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
 
-from .const import COMPANY, DOMAIN
+from .const import COMPANY, DEVICE_UNIT_TO_HA, DOMAIN
 from .coordinator import DeviceDataUpdateCoordinator
 
 _LOGGER = logging.getLogger(__name__)
 
 EMPTY_STATE = PropertyState()
 
-UNIT_CONVERSION_MAP: dict[str, str] = {
-    "F": UnitOfTemperature.FAHRENHEIT,
-    "C": UnitOfTemperature.CELSIUS,
-}
-
 
 class ThinQEntity(CoordinatorEntity[DeviceDataUpdateCoordinator]):
     """The base implementation of all lg thinq entities."""
@@ -75,7 +69,7 @@ class ThinQEntity(CoordinatorEntity[DeviceDataUpdateCoordinator]):
         if unit is None:
             return None
 
-        return UNIT_CONVERSION_MAP.get(unit)
+        return DEVICE_UNIT_TO_HA.get(unit)
 
     def _update_status(self) -> None:
         """Update status itself.

@@ -1,6 +1,6 @@
 {
   "domain": "matter",
-  "name": "Matter (BETA)",
+  "name": "Matter",
   "after_dependencies": ["hassio"],
   "codeowners": ["@home-assistant/matter"],
   "config_flow": true,

@@ -8,6 +8,6 @@
   "iot_class": "calculated",
   "loggers": ["yt_dlp"],
   "quality_scale": "internal",
-  "requirements": ["yt-dlp[default]==2025.01.26"],
+  "requirements": ["yt-dlp[default]==2025.02.19"],
   "single_config_entry": true
 }

@@ -217,6 +217,10 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
                 self._attr_color_mode = next(iter(self.supported_color_modes))
             else:
                 self._attr_color_mode = ColorMode.UNKNOWN
+        elif config.get(CONF_BRIGHTNESS):
+            # Brightness is supported and no supported_color_modes are set,
+            # so set brightness as the supported color mode.
+            self._attr_supported_color_modes = {ColorMode.BRIGHTNESS}
 
     def _update_color(self, values: dict[str, Any]) -> None:
         color_mode: str = values["color_mode"]

@@ -11,6 +11,7 @@ import voluptuous as vol
|
||||
from homeassistant.components import sensor
|
||||
from homeassistant.components.sensor import (
|
||||
CONF_STATE_CLASS,
|
||||
DEVICE_CLASS_UNITS,
|
||||
DEVICE_CLASSES_SCHEMA,
|
||||
ENTITY_ID_FORMAT,
|
||||
STATE_CLASSES_SCHEMA,
|
||||
@@ -107,6 +108,20 @@ def validate_sensor_state_and_device_class_config(config: ConfigType) -> ConfigT
|
||||
f"got `{CONF_DEVICE_CLASS}` '{device_class}'"
|
||||
)
|
||||
|
||||
if (device_class := config.get(CONF_DEVICE_CLASS)) is None or (
|
||||
unit_of_measurement := config.get(CONF_UNIT_OF_MEASUREMENT)
|
||||
) is None:
|
||||
return config
|
||||
|
||||
if (
|
||||
device_class in DEVICE_CLASS_UNITS
|
||||
and unit_of_measurement not in DEVICE_CLASS_UNITS[device_class]
|
||||
):
|
||||
raise vol.Invalid(
|
||||
f"The unit of measurement `{unit_of_measurement}` is not valid "
|
||||
f"together with device class `{device_class}`"
|
||||
)
|
||||
|
||||
return config
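The added guard rejects unit/device-class pairs that the sensor platform's DEVICE_CLASS_UNITS table does not allow. A compact sketch of the same check against a hypothetical subset of that table; the real mapping lives in homeassistant.components.sensor:

```python
import voluptuous as vol

# Hypothetical subset of the device-class -> allowed-units table (assumption).
DEVICE_CLASS_UNITS = {"temperature": {"°C", "°F", "K"}}


def validate_unit(config: dict) -> dict:
    """Raise vol.Invalid when the unit does not fit the device class."""
    device_class = config.get("device_class")
    unit = config.get("unit_of_measurement")
    # Nothing to validate unless both keys are present.
    if device_class is None or unit is None:
        return config
    if device_class in DEVICE_CLASS_UNITS and unit not in DEVICE_CLASS_UNITS[device_class]:
        raise vol.Invalid(
            f"The unit of measurement `{unit}` is not valid "
            f"together with device class `{device_class}`"
        )
    return config


assert validate_unit({"device_class": "temperature", "unit_of_measurement": "°C"})
```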
@@ -9,7 +9,7 @@ from typing import TYPE_CHECKING
from music_assistant_client import MusicAssistantClient
from music_assistant_client.exceptions import CannotConnect, InvalidServerVersion
from music_assistant_models.enums import EventType
from music_assistant_models.errors import MusicAssistantError
from music_assistant_models.errors import ActionUnavailable, MusicAssistantError

from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP, Platform

@@ -23,7 +23,7 @@ from homeassistant.helpers.issue_registry import (
    async_delete_issue,
)

from .actions import register_actions
from .actions import get_music_assistant_client, register_actions
from .const import DOMAIN, LOGGER

if TYPE_CHECKING:

@@ -137,6 +137,18 @@ async def async_setup_entry(
        mass.subscribe(handle_player_removed, EventType.PLAYER_REMOVED)
    )

    # check if any player configs have been removed while we were disconnected
    all_player_configs = await mass.config.get_player_configs()
    player_ids = {player.player_id for player in all_player_configs}
    dev_reg = dr.async_get(hass)
    dev_entries = dr.async_entries_for_config_entry(dev_reg, entry.entry_id)
    for device in dev_entries:
        for identifier in device.identifiers:
            if identifier[0] == DOMAIN and identifier[1] not in player_ids:
                dev_reg.async_update_device(
                    device.id, remove_config_entry_id=entry.entry_id
                )

    return True
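The setup hook above prunes registry devices whose players disappeared from the server while Home Assistant was disconnected. A minimal sketch of the same orphan check over plain sets, with illustrative identifiers:

```python
DOMAIN = "music_assistant"  # assumed for the sketch


def find_orphans(
    registry_identifiers: set[tuple[str, str]], server_player_ids: set[str]
) -> set[str]:
    """Return registry player ids that no longer exist on the server."""
    return {
        player_id
        for domain, player_id in registry_identifiers
        if domain == DOMAIN and player_id not in server_player_ids
    }


# Example: player "b" was deleted server-side, so it should be pruned.
assert find_orphans(
    {("music_assistant", "a"), ("music_assistant", "b")}, {"a"}
) == {"b"}
```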
@@ -174,3 +186,31 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
        await mass_entry_data.mass.disconnect()

    return unload_ok


async def async_remove_config_entry_device(
    hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry
) -> bool:
    """Remove a config entry from a device."""
    player_id = next(
        (
            identifier[1]
            for identifier in device_entry.identifiers
            if identifier[0] == DOMAIN
        ),
        None,
    )
    if player_id is None:
        # this should not be possible at all, but guard it anyway
        return False
    mass = get_music_assistant_client(hass, config_entry.entry_id)
    if mass.players.get(player_id) is None:
        # player is already removed on the server, this is an orphaned device
        return True
    # try to remove the player from the server
    try:
        await mass.config.remove_player_config(player_id)
    except ActionUnavailable:
        return False
    else:
        return True

@@ -23,6 +23,7 @@ from .const import (
    ATTR_ALBUM_TYPE,
    ATTR_ALBUMS,
    ATTR_ARTISTS,
    ATTR_AUDIOBOOKS,
    ATTR_CONFIG_ENTRY_ID,
    ATTR_FAVORITE,
    ATTR_ITEMS,

@@ -32,6 +33,7 @@ from .const import (
    ATTR_OFFSET,
    ATTR_ORDER_BY,
    ATTR_PLAYLISTS,
    ATTR_PODCASTS,
    ATTR_RADIO,
    ATTR_SEARCH,
    ATTR_SEARCH_ALBUM,

@@ -48,7 +50,15 @@ from .schemas import (

if TYPE_CHECKING:
    from music_assistant_client import MusicAssistantClient
    from music_assistant_models.media_items import Album, Artist, Playlist, Radio, Track
    from music_assistant_models.media_items import (
        Album,
        Artist,
        Audiobook,
        Playlist,
        Podcast,
        Radio,
        Track,
    )

    from . import MusicAssistantConfigEntry

@@ -155,6 +165,14 @@ async def handle_search(call: ServiceCall) -> ServiceResponse:
                media_item_dict_from_mass_item(mass, item)
                for item in search_results.radio
            ],
            ATTR_AUDIOBOOKS: [
                media_item_dict_from_mass_item(mass, item)
                for item in search_results.audiobooks
            ],
            ATTR_PODCASTS: [
                media_item_dict_from_mass_item(mass, item)
                for item in search_results.podcasts
            ],
        }
    )
    return response

@@ -175,7 +193,13 @@ async def handle_get_library(call: ServiceCall) -> ServiceResponse:
        "order_by": order_by,
    }
    library_result: (
        list[Album] | list[Artist] | list[Track] | list[Radio] | list[Playlist]
        list[Album]
        | list[Artist]
        | list[Track]
        | list[Radio]
        | list[Playlist]
        | list[Audiobook]
        | list[Podcast]
    )
    if media_type == MediaType.ALBUM:
        library_result = await mass.music.get_library_albums(

@@ -199,6 +223,14 @@ async def handle_get_library(call: ServiceCall) -> ServiceResponse:
        library_result = await mass.music.get_library_playlists(
            **base_params,
        )
    elif media_type == MediaType.AUDIOBOOK:
        library_result = await mass.music.get_library_audiobooks(
            **base_params,
        )
    elif media_type == MediaType.PODCAST:
        library_result = await mass.music.get_library_podcasts(
            **base_params,
        )
    else:
        raise ServiceValidationError(f"Unsupported media type {media_type}")

@@ -34,6 +34,8 @@ ATTR_ARTISTS = "artists"
ATTR_ALBUMS = "albums"
ATTR_TRACKS = "tracks"
ATTR_PLAYLISTS = "playlists"
ATTR_AUDIOBOOKS = "audiobooks"
ATTR_PODCASTS = "podcasts"
ATTR_RADIO = "radio"
ATTR_ITEMS = "items"
ATTR_RADIO_MODE = "radio_mode"

@@ -15,6 +15,7 @@ from .const import (
    ATTR_ALBUM,
    ATTR_ALBUMS,
    ATTR_ARTISTS,
    ATTR_AUDIOBOOKS,
    ATTR_BIT_DEPTH,
    ATTR_CONTENT_TYPE,
    ATTR_CURRENT_INDEX,

@@ -31,6 +32,7 @@ from .const import (
    ATTR_OFFSET,
    ATTR_ORDER_BY,
    ATTR_PLAYLISTS,
    ATTR_PODCASTS,
    ATTR_PROVIDER,
    ATTR_QUEUE_ID,
    ATTR_QUEUE_ITEM_ID,

@@ -101,6 +103,12 @@ SEARCH_RESULT_SCHEMA = vol.Schema(
        vol.Required(ATTR_RADIO): vol.All(
            cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)]
        ),
        vol.Required(ATTR_AUDIOBOOKS): vol.All(
            cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)]
        ),
        vol.Required(ATTR_PODCASTS): vol.All(
            cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)]
        ),
    },
)

@@ -21,7 +21,10 @@ play_media:
        options:
          - artist
          - album
          - audiobook
          - folder
          - playlist
          - podcast
          - track
          - radio
  artist:

@@ -118,7 +121,9 @@ search:
        options:
          - artist
          - album
          - audiobook
          - playlist
          - podcast
          - track
          - radio
  artist:

@@ -160,7 +165,9 @@ get_library:
        options:
          - artist
          - album
          - audiobook
          - playlist
          - podcast
          - track
          - radio
  favorite:

@@ -195,8 +195,11 @@
          "options": {
            "artist": "Artist",
            "album": "Album",
            "audiobook": "Audiobook",
            "folder": "Folder",
            "track": "Track",
            "playlist": "Playlist",
            "podcast": "Podcast",
            "radio": "Radio"
          }
        },

@@ -20,7 +20,7 @@ from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import Throttle
from homeassistant.util import Throttle, dt as dt_util

_LOGGER = logging.getLogger(__name__)

@@ -119,6 +119,8 @@ class NSDepartureSensor(SensorEntity):
        self._time = time
        self._state = None
        self._trips = None
        self._first_trip = None
        self._next_trip = None

    @property
    def name(self):

@@ -133,44 +135,44 @@ class NSDepartureSensor(SensorEntity):
    @property
    def extra_state_attributes(self):
        """Return the state attributes."""
        if not self._trips:
        if not self._trips or self._first_trip is None:
            return None

        if self._trips[0].trip_parts:
            route = [self._trips[0].departure]
            route.extend(k.destination for k in self._trips[0].trip_parts)
        if self._first_trip.trip_parts:
            route = [self._first_trip.departure]
            route.extend(k.destination for k in self._first_trip.trip_parts)

        # Static attributes
        attributes = {
            "going": self._trips[0].going,
            "going": self._first_trip.going,
            "departure_time_planned": None,
            "departure_time_actual": None,
            "departure_delay": False,
            "departure_platform_planned": self._trips[0].departure_platform_planned,
            "departure_platform_actual": self._trips[0].departure_platform_actual,
            "departure_platform_planned": self._first_trip.departure_platform_planned,
            "departure_platform_actual": self._first_trip.departure_platform_actual,
            "arrival_time_planned": None,
            "arrival_time_actual": None,
            "arrival_delay": False,
            "arrival_platform_planned": self._trips[0].arrival_platform_planned,
            "arrival_platform_actual": self._trips[0].arrival_platform_actual,
            "arrival_platform_planned": self._first_trip.arrival_platform_planned,
            "arrival_platform_actual": self._first_trip.arrival_platform_actual,
            "next": None,
            "status": self._trips[0].status.lower(),
            "transfers": self._trips[0].nr_transfers,
            "status": self._first_trip.status.lower(),
            "transfers": self._first_trip.nr_transfers,
            "route": route,
            "remarks": None,
        }

        # Planned departure attributes
        if self._trips[0].departure_time_planned is not None:
            attributes["departure_time_planned"] = self._trips[
                0
            ].departure_time_planned.strftime("%H:%M")
        if self._first_trip.departure_time_planned is not None:
            attributes["departure_time_planned"] = (
                self._first_trip.departure_time_planned.strftime("%H:%M")
            )

        # Actual departure attributes
        if self._trips[0].departure_time_actual is not None:
            attributes["departure_time_actual"] = self._trips[
                0
            ].departure_time_actual.strftime("%H:%M")
        if self._first_trip.departure_time_actual is not None:
            attributes["departure_time_actual"] = (
                self._first_trip.departure_time_actual.strftime("%H:%M")
            )

        # Delay departure attributes
        if (

@@ -182,16 +184,16 @@ class NSDepartureSensor(SensorEntity):
            attributes["departure_delay"] = True

        # Planned arrival attributes
        if self._trips[0].arrival_time_planned is not None:
            attributes["arrival_time_planned"] = self._trips[
                0
            ].arrival_time_planned.strftime("%H:%M")
        if self._first_trip.arrival_time_planned is not None:
            attributes["arrival_time_planned"] = (
                self._first_trip.arrival_time_planned.strftime("%H:%M")
            )

        # Actual arrival attributes
        if self._trips[0].arrival_time_actual is not None:
            attributes["arrival_time_actual"] = self._trips[
                0
            ].arrival_time_actual.strftime("%H:%M")
        if self._first_trip.arrival_time_actual is not None:
            attributes["arrival_time_actual"] = (
                self._first_trip.arrival_time_actual.strftime("%H:%M")
            )

        # Delay arrival attributes
        if (

@@ -202,15 +204,14 @@ class NSDepartureSensor(SensorEntity):
            attributes["arrival_delay"] = True

        # Next attributes
        if len(self._trips) > 1:
            if self._trips[1].departure_time_actual is not None:
                attributes["next"] = self._trips[1].departure_time_actual.strftime(
                    "%H:%M"
                )
            elif self._trips[1].departure_time_planned is not None:
                attributes["next"] = self._trips[1].departure_time_planned.strftime(
                    "%H:%M"
                )
        if self._next_trip.departure_time_actual is not None:
            attributes["next"] = self._next_trip.departure_time_actual.strftime("%H:%M")
        elif self._next_trip.departure_time_planned is not None:
            attributes["next"] = self._next_trip.departure_time_planned.strftime(
                "%H:%M"
            )
        else:
            attributes["next"] = None

        return attributes

@@ -225,6 +226,7 @@ class NSDepartureSensor(SensorEntity):
        ):
            self._state = None
            self._trips = None
            self._first_trip = None
            return

        # Set the search parameter to search from a specific trip time

@@ -236,19 +238,51 @@ class NSDepartureSensor(SensorEntity):
                .strftime("%d-%m-%Y %H:%M")
            )
        else:
            trip_time = datetime.now().strftime("%d-%m-%Y %H:%M")
            trip_time = dt_util.now().strftime("%d-%m-%Y %H:%M")

        try:
            self._trips = self._nsapi.get_trips(
                trip_time, self._departure, self._via, self._heading, True, 0, 2
            )
            if self._trips:
                if self._trips[0].departure_time_actual is None:
                    planned_time = self._trips[0].departure_time_planned
                    self._state = planned_time.strftime("%H:%M")
                all_times = []

                # If a train is delayed we can observe this through departure_time_actual.
                for trip in self._trips:
                    if trip.departure_time_actual is None:
                        all_times.append(trip.departure_time_planned)
                    else:
                        all_times.append(trip.departure_time_actual)

                # Remove all trains that already left.
                filtered_times = [
                    (i, time)
                    for i, time in enumerate(all_times)
                    if time > dt_util.now()
                ]

                if len(filtered_times) > 0:
                    sorted_times = sorted(filtered_times, key=lambda x: x[1])
                    self._first_trip = self._trips[sorted_times[0][0]]
                    self._state = sorted_times[0][1].strftime("%H:%M")

                    # Filter again to remove trains that leave at the exact same time.
                    filtered_times = [
                        (i, time)
                        for i, time in enumerate(all_times)
                        if time > sorted_times[0][1]
                    ]

                    if len(filtered_times) > 0:
                        sorted_times = sorted(filtered_times, key=lambda x: x[1])
                        self._next_trip = self._trips[sorted_times[0][0]]
                    else:
                        self._next_trip = None

                else:
                    actual_time = self._trips[0].departure_time_actual
                    self._state = actual_time.strftime("%H:%M")
                    self._first_trip = None
                    self._state = None

        except (
            requests.exceptions.ConnectionError,
            requests.exceptions.HTTPError,
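The rewritten update path collects one effective departure time per trip (actual when delayed, else planned), drops trains that already left, then picks the earliest as the first trip and the next-earliest distinct time as the next trip. A self-contained sketch of that selection over plain datetimes:

```python
from datetime import datetime, timedelta


def pick_first_and_next(
    departure_times: list[datetime], now: datetime
) -> tuple[int | None, int | None]:
    """Return indexes of the first upcoming trip and the next one after it."""
    upcoming = sorted(
        ((i, t) for i, t in enumerate(departure_times) if t > now),
        key=lambda pair: pair[1],
    )
    if not upcoming:
        return None, None
    first_index, first_time = upcoming[0]
    # Skip trips leaving at exactly the same time as the first one.
    later = [(i, t) for i, t in upcoming if t > first_time]
    return first_index, later[0][0] if later else None


now = datetime(2025, 3, 1, 12, 0)
times = [
    now - timedelta(minutes=5),   # already departed
    now + timedelta(minutes=10),  # first trip
    now + timedelta(minutes=10),  # same time, skipped for "next"
    now + timedelta(minutes=25),  # next trip
]
assert pick_first_and_next(times, now) == (1, 3)
```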
@@ -19,5 +19,5 @@
  "documentation": "https://www.home-assistant.io/integrations/nest",
  "iot_class": "cloud_push",
  "loggers": ["google_nest_sdm"],
  "requirements": ["google-nest-sdm==7.1.3"]
  "requirements": ["google-nest-sdm==7.1.4"]
}

@@ -58,6 +58,7 @@
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
      "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
      "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
      "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]",
      "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",

@@ -12,5 +12,5 @@
  "documentation": "https://www.home-assistant.io/integrations/nexia",
  "iot_class": "cloud_polling",
  "loggers": ["nexia"],
  "requirements": ["nexia==2.0.9"]
  "requirements": ["nexia==2.2.2"]
}

@@ -41,14 +41,7 @@ _LOGGER = logging.getLogger(__name__)

async def async_setup_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) -> bool:
    """Set up OneDrive from a config entry."""
    implementation = await async_get_config_entry_implementation(hass, entry)
    session = OAuth2Session(hass, entry, implementation)

    async def get_access_token() -> str:
        await session.async_ensure_token_valid()
        return cast(str, session.token[CONF_ACCESS_TOKEN])

    client = OneDriveClient(get_access_token, async_get_clientsession(hass))
    client, get_access_token = await _get_onedrive_client(hass, entry)

    # get approot, will be created automatically if it does not exist
    approot = await _handle_item_operation(client.get_approot, "approot")

@@ -164,20 +157,47 @@ async def async_migrate_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) -
    _LOGGER.debug(
        "Migrating OneDrive config entry from version %s.%s", version, minor_version
    )

    client, _ = await _get_onedrive_client(hass, entry)
    instance_id = await async_get_instance_id(hass)
    try:
        approot = await client.get_approot()
        folder = await client.get_drive_item(
            f"{approot.id}:/backups_{instance_id[:8]}:"
        )
    except OneDriveException:
        _LOGGER.exception("Migration to version 1.2 failed")
        return False

    hass.config_entries.async_update_entry(
        entry,
        data={
            **entry.data,
            CONF_FOLDER_ID: "id",  # will be updated during setup_entry
            CONF_FOLDER_ID: folder.id,
            CONF_FOLDER_NAME: f"backups_{instance_id[:8]}",
        },
        minor_version=2,
    )
    _LOGGER.debug("Migration to version 1.2 successful")
    return True


async def _get_onedrive_client(
    hass: HomeAssistant, entry: OneDriveConfigEntry
) -> tuple[OneDriveClient, Callable[[], Awaitable[str]]]:
    """Get OneDrive client."""
    implementation = await async_get_config_entry_implementation(hass, entry)
    session = OAuth2Session(hass, entry, implementation)

    async def get_access_token() -> str:
        await session.async_ensure_token_valid()
        return cast(str, session.token[CONF_ACCESS_TOKEN])

    return (
        OneDriveClient(get_access_token, async_get_clientsession(hass)),
        get_access_token,
    )


async def _handle_item_operation(
    func: Callable[[], Awaitable[Item]], folder: str
) -> Item:
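The refactor centralizes client construction in _get_onedrive_client, which returns both the client and the coroutine it uses to fetch fresh access tokens. A minimal sketch of that token-callback pattern, with a stubbed session standing in for OAuth2Session:

```python
from collections.abc import Awaitable, Callable


class FakeSession:
    """Stand-in for an OAuth2 session that can refresh its token."""

    def __init__(self) -> None:
        self.token = {"access_token": "initial"}

    async def async_ensure_token_valid(self) -> None:
        # A real session would only refresh when the token had expired.
        self.token["access_token"] = "refreshed"


def make_token_getter(session: FakeSession) -> Callable[[], Awaitable[str]]:
    """Build the async callback a client calls before each request."""

    async def get_access_token() -> str:
        await session.async_ensure_token_valid()
        return session.token["access_token"]

    return get_access_token
```

Returning the getter alongside the client lets other call sites (here, the config-entry migration) reuse the same authenticated pipeline without rebuilding the OAuth session.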
homeassistant/components/onedrive/diagnostics.py (new file, 33 lines)
@@ -0,0 +1,33 @@
"""Diagnostics support for OneDrive."""

from __future__ import annotations

from dataclasses import asdict
from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.core import HomeAssistant

from .coordinator import OneDriveConfigEntry

TO_REDACT = {"display_name", "email", CONF_ACCESS_TOKEN, CONF_TOKEN}


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant,
    entry: OneDriveConfigEntry,
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    coordinator = entry.runtime_data.coordinator

    data = {
        "drive": asdict(coordinator.data),
        "config": {
            **entry.data,
            **entry.options,
        },
    }

    return async_redact_data(data, TO_REDACT)

@@ -9,5 +9,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["onedrive_personal_sdk"],
  "quality_scale": "platinum",
  "requirements": ["onedrive-personal-sdk==0.0.12"]
  "requirements": ["onedrive-personal-sdk==0.0.13"]
}

@@ -41,10 +41,7 @@ rules:

  # Gold
  devices: done
  diagnostics:
    status: exempt
    comment: |
      There is no data to diagnose.
  diagnostics: done
  discovery-update-info:
    status: exempt
    comment: |

@@ -2,7 +2,7 @@

from logging import getLogger

from homeassistant.core import HomeAssistant
from homeassistant.core import CoreState, HomeAssistant
from homeassistant.exceptions import HomeAssistantError

from .util import async_migration_in_progress, get_instance

@@ -14,6 +14,8 @@ async def async_pre_backup(hass: HomeAssistant) -> None:
    """Perform operations before a backup starts."""
    _LOGGER.info("Backup start notification, locking database for writes")
    instance = get_instance(hass)
    if hass.state is not CoreState.running:
        raise HomeAssistantError("Home Assistant is not running")
    if async_migration_in_progress(hass):
        raise HomeAssistantError("Database migration in progress")
    await instance.lock_database()

@@ -30,6 +30,12 @@ CONF_DB_INTEGRITY_CHECK = "db_integrity_check"
MAX_QUEUE_BACKLOG_MIN_VALUE = 65000
MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG = 256 * 1024**2

# As soon as we have more than 999 ids, split the query as the
# MySQL optimizer handles it poorly and will no longer
# do an index only scan with a group-by
# https://github.com/home-assistant/core/issues/132865#issuecomment-2543160459
MAX_IDS_FOR_INDEXED_GROUP_BY = 999

# The maximum number of rows (events) we purge in one delete statement

DEFAULT_MAX_BIND_VARS = 4000
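MAX_IDS_FOR_INDEXED_GROUP_BY caps how many metadata ids go into a single IN clause so MySQL keeps using its index-only group-by plan. A small sketch of the chunking helper pattern the recorder relies on; chunked_or_all below is a simplified local stand-in for homeassistant.util.collection.chunked_or_all:

```python
from collections.abc import Collection, Iterable
from itertools import islice

MAX_IDS_FOR_INDEXED_GROUP_BY = 999


def chunked_or_all(items: Collection[int], chunk_size: int) -> Iterable[Collection[int]]:
    """Yield the collection untouched if it fits, else fixed-size chunks."""
    if len(items) <= chunk_size:
        yield items
        return
    iterator = iter(items)
    while chunk := list(islice(iterator, chunk_size)):
        yield chunk


# 2500 ids become three IN-clause chunks: 999 + 999 + 502.
chunks = list(chunked_or_all(range(2500), MAX_IDS_FOR_INDEXED_GROUP_BY))
assert [len(c) for c in chunks] == [999, 999, 502]
```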
@@ -6,11 +6,12 @@ from collections.abc import Callable, Iterable, Iterator
from datetime import datetime
from itertools import groupby
from operator import itemgetter
from typing import Any, cast
from typing import TYPE_CHECKING, Any, cast

from sqlalchemy import (
    CompoundSelect,
    Select,
    StatementLambdaElement,
    Subquery,
    and_,
    func,

@@ -26,8 +27,9 @@ from homeassistant.const import COMPRESSED_STATE_LAST_UPDATED, COMPRESSED_STATE_
from homeassistant.core import HomeAssistant, State, split_entity_id
from homeassistant.helpers.recorder import get_instance
from homeassistant.util import dt as dt_util
from homeassistant.util.collection import chunked_or_all

from ..const import LAST_REPORTED_SCHEMA_VERSION
from ..const import LAST_REPORTED_SCHEMA_VERSION, MAX_IDS_FOR_INDEXED_GROUP_BY
from ..db_schema import (
    SHARED_ATTR_OR_LEGACY_ATTRIBUTES,
    StateAttributes,

@@ -149,6 +151,7 @@ def _significant_states_stmt(
    no_attributes: bool,
    include_start_time_state: bool,
    run_start_ts: float | None,
    slow_dependent_subquery: bool,
) -> Select | CompoundSelect:
    """Query the database for significant state changes."""
    include_last_changed = not significant_changes_only

@@ -187,6 +190,7 @@ def _significant_states_stmt(
            metadata_ids,
            no_attributes,
            include_last_changed,
            slow_dependent_subquery,
        ).subquery(),
        no_attributes,
        include_last_changed,

@@ -257,7 +261,68 @@ def get_significant_states_with_session(
    start_time_ts = start_time.timestamp()
    end_time_ts = datetime_to_timestamp_or_none(end_time)
    single_metadata_id = metadata_ids[0] if len(metadata_ids) == 1 else None
    stmt = lambda_stmt(
    rows: list[Row] = []
    if TYPE_CHECKING:
        assert instance.database_engine is not None
    slow_dependent_subquery = instance.database_engine.optimizer.slow_dependent_subquery
    if include_start_time_state and slow_dependent_subquery:
        # https://github.com/home-assistant/core/issues/137178
        # If we include the start time state we need to limit the
        # number of metadata_ids we query for at a time to avoid
        # hitting limits in the MySQL optimizer that prevent
        # the start time state query from using an index-only optimization
        # to find the start time state.
        iter_metadata_ids = chunked_or_all(metadata_ids, MAX_IDS_FOR_INDEXED_GROUP_BY)
    else:
        iter_metadata_ids = (metadata_ids,)
    for metadata_ids_chunk in iter_metadata_ids:
        stmt = _generate_significant_states_with_session_stmt(
            start_time_ts,
            end_time_ts,
            single_metadata_id,
            metadata_ids_chunk,
            metadata_ids_in_significant_domains,
            significant_changes_only,
            no_attributes,
            include_start_time_state,
            oldest_ts,
            slow_dependent_subquery,
        )
        row_chunk = cast(
            list[Row],
            execute_stmt_lambda_element(session, stmt, None, end_time, orm_rows=False),
        )
        if rows:
            rows += row_chunk
        else:
            # If we have no rows yet, we can just assign the chunk
            # as this is the common case since it's rare that
            # we exceed the MAX_IDS_FOR_INDEXED_GROUP_BY limit
            rows = row_chunk
    return _sorted_states_to_dict(
        rows,
        start_time_ts if include_start_time_state else None,
        entity_ids,
        entity_id_to_metadata_id,
        minimal_response,
        compressed_state_format,
        no_attributes=no_attributes,
    )
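The loop above accumulates result rows one chunk at a time, and it keeps the common single-chunk case copy-free by adopting the first chunk's list directly. A stripped-down sketch of that accumulation pattern:

```python
def collect_rows(chunks: list[list[int]]) -> list[int]:
    """Merge per-chunk query results, reusing the first chunk's list."""
    rows: list[int] = []
    for row_chunk in chunks:
        if rows:
            rows += row_chunk
        else:
            # The first (and usually only) chunk is adopted as-is,
            # avoiding a copy in the common single-chunk case.
            rows = row_chunk
    return rows


assert collect_rows([[1, 2], [3]]) == [1, 2, 3]
assert collect_rows([[1, 2]]) == [1, 2]
```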
def _generate_significant_states_with_session_stmt(
    start_time_ts: float,
    end_time_ts: float | None,
    single_metadata_id: int | None,
    metadata_ids: list[int],
    metadata_ids_in_significant_domains: list[int],
    significant_changes_only: bool,
    no_attributes: bool,
    include_start_time_state: bool,
    oldest_ts: float | None,
    slow_dependent_subquery: bool,
) -> StatementLambdaElement:
    return lambda_stmt(
        lambda: _significant_states_stmt(
            start_time_ts,
            end_time_ts,

@@ -268,6 +333,7 @@ def get_significant_states_with_session(
            no_attributes,
            include_start_time_state,
            oldest_ts,
            slow_dependent_subquery,
        ),
        track_on=[
            bool(single_metadata_id),

@@ -276,17 +342,9 @@ def get_significant_states_with_session(
            significant_changes_only,
            no_attributes,
            include_start_time_state,
            slow_dependent_subquery,
        ],
    )
    return _sorted_states_to_dict(
        execute_stmt_lambda_element(session, stmt, None, end_time, orm_rows=False),
        start_time_ts if include_start_time_state else None,
        entity_ids,
        entity_id_to_metadata_id,
        minimal_response,
        compressed_state_format,
        no_attributes=no_attributes,
    )


def get_full_significant_states_with_session(

@@ -554,13 +612,14 @@ def get_last_state_changes(
    )


def _get_start_time_state_for_entities_stmt(
def _get_start_time_state_for_entities_stmt_dependent_sub_query(
    epoch_time: float,
    metadata_ids: list[int],
    no_attributes: bool,
    include_last_changed: bool,
) -> Select:
    """Baked query to get states for specific entities."""
    # Engine has a fast dependent subquery optimizer
    # This query is the result of significant research in
    # https://github.com/home-assistant/core/issues/132865
    # A reverse index scan with a limit 1 is the fastest way to get the

@@ -570,7 +629,9 @@ def _get_start_time_state_for_entities_stmt(
    # before a specific point in time for all entities.
    stmt = (
        _stmt_and_join_attributes_for_start_state(
            no_attributes, include_last_changed, False
            no_attributes=no_attributes,
            include_last_changed=include_last_changed,
            include_last_reported=False,
        )
        .select_from(StatesMeta)
        .join(

@@ -600,6 +661,55 @@ def _get_start_time_state_for_entities_stmt(
    )


def _get_start_time_state_for_entities_stmt_group_by(
    epoch_time: float,
    metadata_ids: list[int],
    no_attributes: bool,
    include_last_changed: bool,
) -> Select:
    """Baked query to get states for specific entities."""
    # Simple group-by for MySQL, must use less
    # than 1000 metadata_ids in the IN clause for MySQL
    # or it will optimize poorly. Callers are responsible
    # for ensuring that the number of metadata_ids is less
    # than 1000.
    most_recent_states_for_entities_by_date = (
        select(
            States.metadata_id.label("max_metadata_id"),
            func.max(States.last_updated_ts).label("max_last_updated"),
        )
        .filter(
            (States.last_updated_ts < epoch_time) & States.metadata_id.in_(metadata_ids)
        )
        .group_by(States.metadata_id)
        .subquery()
    )
    stmt = (
        _stmt_and_join_attributes_for_start_state(
            no_attributes=no_attributes,
            include_last_changed=include_last_changed,
            include_last_reported=False,
        )
        .join(
            most_recent_states_for_entities_by_date,
            and_(
                States.metadata_id
                == most_recent_states_for_entities_by_date.c.max_metadata_id,
                States.last_updated_ts
                == most_recent_states_for_entities_by_date.c.max_last_updated,
            ),
        )
        .filter(
            (States.last_updated_ts < epoch_time) & States.metadata_id.in_(metadata_ids)
        )
    )
    if no_attributes:
        return stmt
    return stmt.outerjoin(
        StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
    )


def _get_oldest_possible_ts(
    hass: HomeAssistant, utc_point_in_time: datetime
) -> float | None:

@@ -620,6 +730,7 @@ def _get_start_time_state_stmt(
    metadata_ids: list[int],
    no_attributes: bool,
    include_last_changed: bool,
    slow_dependent_subquery: bool,
) -> Select:
    """Return the states at a specific point in time."""
    if single_metadata_id:

@@ -634,7 +745,15 @@ def _get_start_time_state_stmt(
    )
    # We have more than one entity to look at so we need to do a query on states
    # since the last recorder run started.
    return _get_start_time_state_for_entities_stmt(
    if slow_dependent_subquery:
        return _get_start_time_state_for_entities_stmt_group_by(
            epoch_time,
            metadata_ids,
            no_attributes,
            include_last_changed,
        )

    return _get_start_time_state_for_entities_stmt_dependent_sub_query(
        epoch_time,
        metadata_ids,
        no_attributes,

@@ -37,3 +37,13 @@ class DatabaseOptimizer:
    # https://wiki.postgresql.org/wiki/Loose_indexscan
    # https://github.com/home-assistant/core/issues/126084
    slow_range_in_select: bool

    # MySQL 8.x+ can end up with a file-sort on a dependent subquery
    # which makes the query painfully slow.
    # https://github.com/home-assistant/core/issues/137178
    # The solution is to use multiple indexed group-by queries instead
    # of the subquery as long as the group by does not exceed
    # 999 elements since as soon as we hit 1000 elements MySQL
    # will no longer use the group_index_range optimization.
    # https://github.com/home-assistant/core/issues/132865#issuecomment-2543160459
    slow_dependent_subquery: bool
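DatabaseOptimizer carries per-engine quirk flags that the query builders consult to pick a strategy. A minimal sketch of that shape and how a caller branches on it; the dataclass fields mirror the diff, while the chooser function is purely illustrative:

```python
from dataclasses import dataclass


@dataclass
class DatabaseOptimizer:
    """Flags describing optimizer quirks of the connected database."""

    slow_range_in_select: bool
    slow_dependent_subquery: bool


def pick_start_state_strategy(optimizer: DatabaseOptimizer) -> str:
    """Choose the start-time-state query shape for this engine."""
    # MySQL 8.x+ file-sorts dependent subqueries, so fall back to
    # chunked indexed group-by queries there.
    if optimizer.slow_dependent_subquery:
        return "indexed_group_by"
    return "dependent_subquery"


assert pick_start_state_strategy(DatabaseOptimizer(False, True)) == "indexed_group_by"
```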
@@ -28,6 +28,7 @@ from homeassistant.helpers.recorder import DATA_RECORDER
from homeassistant.helpers.singleton import singleton
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
from homeassistant.util import dt as dt_util
from homeassistant.util.collection import chunked_or_all
from homeassistant.util.unit_conversion import (
    AreaConverter,
    BaseUnitConverter,

@@ -59,6 +60,7 @@ from .const import (
    INTEGRATION_PLATFORM_LIST_STATISTIC_IDS,
    INTEGRATION_PLATFORM_UPDATE_STATISTICS_ISSUES,
    INTEGRATION_PLATFORM_VALIDATE_STATISTICS,
    MAX_IDS_FOR_INDEXED_GROUP_BY,
    SupportedDialect,
)
from .db_schema import (

@@ -1669,6 +1671,7 @@ def _augment_result_with_change(
    drop_sum = "sum" not in _types
    prev_sums = {}
    if tmp := _statistics_at_time(
        get_instance(hass),
        session,
        {metadata[statistic_id][0] for statistic_id in result},
        table,

@@ -2027,7 +2030,39 @@ def get_latest_short_term_statistics_with_session(
    )


def _generate_statistics_at_time_stmt(
def _generate_statistics_at_time_stmt_group_by(
    table: type[StatisticsBase],
    metadata_ids: set[int],
    start_time_ts: float,
    types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> StatementLambdaElement:
    """Create the statement for finding the statistics for a given time."""
    # Simple group-by for MySQL, must use less
    # than 1000 metadata_ids in the IN clause for MySQL
    # or it will optimize poorly. Callers are responsible
    # for ensuring that the number of metadata_ids is less
    # than 1000.
    return _generate_select_columns_for_types_stmt(table, types) + (
        lambda q: q.join(
            most_recent_statistic_ids := (
                select(
                    func.max(table.start_ts).label("max_start_ts"),
                    table.metadata_id.label("max_metadata_id"),
                )
                .filter(table.start_ts < start_time_ts)
                .filter(table.metadata_id.in_(metadata_ids))
                .group_by(table.metadata_id)
                .subquery()
            ),
            and_(
                table.start_ts == most_recent_statistic_ids.c.max_start_ts,
                table.metadata_id == most_recent_statistic_ids.c.max_metadata_id,
            ),
        )
    )


def _generate_statistics_at_time_stmt_dependent_sub_query(
    table: type[StatisticsBase],
    metadata_ids: set[int],
    start_time_ts: float,

@@ -2041,8 +2076,7 @@ def _generate_statistics_at_time_stmt(
    # databases. Since all databases support this query as a join
    # condition we can use it as a subquery to get the last start_time_ts
    # before a specific point in time for all entities.
    stmt = _generate_select_columns_for_types_stmt(table, types)
    stmt += (
    return _generate_select_columns_for_types_stmt(table, types) + (
        lambda q: q.select_from(StatisticsMeta)
        .join(
            table,

@@ -2064,10 +2098,10 @@ def _generate_statistics_at_time_stmt(
        )
        .where(table.metadata_id.in_(metadata_ids))
    )
    return stmt


def _statistics_at_time(
    instance: Recorder,
    session: Session,
    metadata_ids: set[int],
    table: type[StatisticsBase],

@@ -2076,8 +2110,41 @@ def _statistics_at_time(
) -> Sequence[Row] | None:
    """Return last known statistics, earlier than start_time, for the metadata_ids."""
    start_time_ts = start_time.timestamp()
    stmt = _generate_statistics_at_time_stmt(table, metadata_ids, start_time_ts, types)
    return cast(Sequence[Row], execute_stmt_lambda_element(session, stmt))
    if TYPE_CHECKING:
        assert instance.database_engine is not None
    if not instance.database_engine.optimizer.slow_dependent_subquery:
        stmt = _generate_statistics_at_time_stmt_dependent_sub_query(
            table=table,
            metadata_ids=metadata_ids,
            start_time_ts=start_time_ts,
            types=types,
        )
        return cast(list[Row], execute_stmt_lambda_element(session, stmt))
    rows: list[Row] = []
    # https://github.com/home-assistant/core/issues/132865
    # If we include the start time state we need to limit the
    # number of metadata_ids we query for at a time to avoid
    # hitting limits in the MySQL optimizer that prevent
    # the start time state query from using an index-only optimization
    # to find the start time state.
    for metadata_ids_chunk in chunked_or_all(
        metadata_ids, MAX_IDS_FOR_INDEXED_GROUP_BY
    ):
        stmt = _generate_statistics_at_time_stmt_group_by(
            table=table,
            metadata_ids=metadata_ids_chunk,
            start_time_ts=start_time_ts,
            types=types,
        )
        row_chunk = cast(list[Row], execute_stmt_lambda_element(session, stmt))
        if rows:
            rows += row_chunk
        else:
            # If we have no rows yet, we can just assign the chunk
            # as this is the common case since it's rare that
            # we exceed the MAX_IDS_FOR_INDEXED_GROUP_BY limit
            rows = row_chunk
    return rows
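Both statement shapes answer the same question: the most recent row per metadata_id before a cutoff. The group-by variant first computes (metadata_id, max(start_ts)) pairs and joins back to the full rows. A self-contained sketch of that idea over plain tuples:

```python
def latest_per_id(
    rows: list[tuple[int, float, str]], cutoff: float
) -> dict[int, tuple[int, float, str]]:
    """Pick the newest (metadata_id, start_ts, value) row per id before cutoff."""
    # Phase 1: the "group by" - max start_ts per metadata_id.
    max_ts: dict[int, float] = {}
    for metadata_id, start_ts, _ in rows:
        if start_ts < cutoff and start_ts > max_ts.get(metadata_id, float("-inf")):
            max_ts[metadata_id] = start_ts
    # Phase 2: the "join back" - keep rows matching the per-id maximum.
    return {
        metadata_id: (metadata_id, start_ts, value)
        for metadata_id, start_ts, value in rows
        if max_ts.get(metadata_id) == start_ts
    }


rows = [(1, 10.0, "a"), (1, 20.0, "b"), (2, 5.0, "c"), (2, 99.0, "d")]
assert latest_per_id(rows, cutoff=50.0) == {1: (1, 20.0, "b"), 2: (2, 5.0, "c")}
```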
def _build_sum_converted_stats(

@@ -464,6 +464,7 @@ def setup_connection_for_dialect(
    """Execute statements needed for dialect connection."""
    version: AwesomeVersion | None = None
    slow_range_in_select = False
    slow_dependent_subquery = False
    if dialect_name == SupportedDialect.SQLITE:
        if first_connection:
            old_isolation = dbapi_connection.isolation_level  # type: ignore[attr-defined]

@@ -505,9 +506,8 @@ def setup_connection_for_dialect(
        result = query_on_connection(dbapi_connection, "SELECT VERSION()")
        version_string = result[0][0]
        version = _extract_version_from_server_response(version_string)
        is_maria_db = "mariadb" in version_string.lower()

        if is_maria_db:
        if "mariadb" in version_string.lower():
            if not version or version < MIN_VERSION_MARIA_DB:
                _raise_if_version_unsupported(
                    version or version_string, "MariaDB", MIN_VERSION_MARIA_DB

@@ -523,19 +523,21 @@ def setup_connection_for_dialect(
                    instance.hass,
                    version,
                )

            slow_range_in_select = bool(
                not version
                or version < MARIADB_WITH_FIXED_IN_QUERIES_105
                or MARIA_DB_106 <= version < MARIADB_WITH_FIXED_IN_QUERIES_106
                or MARIA_DB_107 <= version < MARIADB_WITH_FIXED_IN_QUERIES_107
                or MARIA_DB_108 <= version < MARIADB_WITH_FIXED_IN_QUERIES_108
            )
        elif not version or version < MIN_VERSION_MYSQL:
            _raise_if_version_unsupported(
                version or version_string, "MySQL", MIN_VERSION_MYSQL
            )

        slow_range_in_select = bool(
            not version
            or version < MARIADB_WITH_FIXED_IN_QUERIES_105
            or MARIA_DB_106 <= version < MARIADB_WITH_FIXED_IN_QUERIES_106
            or MARIA_DB_107 <= version < MARIADB_WITH_FIXED_IN_QUERIES_107
            or MARIA_DB_108 <= version < MARIADB_WITH_FIXED_IN_QUERIES_108
        )
        else:
            # MySQL
            # https://github.com/home-assistant/core/issues/137178
            slow_dependent_subquery = True

        # Ensure all times are using UTC to avoid issues with daylight savings
        execute_on_connection(dbapi_connection, "SET time_zone = '+00:00'")

@@ -565,7 +567,10 @@ def setup_connection_for_dialect(
    return DatabaseEngine(
        dialect=SupportedDialect(dialect_name),
        version=version,
        optimizer=DatabaseOptimizer(slow_range_in_select=slow_range_in_select),
        optimizer=DatabaseOptimizer(
            slow_range_in_select=slow_range_in_select,
            slow_dependent_subquery=slow_dependent_subquery,
        ),
        max_bind_vars=DEFAULT_MAX_BIND_VARS,
    )
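setup_connection_for_dialect derives both optimizer flags from the reported server version: MariaDB keeps the per-version range-in-select quirk checks, while plain MySQL gets slow_dependent_subquery=True. A condensed, illustrative sketch of that classification; real code compares AwesomeVersion objects against pinned thresholds, which is elided here:

```python
def classify_server(version_string: str) -> dict[str, bool]:
    """Derive optimizer quirk flags from a SELECT VERSION() string (sketch)."""
    is_mariadb = "mariadb" in version_string.lower()
    return {
        # MariaDB needs per-version range-in-select checks (elided); treat
        # every MariaDB as potentially affected for illustration only.
        "slow_range_in_select": is_mariadb,
        # Plain MySQL 8.x+ file-sorts dependent subqueries.
        "slow_dependent_subquery": not is_mariadb,
    }


assert classify_server("8.0.36")["slow_dependent_subquery"] is True
assert classify_server("10.11.6-MariaDB")["slow_dependent_subquery"] is False
```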
@@ -65,6 +65,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
            translation_key="no_user_agreement",
        ) from err
    except RoborockException as err:
        _LOGGER.debug("Failed to get Roborock home data: %s", err)
        raise ConfigEntryNotReady(
            "Failed to get Roborock home data",
            translation_domain=DOMAIN,

@@ -82,13 +83,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
    # Get a Coordinator if the device is available or if we have connected to the device before
    coordinators = await asyncio.gather(
        *build_setup_functions(
            hass,
            entry,
            device_map,
            user_data,
            product_info,
            home_data.rooms,
            api_client,
            hass, entry, device_map, user_data, product_info, home_data.rooms
        ),
        return_exceptions=True,
    )

@@ -140,7 +135,6 @@ def build_setup_functions(
    user_data: UserData,
    product_info: dict[str, HomeDataProduct],
    home_data_rooms: list[HomeDataRoom],
    api_client: RoborockApiClient,
) -> list[
    Coroutine[
        Any,

@@ -157,7 +151,6 @@ def build_setup_functions(
            device,
            product_info[device.product_id],
            home_data_rooms,
            api_client,
        )
        for device in device_map.values()
    ]

@@ -170,12 +163,11 @@ async def setup_device(
    device: HomeDataDevice,
    product_info: HomeDataProduct,
    home_data_rooms: list[HomeDataRoom],
    api_client: RoborockApiClient,
) -> RoborockDataUpdateCoordinator | RoborockDataUpdateCoordinatorA01 | None:
    """Set up a coordinator for a given device."""
    if device.pv == "1.0":
        return await setup_device_v1(
            hass, entry, user_data, device, product_info, home_data_rooms, api_client
            hass, entry, user_data, device, product_info, home_data_rooms
        )
    if device.pv == "A01":
        return await setup_device_a01(hass, entry, user_data, device, product_info)

@@ -195,7 +187,6 @@ async def setup_device_v1(
    device: HomeDataDevice,
    product_info: HomeDataProduct,
    home_data_rooms: list[HomeDataRoom],
    api_client: RoborockApiClient,
) -> RoborockDataUpdateCoordinator | None:
    """Set up a device Coordinator."""
    mqtt_client = await hass.async_add_executor_job(

@@ -217,15 +208,7 @@ async def setup_device_v1(
        await mqtt_client.async_release()
        raise
    coordinator = RoborockDataUpdateCoordinator(
        hass,
        entry,
        device,
        networking,
        product_info,
        mqtt_client,
        home_data_rooms,
        api_client,
        user_data,
        hass, entry, device, networking, product_info, mqtt_client, home_data_rooms
    )
    try:
        await coordinator.async_config_entry_first_refresh()

@@ -36,7 +36,6 @@ PLATFORMS = [
    Platform.BUTTON,
    Platform.IMAGE,
    Platform.NUMBER,
    Platform.SCENE,
    Platform.SELECT,
    Platform.SENSOR,
    Platform.SWITCH,

@@ -10,26 +10,17 @@ import logging
from propcache.api import cached_property
from roborock import HomeDataRoom
from roborock.code_mappings import RoborockCategory
from roborock.containers import (
    DeviceData,
    HomeDataDevice,
    HomeDataProduct,
    HomeDataScene,
    NetworkInfo,
    UserData,
)
from roborock.containers import DeviceData, HomeDataDevice, HomeDataProduct, NetworkInfo
from roborock.exceptions import RoborockException
from roborock.roborock_message import RoborockDyadDataProtocol, RoborockZeoProtocol
from roborock.roborock_typing import DeviceProp
from roborock.version_1_apis.roborock_local_client_v1 import RoborockLocalClientV1
from roborock.version_1_apis.roborock_mqtt_client_v1 import RoborockMqttClientV1
from roborock.version_a01_apis import RoborockClientA01
from roborock.web_api import RoborockApiClient

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_CONNECTIONS
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.typing import StateType

@@ -76,8 +67,6 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
        product_info: HomeDataProduct,
        cloud_api: RoborockMqttClientV1,
        home_data_rooms: list[HomeDataRoom],
        api_client: RoborockApiClient,
        user_data: UserData,
    ) -> None:
        """Initialize."""
        super().__init__(

@@ -100,7 +89,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
        self.cloud_api = cloud_api
        self.device_info = DeviceInfo(
            name=self.roborock_device_info.device.name,
            identifiers={(DOMAIN, self.duid)},
            identifiers={(DOMAIN, self.roborock_device_info.device.duid)},
            manufacturer="Roborock",
            model=self.roborock_device_info.product.model,
            model_id=self.roborock_device_info.product.model,

@@ -114,10 +103,8 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
        self.maps: dict[int, RoborockMapInfo] = {}
        self._home_data_rooms = {str(room.id): room.name for room in home_data_rooms}
        self.map_storage = RoborockMapStorage(
            hass, self.config_entry.entry_id, self.duid_slug
            hass, self.config_entry.entry_id, slugify(self.duid)
        )
        self._user_data = user_data
        self._api_client = api_client

    async def _async_setup(self) -> None:
        """Set up the coordinator."""

@@ -147,7 +134,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
        except RoborockException:
            _LOGGER.warning(
                "Using the cloud API for device %s. This is not recommended as it can lead to rate limiting. We recommend making your vacuum accessible by your Home Assistant instance",
                self.duid,
                self.roborock_device_info.device.duid,
            )
            await self.api.async_disconnect()
            # We use the cloud api if the local api fails to connect.

@@ -179,6 +166,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
            # Get the rooms for that map id.
            await self.set_current_map_rooms()
        except RoborockException as ex:
            _LOGGER.debug("Failed to update data: %s", ex)
            raise UpdateFailed(ex) from ex
        return self.roborock_device_info.props

@@ -206,34 +194,6 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
            for room in room_mapping or ()
        }

    async def get_scenes(self) -> list[HomeDataScene]:
        """Get scenes."""
        try:
            return await self._api_client.get_scenes(self._user_data, self.duid)
        except RoborockException as err:
            _LOGGER.error("Failed to get scenes %s", err)
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="command_failed",
                translation_placeholders={
                    "command": "get_scenes",
                },
            ) from err

    async def execute_scene(self, scene_id: int) -> None:
        """Execute scene."""
        try:
            await self._api_client.execute_scene(self._user_data, scene_id)
        except RoborockException as err:
            _LOGGER.error("Failed to execute scene %s %s", scene_id, err)
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="command_failed",
                translation_placeholders={
                    "command": "execute_scene",
                },
            ) from err

    @cached_property
    def duid(self) -> str:
        """Get the unique id of the device as specified by Roborock."""

@@ -4,6 +4,7 @@ import asyncio
from collections.abc import Callable
from datetime import datetime
import io
import logging

from roborock import RoborockCommand
from vacuum_map_parser_base.config.color import ColorsPalette

@@ -30,6 +31,8 @@ from .const import (
from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator
from .entity import RoborockCoordinatedEntityV1

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(
    hass: HomeAssistant,

@@ -48,7 +51,11 @@ async def async_setup_entry(
    )

    def parse_image(map_bytes: bytes) -> bytes | None:
        parsed_map = parser.parse(map_bytes)
        try:
            parsed_map = parser.parse(map_bytes)
        except (IndexError, ValueError) as err:
            _LOGGER.debug("Exception when parsing map contents: %s", err)
            return None
        if parsed_map.image is None:
            return None
        img_byte_arr = io.BytesIO()

@@ -150,6 +157,7 @@ class RoborockMap(RoborockCoordinatedEntityV1, ImageEntity):
            not isinstance(response[0], bytes)
            or (content := self.parser(response[0])) is None
        ):
            _LOGGER.debug("Failed to parse map contents: %s", response[0])
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="map_failure",

@@ -1,64 +0,0 @@
"""Support for Roborock scene."""

from __future__ import annotations

import asyncio
from typing import Any

from homeassistant.components.scene import Scene as SceneEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import RoborockConfigEntry
from .coordinator import RoborockDataUpdateCoordinator
from .entity import RoborockEntity


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: RoborockConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up scene platform."""
    scene_lists = await asyncio.gather(
        *[coordinator.get_scenes() for coordinator in config_entry.runtime_data.v1],
    )
    async_add_entities(
        RoborockSceneEntity(
            coordinator,
            EntityDescription(
                key=str(scene.id),
                name=scene.name,
            ),
        )
        for coordinator, scenes in zip(
            config_entry.runtime_data.v1, scene_lists, strict=True
        )
        for scene in scenes
    )


class RoborockSceneEntity(RoborockEntity, SceneEntity):
    """A class to define Roborock scene entities."""

    entity_description: EntityDescription

    def __init__(
        self,
        coordinator: RoborockDataUpdateCoordinator,
        entity_description: EntityDescription,
    ) -> None:
        """Create a scene entity."""
        super().__init__(
            f"{entity_description.key}_{coordinator.duid_slug}",
            coordinator.device_info,
            coordinator.api,
        )
        self._scene_id = int(entity_description.key)
        self._coordinator = coordinator
        self.entity_description = entity_description

    async def async_activate(self, **kwargs: Any) -> None:
        """Activate the scene."""
        await self._coordinator.execute_scene(self._scene_id)

@@ -20,5 +20,5 @@
  "documentation": "https://www.home-assistant.io/integrations/sense",
  "iot_class": "cloud_polling",
  "loggers": ["sense_energy"],
  "requirements": ["sense-energy==0.13.5"]
  "requirements": ["sense-energy==0.13.6"]
}

@@ -130,9 +130,10 @@ async def async_setup_entry(
        """Handle additions of devices and sensors."""
        entities: list[SensiboMotionSensor | SensiboDeviceSensor] = []
        nonlocal added_devices
        new_devices, remove_devices, added_devices = coordinator.get_devices(
        new_devices, remove_devices, new_added_devices = coordinator.get_devices(
            added_devices
        )
        added_devices = new_added_devices

        if LOGGER.isEnabledFor(logging.DEBUG):
            LOGGER.debug(

@@ -168,8 +169,7 @@ async def async_setup_entry(
                    device_data.model, DEVICE_SENSOR_TYPES
                )
            )

            async_add_entities(entities)
        async_add_entities(entities)

    entry.async_on_unload(coordinator.async_add_listener(_add_remove_devices))
    _add_remove_devices()

@@ -46,7 +46,8 @@ async def async_setup_entry(
    def _add_remove_devices() -> None:
        """Handle additions of devices and sensors."""
        nonlocal added_devices
        new_devices, _, added_devices = coordinator.get_devices(added_devices)
        new_devices, _, new_added_devices = coordinator.get_devices(added_devices)
        added_devices = new_added_devices

        if new_devices:
            async_add_entities(

@@ -149,7 +149,8 @@ async def async_setup_entry(
    def _add_remove_devices() -> None:
        """Handle additions of devices and sensors."""
        nonlocal added_devices
        new_devices, _, added_devices = coordinator.get_devices(added_devices)
        new_devices, _, new_added_devices = coordinator.get_devices(added_devices)
        added_devices = new_added_devices

        if new_devices:
            async_add_entities(

@@ -56,18 +56,31 @@ class SensiboDataUpdateCoordinator(DataUpdateCoordinator[SensiboData]):
    ) -> tuple[set[str], set[str], set[str]]:
        """Addition and removal of devices."""
        data = self.data
        motion_sensors = {
        current_motion_sensors = {
            sensor_id
            for device_data in data.parsed.values()
            if device_data.motion_sensors
            for sensor_id in device_data.motion_sensors
        }
        devices: set[str] = set(data.parsed)
        new_devices: set[str] = motion_sensors | devices - added_devices
        remove_devices = added_devices - devices - motion_sensors
        added_devices = (added_devices - remove_devices) | new_devices
        current_devices: set[str] = set(data.parsed)
        LOGGER.debug(
            "Current devices: %s, motion sensors: %s",
            current_devices,
            current_motion_sensors,
        )
        new_devices: set[str] = (
            current_motion_sensors | current_devices
        ) - added_devices
        remove_devices = added_devices - current_devices - current_motion_sensors
        new_added_devices = (added_devices - remove_devices) | new_devices

        return (new_devices, remove_devices, added_devices)
        LOGGER.debug(
            "New devices: %s, Removed devices: %s, Added devices: %s",
            new_devices,
            remove_devices,
            new_added_devices,
        )
        return (new_devices, remove_devices, new_added_devices)
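get_devices now derives three sets, the newly seen ids, the ids to remove, and the updated tracking set, from the coordinator data and the previously added ids. A tiny worked example of those set operations:

```python
def diff_devices(
    current: set[str], motion: set[str], added: set[str]
) -> tuple[set[str], set[str], set[str]]:
    """Mirror the coordinator's set arithmetic (sketch)."""
    new_devices = (motion | current) - added
    remove_devices = added - current - motion
    new_added = (added - remove_devices) | new_devices
    return new_devices, remove_devices, new_added


# "dev2" disappeared from the account, "dev3" is new:
new, removed, tracked = diff_devices(
    current={"dev1", "dev3"}, motion={"m1"}, added={"dev1", "dev2", "m1"}
)
assert (new, removed, tracked) == ({"dev3"}, {"dev2"}, {"dev1", "dev3", "m1"})
```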
|
||||
|
||||
async def _async_update_data(self) -> SensiboData:
|
||||
"""Fetch data from Sensibo."""
|
||||
|
||||
@@ -76,7 +76,8 @@ async def async_setup_entry(
    def _add_remove_devices() -> None:
        """Handle additions of devices and sensors."""
        nonlocal added_devices
        new_devices, _, added_devices = coordinator.get_devices(added_devices)
        new_devices, _, new_added_devices = coordinator.get_devices(added_devices)
        added_devices = new_added_devices

        if new_devices:
            async_add_entities(

@@ -115,7 +115,8 @@ async def async_setup_entry(
    def _add_remove_devices() -> None:
        """Handle additions of devices and sensors."""
        nonlocal added_devices
        new_devices, _, added_devices = coordinator.get_devices(added_devices)
        new_devices, _, new_added_devices = coordinator.get_devices(added_devices)
        added_devices = new_added_devices

        if new_devices:
            async_add_entities(

@@ -253,9 +253,8 @@ async def async_setup_entry(

        entities: list[SensiboMotionSensor | SensiboDeviceSensor] = []
        nonlocal added_devices
        new_devices, remove_devices, added_devices = coordinator.get_devices(
            added_devices
        )
        new_devices, _, new_added_devices = coordinator.get_devices(added_devices)
        added_devices = new_added_devices

        if new_devices:
            entities.extend(

@@ -89,7 +89,8 @@ async def async_setup_entry(
    def _add_remove_devices() -> None:
        """Handle additions of devices and sensors."""
        nonlocal added_devices
        new_devices, _, added_devices = coordinator.get_devices(added_devices)
        new_devices, _, new_added_devices = coordinator.get_devices(added_devices)
        added_devices = new_added_devices

        if new_devices:
            async_add_entities(

@@ -56,7 +56,8 @@ async def async_setup_entry(
    def _add_remove_devices() -> None:
        """Handle additions of devices and sensors."""
        nonlocal added_devices
        new_devices, _, added_devices = coordinator.get_devices(added_devices)
        new_devices, _, new_added_devices = coordinator.get_devices(added_devices)
        added_devices = new_added_devices

        if new_devices:
            async_add_entities(

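The same three-line change repeats across each Sensibo platform setup above: ask the coordinator for the delta, rebind the closed-over tracking set in one step, then add only the new entities. A toy, runnable version of that closure pattern (the coordinator and entity sink are stand-ins, not Home Assistant APIs):

from collections.abc import Callable

class FakeCoordinator:
    """Stand-in that reports a mutable set of device ids."""

    def __init__(self) -> None:
        self.devices: set[str] = {"AC1"}

    def get_devices(self, added: set[str]) -> tuple[set[str], set[str], set[str]]:
        new = self.devices - added
        removed = added - self.devices
        return new, removed, (added - removed) | new

def make_listener(
    coordinator: FakeCoordinator, add_entities: Callable[[list[str]], None]
) -> Callable[[], None]:
    added_devices: set[str] = set()

    def _add_remove_devices() -> None:
        nonlocal added_devices
        new_devices, _, new_added_devices = coordinator.get_devices(added_devices)
        added_devices = new_added_devices  # rebind atomically, as in the diff
        if new_devices:
            add_entities(sorted(new_devices))

    return _add_remove_devices

coordinator = FakeCoordinator()
listener = make_listener(coordinator, print)
listener()                       # ['AC1']
coordinator.devices.add("AC2")
listener()                       # ['AC2']
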
@@ -8,7 +8,7 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["aioshelly"],
  "requirements": ["aioshelly==13.0.0"],
  "requirements": ["aioshelly==13.1.0"],
  "zeroconf": [
    {
      "type": "_http._tcp.local.",

@@ -2,6 +2,7 @@

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
import logging
from typing import TYPE_CHECKING, cast
@@ -11,6 +12,7 @@ from pysmartthings import (
    Attribute,
    Capability,
    Device,
    DeviceEvent,
    Scene,
    SmartThings,
    SmartThingsAuthenticationFailedError,
@@ -21,13 +23,21 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import (
    OAuth2Session,
    async_get_config_entry_implementation,
)

from .const import CONF_INSTALLED_APP_ID, CONF_LOCATION_ID, MAIN, OLD_DATA
from .const import (
    CONF_INSTALLED_APP_ID,
    CONF_LOCATION_ID,
    DOMAIN,
    EVENT_BUTTON,
    MAIN,
    OLD_DATA,
)

_LOGGER = logging.getLogger(__name__)

@@ -113,6 +123,28 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry)
        scenes=scenes,
    )

    def handle_button_press(event: DeviceEvent) -> None:
        """Handle a button press."""
        if (
            event.capability is Capability.BUTTON
            and event.attribute is Attribute.BUTTON
        ):
            hass.bus.async_fire(
                EVENT_BUTTON,
                {
                    "component_id": event.component_id,
                    "device_id": event.device_id,
                    "location_id": event.location_id,
                    "value": event.value,
                    "name": entry.runtime_data.devices[event.device_id].device.label,
                    "data": event.data,
                },
            )

    entry.async_on_unload(
        client.add_unspecified_device_event_listener(handle_button_press)
    )

    entry.async_create_background_task(
        hass,
        client.subscribe(

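With the new handle_button_press above, button pushes surface on the Home Assistant event bus as smartthings.button events. A schematic listener in Python (standard hass.bus usage; assumes a running Home Assistant instance to call register with):

from homeassistant.core import Event, HomeAssistant, callback

EVENT_BUTTON = "smartthings.button"  # mirrors the new const.py entry below

@callback
def _on_button(event: Event) -> None:
    # event.data carries component_id, device_id, location_id, value, name, data
    print(f"{event.data['name']}: {event.data['value']}")

def register(hass: HomeAssistant) -> None:
    hass.bus.async_listen(EVENT_BUTTON, _on_button)
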
@@ -123,6 +155,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry)

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    device_registry = dr.async_get(hass)
    device_entries = dr.async_entries_for_config_entry(device_registry, entry.entry_id)
    for device_entry in device_entries:
        device_id = next(
            identifier[1]
            for identifier in device_entry.identifiers
            if identifier[0] == DOMAIN
        )
        if device_id in entry.runtime_data.devices:
            continue
        device_registry.async_update_device(
            device_entry.id, remove_config_entry_id=entry.entry_id
        )

    return True

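The cleanup above relies on device registry identifiers being (domain, device_id) tuples; the next(...) expression pulls out the SmartThings id so that devices no longer reported by the API can be detached from the config entry. The lookup in isolation:

DOMAIN = "smartthings"
identifiers = {("smartthings", "abc-123"), ("other_domain", "xyz")}
device_id = next(ident[1] for ident in identifiers if ident[0] == DOMAIN)
assert device_id == "abc-123"
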
@@ -145,25 +191,62 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    return True


KEEP_CAPABILITY_QUIRK: dict[
    Capability | str, Callable[[dict[Attribute | str, Status]], bool]
] = {
    Capability.WASHER_OPERATING_STATE: (
        lambda status: status[Attribute.SUPPORTED_MACHINE_STATES].value is not None
    ),
    Capability.DEMAND_RESPONSE_LOAD_CONTROL: lambda _: True,
}

POWER_CONSUMPTION_FIELDS = {
    "energy",
    "power",
    "deltaEnergy",
    "powerEnergy",
    "energySaved",
}

CAPABILITY_VALIDATION: dict[
    Capability | str, Callable[[dict[Attribute | str, Status]], bool]
] = {
    Capability.POWER_CONSUMPTION_REPORT: (
        lambda status: (
            (power_consumption := status[Attribute.POWER_CONSUMPTION].value) is not None
            and all(
                field in cast(dict, power_consumption)
                for field in POWER_CONSUMPTION_FIELDS
            )
        )
    )
}


def process_status(
    status: dict[str, dict[Capability | str, dict[Attribute | str, Status]]],
) -> dict[str, dict[Capability | str, dict[Attribute | str, Status]]]:
    """Remove disabled capabilities from status."""
    if (main_component := status.get("main")) is None or (
    if (main_component := status.get(MAIN)) is None:
        return status
    if (
        disabled_capabilities_capability := main_component.get(
            Capability.CUSTOM_DISABLED_CAPABILITIES
        )
    ) is None:
        return status
    disabled_capabilities = cast(
        list[Capability | str],
        disabled_capabilities_capability[Attribute.DISABLED_CAPABILITIES].value,
    )
    for capability in disabled_capabilities:
        # We still need to make sure the climate entity can work without this capability
        if (
            capability in main_component
            and capability != Capability.DEMAND_RESPONSE_LOAD_CONTROL
        ):
            del main_component[capability]
    ) is not None:
        disabled_capabilities = cast(
            list[Capability | str],
            disabled_capabilities_capability[Attribute.DISABLED_CAPABILITIES].value,
        )
        if disabled_capabilities is not None:
            for capability in disabled_capabilities:
                if capability in main_component and (
                    capability not in KEEP_CAPABILITY_QUIRK
                    or not KEEP_CAPABILITY_QUIRK[capability](main_component[capability])
                ):
                    del main_component[capability]
    for capability in list(main_component):
        if capability in CAPABILITY_VALIDATION:
            if not CAPABILITY_VALIDATION[capability](main_component[capability]):
                del main_component[capability]
    return status

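The rewritten process_status drops a capability only when it is disabled and no quirk vouches for it, then runs the survivors through validators. A toy model of the quirk lookup over plain dicts (keys and predicates are illustrative, not pysmartthings types):

from collections.abc import Callable

KEEP_QUIRK: dict[str, Callable[[dict], bool]] = {
    "washerOperatingState": lambda st: st.get("supportedMachineStates") is not None,
    "demandResponseLoadControl": lambda _: True,  # always kept, even if "disabled"
}

def filter_disabled(main: dict[str, dict], disabled: list[str]) -> dict[str, dict]:
    for cap in disabled:
        if cap in main and (cap not in KEEP_QUIRK or not KEEP_QUIRK[cap](main[cap])):
            del main[cap]
    return main

main = {"switch": {"switch": "on"}, "demandResponseLoadControl": {"drlcStatus": {}}}
filter_disabled(main, ["switch", "demandResponseLoadControl"])
assert list(main) == ["demandResponseLoadControl"]  # rescued by the quirk
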
@@ -161,9 +161,7 @@ class SmartThingsThermostat(SmartThingsEntity, ClimateEntity):
            | ClimateEntityFeature.TURN_OFF
            | ClimateEntityFeature.TURN_ON
        )
        if self.get_attribute_value(
            Capability.THERMOSTAT_FAN_MODE, Attribute.THERMOSTAT_FAN_MODE
        ):
        if self.supports_capability(Capability.THERMOSTAT_FAN_MODE):
            flags |= ClimateEntityFeature.FAN_MODE
        return flags

@@ -345,7 +343,8 @@ class SmartThingsAirConditioner(SmartThingsEntity, ClimateEntity):
        )
        self._attr_hvac_modes = self._determine_hvac_modes()
        self._attr_preset_modes = self._determine_preset_modes()
        self._attr_swing_modes = self._determine_swing_modes()
        if self.supports_capability(Capability.FAN_OSCILLATION_MODE):
            self._attr_swing_modes = self._determine_swing_modes()
        self._attr_supported_features = self._determine_supported_features()

    def _determine_supported_features(self) -> ClimateEntityFeature:

@@ -444,12 +443,15 @@ class SmartThingsAirConditioner(SmartThingsEntity, ClimateEntity):
        )

    @property
    def extra_state_attributes(self) -> dict[str, Any]:
    def extra_state_attributes(self) -> dict[str, Any] | None:
        """Return device specific state attributes.

        Include attributes from the Demand Response Load Control (drlc)
        and Power Consumption capabilities.
        """
        if not self.supports_capability(Capability.DEMAND_RESPONSE_LOAD_CONTROL):
            return None

        drlc_status = self.get_attribute_value(
            Capability.DEMAND_RESPONSE_LOAD_CONTROL,
            Attribute.DEMAND_RESPONSE_LOAD_CONTROL_STATUS,
@@ -559,5 +561,6 @@ class SmartThingsAirConditioner(SmartThingsEntity, ClimateEntity):
                Capability.AIR_CONDITIONER_MODE, Attribute.SUPPORTED_AC_MODES
            )
            if (state := AC_MODE_TO_STATE.get(mode)) is not None
            if state not in modes
        )
        return modes

@@ -11,7 +11,7 @@ from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler

from .const import CONF_LOCATION_ID, DOMAIN, OLD_DATA, SCOPES
from .const import CONF_LOCATION_ID, DOMAIN, OLD_DATA, REQUESTED_SCOPES, SCOPES

_LOGGER = logging.getLogger(__name__)

@@ -30,10 +30,23 @@ class SmartThingsConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
    @property
    def extra_authorize_data(self) -> dict[str, Any]:
        """Extra data that needs to be appended to the authorize url."""
        return {"scope": " ".join(SCOPES)}
        return {"scope": " ".join(REQUESTED_SCOPES)}

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Check we have the cloud integration set up."""
        if "cloud" not in self.hass.config.components:
            return self.async_abort(
                reason="cloud_not_enabled",
                description_placeholders={"default_config": "default_config"},
            )
        return await super().async_step_user(user_input)

    async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult:
        """Create an entry for SmartThings."""
        if not set(data[CONF_TOKEN]["scope"].split()) >= set(SCOPES):
            return self.async_abort(reason="missing_scopes")
        client = SmartThings(session=async_get_clientsession(self.hass))
        client.authenticate(data[CONF_TOKEN][CONF_ACCESS_TOKEN])
        locations = await client.get_locations()

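The new guard in async_oauth_create_entry is a plain superset test: the granted scopes must cover every entry in SCOPES, while extras such as the installedapps pair from REQUESTED_SCOPES are tolerated. Reduced to its essence (scope list abbreviated for illustration):

SCOPES = ["r:devices:*", "w:devices:*", "x:devices:*"]          # abbreviated
granted = "r:devices:* w:devices:* x:devices:* r:installedapps sse"
assert set(granted.split()) >= set(SCOPES)   # missing scopes would abort the flow
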
@@ -14,9 +14,13 @@ SCOPES = [
    "x:scenes:*",
    "r:rules:*",
    "w:rules:*",
    "sse",
]

REQUESTED_SCOPES = [
    *SCOPES,
    "r:installedapps",
    "w:installedapps",
    "sse",
]

CONF_APP_ID = "app_id"
@@ -28,3 +32,5 @@ CONF_REFRESH_TOKEN = "refresh_token"

MAIN = "main"
OLD_DATA = "old_data"

EVENT_BUTTON = "smartthings.button"

@@ -17,34 +17,40 @@ from .const import DOMAIN
EVENT_WAIT_TIME = 5


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant,
    entry: SmartThingsConfigEntry,
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    client = entry.runtime_data.client
    return await client.get_raw_devices()


async def async_get_device_diagnostics(
    hass: HomeAssistant, entry: SmartThingsConfigEntry, device: DeviceEntry
) -> dict[str, Any]:
    """Return diagnostics for a device entry."""
    client = entry.runtime_data.client
    device_id = next(
        identifier for identifier in device.identifiers if identifier[0] == DOMAIN
    )[0]
    )[1]

    device_status = await client.get_raw_device_status(device_id)
    device_info = await client.get_raw_device(device_id)

    events: list[DeviceEvent] = []

    def register_event(event: DeviceEvent) -> None:
        events.append(event)

    client = entry.runtime_data.client

    listener = client.add_device_event_listener(device_id, register_event)

    await asyncio.sleep(EVENT_WAIT_TIME)

    listener()

    device_status = await client.get_device_status(device_id)

    status: dict[str, Any] = {}
    for component, capabilities in device_status.items():
        status[component] = {}
        for capability, attributes in capabilities.items():
            status[component][capability] = {}
            for attribute, value in attributes.items():
                status[component][capability][attribute] = asdict(value)
    return {"events": [asdict(event) for event in events], "status": status}
    return {
        "events": [asdict(event) for event in events],
        "status": device_status,
        "info": device_info,
    }

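The device diagnostics keep a collect-for-a-few-seconds pattern: attach a listener, sleep for EVENT_WAIT_TIME, detach, and report whatever arrived in between. A generic, runnable reduction of that pattern with no pysmartthings dependency (all names here are illustrative):

import asyncio
from collections.abc import Callable

async def collect_for(seconds: float, subscribe: Callable) -> list:
    events: list = []
    unsubscribe = subscribe(events.append)  # attach the listener
    await asyncio.sleep(seconds)            # let events accumulate
    unsubscribe()                           # detach it again
    return events

async def main() -> None:
    listeners: list[Callable] = []

    def subscribe(cb: Callable) -> Callable[[], None]:
        listeners.append(cb)
        return lambda: listeners.remove(cb)

    task = asyncio.create_task(collect_for(0.1, subscribe))
    await asyncio.sleep(0.05)
    for cb in list(listeners):
        cb("button pressed")
    print(await task)  # ['button pressed']

asyncio.run(main())
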
@@ -2,7 +2,7 @@

from __future__ import annotations

from typing import Any, cast
from typing import Any

from pysmartthings import (
    Attribute,
@@ -44,19 +44,24 @@ class SmartThingsEntity(Entity):
            identifiers={(DOMAIN, device.device.device_id)},
            name=device.device.label,
        )
        if (ocf := device.status[MAIN].get(Capability.OCF)) is not None:
        if (ocf := device.device.ocf) is not None:
            self._attr_device_info.update(
                {
                    "manufacturer": cast(
                        str | None, ocf[Attribute.MANUFACTURER_NAME].value
                    ),
                    "model": cast(str | None, ocf[Attribute.MODEL_NUMBER].value),
                    "hw_version": cast(
                        str | None, ocf[Attribute.HARDWARE_VERSION].value
                    ),
                    "sw_version": cast(
                        str | None, ocf[Attribute.OCF_FIRMWARE_VERSION].value
                    "manufacturer": ocf.manufacturer_name,
                    "model": (
                        (ocf.model_number.split("|")[0]) if ocf.model_number else None
                    ),
                    "hw_version": ocf.hardware_version,
                    "sw_version": ocf.firmware_version,
                }
            )
        if (viper := device.device.viper) is not None:
            self._attr_device_info.update(
                {
                    "manufacturer": viper.manufacturer_name,
                    "model": viper.model_name,
                    "hw_version": viper.hardware_version,
                    "sw_version": viper.software_version,
                }
            )

@@ -116,7 +116,7 @@ class SmartThingsFan(SmartThingsEntity, FanEntity):
    @property
    def is_on(self) -> bool:
        """Return true if fan is on."""
        return self.get_attribute_value(Capability.SWITCH, Attribute.SWITCH)
        return self.get_attribute_value(Capability.SWITCH, Attribute.SWITCH) == "on"

    @property
    def percentage(self) -> int | None:
@@ -132,6 +132,8 @@ class SmartThingsFan(SmartThingsEntity, FanEntity):

        Requires FanEntityFeature.PRESET_MODE.
        """
        if not self.supports_capability(Capability.AIR_CONDITIONER_FAN_MODE):
            return None
        return self.get_attribute_value(
            Capability.AIR_CONDITIONER_FAN_MODE, Attribute.FAN_MODE
        )
@@ -142,6 +144,8 @@ class SmartThingsFan(SmartThingsEntity, FanEntity):

        Requires FanEntityFeature.PRESET_MODE.
        """
        if not self.supports_capability(Capability.AIR_CONDITIONER_FAN_MODE):
            return None
        return self.get_attribute_value(
            Capability.AIR_CONDITIONER_FAN_MODE, Attribute.SUPPORTED_AC_FAN_MODES
        )

@@ -3,12 +3,13 @@
from __future__ import annotations

import asyncio
from typing import Any
from typing import Any, cast

from pysmartthings import Attribute, Capability, Command, SmartThings
from pysmartthings import Attribute, Capability, Command, DeviceEvent, SmartThings

from homeassistant.components.light import (
    ATTR_BRIGHTNESS,
    ATTR_COLOR_MODE,
    ATTR_COLOR_TEMP_KELVIN,
    ATTR_HS_COLOR,
    ATTR_TRANSITION,
@@ -19,6 +20,7 @@ from homeassistant.components.light import (
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity

from . import FullDevice, SmartThingsConfigEntry
from .const import MAIN
@@ -53,7 +55,7 @@ def convert_scale(
    return round(value * target_scale / value_scale, round_digits)


class SmartThingsLight(SmartThingsEntity, LightEntity):
class SmartThingsLight(SmartThingsEntity, LightEntity, RestoreEntity):
    """Define a SmartThings Light."""

    _attr_name = None
@@ -84,18 +86,28 @@ class SmartThingsLight(SmartThingsEntity, LightEntity):
        color_modes = set()
        if self.supports_capability(Capability.COLOR_TEMPERATURE):
            color_modes.add(ColorMode.COLOR_TEMP)
            self._attr_color_mode = ColorMode.COLOR_TEMP
        if self.supports_capability(Capability.COLOR_CONTROL):
            color_modes.add(ColorMode.HS)
            self._attr_color_mode = ColorMode.HS
        if not color_modes and self.supports_capability(Capability.SWITCH_LEVEL):
            color_modes.add(ColorMode.BRIGHTNESS)
        if not color_modes:
            color_modes.add(ColorMode.ONOFF)
        if len(color_modes) == 1:
            self._attr_color_mode = list(color_modes)[0]
        self._attr_supported_color_modes = color_modes
        features = LightEntityFeature(0)
        if self.supports_capability(Capability.SWITCH_LEVEL):
            features |= LightEntityFeature.TRANSITION
        self._attr_supported_features = features

    async def async_added_to_hass(self) -> None:
        """Run when entity about to be added to hass."""
        await super().async_added_to_hass()
        if (last_state := await self.async_get_last_extra_data()) is not None:
            self._attr_color_mode = last_state.as_dict()[ATTR_COLOR_MODE]

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the light on."""
        tasks = []
@@ -195,17 +207,14 @@ class SmartThingsLight(SmartThingsEntity, LightEntity):
                argument=[level, duration],
            )

    @property
    def color_mode(self) -> ColorMode:
        """Return the color mode of the light."""
        if len(self._attr_supported_color_modes) == 1:
            # The light supports only a single color mode
            return list(self._attr_supported_color_modes)[0]

        # The light supports hs + color temp, determine which one it is
        if self._attr_hs_color and self._attr_hs_color[1]:
            return ColorMode.HS
        return ColorMode.COLOR_TEMP
    def _update_handler(self, event: DeviceEvent) -> None:
        """Handle device updates."""
        if event.capability in (Capability.COLOR_CONTROL, Capability.COLOR_TEMPERATURE):
            self._attr_color_mode = {
                Capability.COLOR_CONTROL: ColorMode.HS,
                Capability.COLOR_TEMPERATURE: ColorMode.COLOR_TEMP,
            }[cast(Capability, event.capability)]
        super()._update_handler(event)

    @property
    def is_on(self) -> bool:

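Instead of guessing the active color mode from the current hue/saturation value, the light now pins the mode whenever a color event arrives and restores the last value after a restart. The capability-to-mode mapping on its own (ColorMode here is a stand-in for the Home Assistant enum):

from enum import Enum

class ColorMode(str, Enum):
    HS = "hs"
    COLOR_TEMP = "color_temp"

CAPABILITY_TO_COLOR_MODE = {
    "colorControl": ColorMode.HS,              # a color command was seen last
    "colorTemperature": ColorMode.COLOR_TEMP,  # a color-temp command was seen last
}
assert CAPABILITY_TO_COLOR_MODE["colorControl"] is ColorMode.HS
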
@@ -29,5 +29,5 @@
  "documentation": "https://www.home-assistant.io/integrations/smartthings",
  "iot_class": "cloud_push",
  "loggers": ["pysmartthings"],
  "requirements": ["pysmartthings==2.0.0"]
  "requirements": ["pysmartthings==2.7.0"]
}

@@ -130,7 +130,6 @@ class SmartThingsSensorEntityDescription(SensorEntityDescription):
    unique_id_separator: str = "."
    capability_ignore_list: list[set[Capability]] | None = None
    options_attribute: Attribute | None = None
    except_if_state_none: bool = False


CAPABILITY_TO_SENSORS: dict[
@@ -461,7 +460,7 @@ CAPABILITY_TO_SENSORS: dict[
            translation_key="media_input_source",
            device_class=SensorDeviceClass.ENUM,
            options_attribute=Attribute.SUPPORTED_INPUT_SOURCES,
            value_fn=lambda value: value.lower(),
            value_fn=lambda value: value.lower() if value else None,
        )
    ]
},
@@ -580,7 +579,7 @@ CAPABILITY_TO_SENSORS: dict[
            device_class=SensorDeviceClass.ENERGY,
            native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
            value_fn=lambda value: value["energy"] / 1000,
            except_if_state_none=True,
            suggested_display_precision=2,
        ),
        SmartThingsSensorEntityDescription(
            key="power_meter",
@@ -589,16 +588,16 @@ CAPABILITY_TO_SENSORS: dict[
            native_unit_of_measurement=UnitOfPower.WATT,
            value_fn=lambda value: value["power"],
            extra_state_attributes_fn=power_attributes,
            except_if_state_none=True,
            suggested_display_precision=2,
        ),
        SmartThingsSensorEntityDescription(
            key="deltaEnergy_meter",
            translation_key="energy_difference",
            state_class=SensorStateClass.TOTAL_INCREASING,
            state_class=SensorStateClass.TOTAL,
            device_class=SensorDeviceClass.ENERGY,
            native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
            value_fn=lambda value: value["deltaEnergy"] / 1000,
            except_if_state_none=True,
            suggested_display_precision=2,
        ),
        SmartThingsSensorEntityDescription(
            key="powerEnergy_meter",
@@ -607,7 +606,7 @@ CAPABILITY_TO_SENSORS: dict[
            device_class=SensorDeviceClass.ENERGY,
            native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
            value_fn=lambda value: value["powerEnergy"] / 1000,
            except_if_state_none=True,
            suggested_display_precision=2,
        ),
        SmartThingsSensorEntityDescription(
            key="energySaved_meter",
@@ -616,7 +615,7 @@ CAPABILITY_TO_SENSORS: dict[
            device_class=SensorDeviceClass.ENERGY,
            native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
            value_fn=lambda value: value["energySaved"] / 1000,
            except_if_state_none=True,
            suggested_display_precision=2,
        ),
    ]
},
@@ -946,6 +945,7 @@ UNITS = {
    "F": UnitOfTemperature.FAHRENHEIT,
    "lux": LIGHT_LUX,
    "mG": None,
    "μg/m^3": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
}


@@ -970,10 +970,6 @@ async def async_setup_entry(
                    for capability_list in description.capability_ignore_list
                )
            )
            and (
                not description.except_if_state_none
                or device.status[MAIN][capability][attribute].value is not None
            )
        )

@@ -23,7 +23,9 @@
      "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "reauth_account_mismatch": "Authenticated account does not match the account to be reauthenticated. Please log in with the correct account and pick the right location.",
      "reauth_location_mismatch": "Authenticated location does not match the location to be reauthenticated. Please log in with the correct account and pick the right location."
      "reauth_location_mismatch": "Authenticated location does not match the location to be reauthenticated. Please log in with the correct account and pick the right location.",
      "missing_scopes": "Authentication failed. Please make sure you have granted all required permissions.",
      "cloud_not_enabled": "Please make sure you run Home Assistant with `{default_config}` enabled in your configuration.yaml."
    }
  },
  "entity": {

@@ -7,5 +7,5 @@
  "iot_class": "cloud_push",
  "loggers": ["snoo"],
  "quality_scale": "bronze",
  "requirements": ["python-snoo==0.6.0"]
  "requirements": ["python-snoo==0.6.1"]
}

Some files were not shown because too many files have changed in this diff.