Mirror of https://github.com/home-assistant/core.git (synced 2026-02-07 15:46:19 +01:00)

Compare commits: 305 commits (joostlek-p ... state_temp)
Commit list (abbreviated SHAs only, be1ba31ff2 through cff3d3d6ac; the compare view's author, date, and message columns were empty).
.github/workflows/codeql.yml (vendored, 4 changed lines)

@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.29.0
+        uses: github/codeql-action/init@v3.29.2
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.29.0
+        uses: github/codeql-action/analyze@v3.29.2
         with:
           category: "/language:python"
CODEOWNERS (generated, 4 changed lines)

@@ -452,8 +452,8 @@ build.json @home-assistant/supervisor
 /tests/components/eq3btsmart/ @eulemitkeule @dbuezas
 /homeassistant/components/escea/ @lazdavila
 /tests/components/escea/ @lazdavila
-/homeassistant/components/esphome/ @OttoWinter @jesserockz @kbx81 @bdraco
-/tests/components/esphome/ @OttoWinter @jesserockz @kbx81 @bdraco
+/homeassistant/components/esphome/ @jesserockz @kbx81 @bdraco
+/tests/components/esphome/ @jesserockz @kbx81 @bdraco
 /homeassistant/components/eufylife_ble/ @bdr99
 /tests/components/eufylife_ble/ @bdr99
 /homeassistant/components/event/ @home-assistant/core
@@ -1,15 +1,7 @@
|
||||
FROM mcr.microsoft.com/devcontainers/python:1-3.13
|
||||
FROM mcr.microsoft.com/vscode/devcontainers/base:debian
|
||||
|
||||
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
|
||||
|
||||
# Uninstall pre-installed formatting and linting tools
|
||||
# They would conflict with our pinned versions
|
||||
RUN \
|
||||
pipx uninstall pydocstyle \
|
||||
&& pipx uninstall pycodestyle \
|
||||
&& pipx uninstall mypy \
|
||||
&& pipx uninstall pylint
|
||||
|
||||
RUN \
|
||||
curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
|
||||
&& apt-get update \
|
||||
@@ -32,21 +24,18 @@ RUN \
|
||||
libxml2 \
|
||||
git \
|
||||
cmake \
|
||||
autoconf \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Add go2rtc binary
|
||||
COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc
|
||||
|
||||
# Install uv
|
||||
RUN pip3 install uv
|
||||
|
||||
WORKDIR /usr/src
|
||||
|
||||
# Setup hass-release
|
||||
RUN git clone --depth 1 https://github.com/home-assistant/hass-release \
|
||||
&& uv pip install --system -e hass-release/ \
|
||||
&& chown -R vscode /usr/src/hass-release/data
|
||||
COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv
|
||||
|
||||
RUN uv python install 3.13.2
|
||||
|
||||
USER vscode
|
||||
ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
|
||||
@@ -55,6 +44,10 @@ ENV PATH="$VIRTUAL_ENV/bin:$PATH"
|
||||
|
||||
WORKDIR /tmp
|
||||
|
||||
# Setup hass-release
|
||||
RUN git clone --depth 1 https://github.com/home-assistant/hass-release ~/hass-release \
|
||||
&& uv pip install -e ~/hass-release/
|
||||
|
||||
# Install Python dependencies from requirements
|
||||
COPY requirements.txt ./
|
||||
COPY homeassistant/package_constraints.txt homeassistant/package_constraints.txt
|
||||
@@ -65,4 +58,4 @@ RUN uv pip install -r requirements_test.txt
|
||||
WORKDIR /workspaces
|
||||
|
||||
# Set the default shell to bash instead of sh
|
||||
ENV SHELL /bin/bash
|
||||
ENV SHELL=/bin/bash
|
||||
|
||||
@@ -75,7 +75,6 @@ from .core_config import async_process_ha_core_config
|
||||
from .exceptions import HomeAssistantError
|
||||
from .helpers import (
|
||||
area_registry,
|
||||
backup,
|
||||
category_registry,
|
||||
config_validation as cv,
|
||||
device_registry,
|
||||
@@ -880,10 +879,6 @@ async def _async_set_up_integrations(
|
||||
if "recorder" in all_domains:
|
||||
recorder.async_initialize_recorder(hass)
|
||||
|
||||
# Initialize backup
|
||||
if "backup" in all_domains:
|
||||
backup.async_initialize_backup(hass)
|
||||
|
||||
stages: list[tuple[str, set[str], int | None]] = [
|
||||
*(
|
||||
(name, domain_group, timeout)
|
||||
|
||||
@@ -2,19 +2,45 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
from typing import Any
|
||||
|
||||
from aioamazondevices.api import AmazonEchoApi
|
||||
from aioamazondevices.exceptions import CannotAuthenticate, CannotConnect
|
||||
from aioamazondevices.exceptions import CannotAuthenticate, CannotConnect, WrongCountry
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_CODE, CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.selector import CountrySelector
|
||||
|
||||
from .const import CONF_LOGIN_DATA, DOMAIN
|
||||
|
||||
STEP_REAUTH_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Required(CONF_CODE): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Validate the user input allows us to connect."""
|
||||
|
||||
api = AmazonEchoApi(
|
||||
data[CONF_COUNTRY],
|
||||
data[CONF_USERNAME],
|
||||
data[CONF_PASSWORD],
|
||||
)
|
||||
|
||||
try:
|
||||
data = await api.login_mode_interactive(data[CONF_CODE])
|
||||
finally:
|
||||
await api.close()
|
||||
|
||||
return data
|
||||
|
||||
|
||||
class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Alexa Devices."""
|
||||
@@ -25,17 +51,14 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle the initial step."""
|
||||
errors = {}
|
||||
if user_input:
|
||||
client = AmazonEchoApi(
|
||||
user_input[CONF_COUNTRY],
|
||||
user_input[CONF_USERNAME],
|
||||
user_input[CONF_PASSWORD],
|
||||
)
|
||||
try:
|
||||
data = await client.login_mode_interactive(user_input[CONF_CODE])
|
||||
data = await validate_input(self.hass, user_input)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except CannotAuthenticate:
|
||||
errors["base"] = "invalid_auth"
|
||||
except WrongCountry:
|
||||
errors["base"] = "wrong_country"
|
||||
else:
|
||||
await self.async_set_unique_id(data["customer_info"]["user_id"])
|
||||
self._abort_if_unique_id_configured()
|
||||
@@ -44,8 +67,6 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
title=user_input[CONF_USERNAME],
|
||||
data=user_input | {CONF_LOGIN_DATA: data},
|
||||
)
|
||||
finally:
|
||||
await client.close()
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
@@ -61,3 +82,43 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauth flow."""
|
||||
self.context["title_placeholders"] = {CONF_USERNAME: entry_data[CONF_USERNAME]}
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauth confirm."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
entry_data = reauth_entry.data
|
||||
|
||||
if user_input is not None:
|
||||
try:
|
||||
await validate_input(self.hass, {**reauth_entry.data, **user_input})
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except CannotAuthenticate:
|
||||
errors["base"] = "invalid_auth"
|
||||
else:
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry,
|
||||
data={
|
||||
CONF_USERNAME: entry_data[CONF_USERNAME],
|
||||
CONF_PASSWORD: entry_data[CONF_PASSWORD],
|
||||
CONF_CODE: user_input[CONF_CODE],
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
description_placeholders={CONF_USERNAME: entry_data[CONF_USERNAME]},
|
||||
data_schema=STEP_REAUTH_DATA_SCHEMA,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
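The hunks above add a reauthentication flow to the Alexa Devices config flow (async_step_reauth and async_step_reauth_confirm, with STEP_REAUTH_DATA_SCHEMA asking for a password and one-time code). As a rough, non-authoritative sketch of how such a flow is usually driven in a test, assuming Home Assistant's standard pytest fixtures (hass, a MockConfigEntry and its start_reauth_flow helper) and with the Amazon API client patched out (mocking omitted for brevity):

```python
# Hypothetical test sketch; not part of this diff. The fixture names and the
# start_reauth_flow helper are assumed from Home Assistant's test conventions.
from homeassistant.const import CONF_CODE, CONF_PASSWORD
from homeassistant.data_entry_flow import FlowResultType


async def test_reauth_flow(hass, mock_config_entry) -> None:
    """Drive the reauth flow: the confirm step asks for password + OTP, then aborts."""
    result = await mock_config_entry.start_reauth_flow(hass)
    assert result["step_id"] == "reauth_confirm"

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_PASSWORD: "new-password", CONF_CODE: "123456"},
    )
    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "reauth_successful"
```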
@@ -12,10 +12,10 @@ from aioamazondevices.exceptions import (
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryError
+from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

-from .const import _LOGGER, CONF_LOGIN_DATA
+from .const import _LOGGER, CONF_LOGIN_DATA, DOMAIN

 SCAN_INTERVAL = 30

@@ -55,4 +55,8 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
         except (CannotConnect, CannotRetrieveData) as err:
             raise UpdateFailed(f"Error occurred while updating {self.name}") from err
         except CannotAuthenticate as err:
-            raise ConfigEntryError("Could not authenticate") from err
+            raise ConfigEntryAuthFailed(
+                translation_domain=DOMAIN,
+                translation_key="invalid_auth",
+                translation_placeholders={"error": repr(err)},
+            ) from err
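The coordinator hunk above replaces ConfigEntryError with ConfigEntryAuthFailed, which is what makes Home Assistant start the reauth flow automatically when credentials stop working rather than leaving the entry in a failed state. A minimal sketch of that error-mapping pattern; the client object and the exceptions it raises are placeholders, not the aioamazondevices API:

```python
# Illustrative sketch of the general pattern, not the Alexa Devices code.
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed


class ExampleCoordinator(DataUpdateCoordinator[dict]):
    """Map client errors to the coordinator exceptions Home Assistant expects."""

    async def _async_update_data(self) -> dict:
        try:
            return await self.client.fetch_data()  # placeholder client call
        except TimeoutError as err:
            # Transient failure: entities become unavailable, the entry stays loaded.
            raise UpdateFailed(f"Error communicating with API: {err}") from err
        except PermissionError as err:
            # Credential failure: Home Assistant starts the integration's reauth flow.
            raise ConfigEntryAuthFailed("Invalid credentials") from err
```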
@@ -8,5 +8,5 @@
     "iot_class": "cloud_polling",
     "loggers": ["aioamazondevices"],
     "quality_scale": "bronze",
-    "requirements": ["aioamazondevices==3.1.22"]
+    "requirements": ["aioamazondevices==3.2.2"]
 }
@@ -34,7 +34,7 @@ rules:
   integration-owner: done
   log-when-unavailable: done
   parallel-updates: done
-  reauthentication-flow: todo
+  reauthentication-flow: done
   test-coverage:
     status: todo
     comment: all tests missing
@@ -22,17 +22,29 @@
          "password": "[%key:component::alexa_devices::common::data_description_password%]",
          "code": "[%key:component::alexa_devices::common::data_description_code%]"
        }
      },
      "reauth_confirm": {
        "data": {
          "password": "[%key:common::config_flow::data::password%]",
          "code": "[%key:component::alexa_devices::common::data_code%]"
        },
        "data_description": {
          "password": "[%key:component::alexa_devices::common::data_description_password%]",
          "code": "[%key:component::alexa_devices::common::data_description_code%]"
        }
      }
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "wrong_country": "Wrong country selected. Please select the country where your Amazon account is registered.",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    }
  },
@@ -61,6 +61,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
entry.async_on_unload(entry.add_update_listener(async_update_options))
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -69,6 +71,13 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def async_update_options(
|
||||
hass: HomeAssistant, entry: AnthropicConfigEntry
|
||||
) -> None:
|
||||
"""Update options."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_migrate_integration(hass: HomeAssistant) -> None:
|
||||
"""Migrate integration entry structure."""
|
||||
|
||||
@@ -138,4 +147,34 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
|
||||
title=DEFAULT_CONVERSATION_NAME,
|
||||
options={},
|
||||
version=2,
|
||||
minor_version=2,
|
||||
)
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
|
||||
"""Migrate entry."""
|
||||
LOGGER.debug("Migrating from version %s:%s", entry.version, entry.minor_version)
|
||||
|
||||
if entry.version > 2:
|
||||
# This means the user has downgraded from a future version
|
||||
return False
|
||||
|
||||
if entry.version == 2 and entry.minor_version == 1:
|
||||
# Correct broken device migration in Home Assistant Core 2025.7.0b0-2025.7.0b1
|
||||
device_registry = dr.async_get(hass)
|
||||
for device in dr.async_entries_for_config_entry(
|
||||
device_registry, entry.entry_id
|
||||
):
|
||||
device_registry.async_update_device(
|
||||
device.id,
|
||||
remove_config_entry_id=entry.entry_id,
|
||||
remove_config_subentry_id=None,
|
||||
)
|
||||
|
||||
hass.config_entries.async_update_entry(entry, minor_version=2)
|
||||
|
||||
LOGGER.debug(
|
||||
"Migration to version %s:%s successful", entry.version, entry.minor_version
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
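The __init__.py hunk above registers an update listener so the Anthropic entry is reloaded whenever its options change. The same pattern in isolation, as a minimal sketch with generic names (not Anthropic-specific code):

```python
# Illustrative sketch of the options-update listener pattern shown above.
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Register an options-update listener during entry setup."""
    entry.async_on_unload(entry.add_update_listener(_async_update_options))
    return True


async def _async_update_options(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Reload the entry so the new options take effect everywhere."""
    await hass.config_entries.async_reload(entry.entry_id)
```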
@@ -75,6 +75,7 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Anthropic."""
|
||||
|
||||
VERSION = 2
|
||||
MINOR_VERSION = 2
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
|
||||
@@ -1,69 +1,17 @@
|
||||
"""Conversation support for Anthropic."""
|
||||
|
||||
from collections.abc import AsyncGenerator, Callable, Iterable
|
||||
import json
|
||||
from typing import Any, Literal, cast
|
||||
|
||||
import anthropic
|
||||
from anthropic import AsyncStream
|
||||
from anthropic._types import NOT_GIVEN
|
||||
from anthropic.types import (
|
||||
InputJSONDelta,
|
||||
MessageDeltaUsage,
|
||||
MessageParam,
|
||||
MessageStreamEvent,
|
||||
RawContentBlockDeltaEvent,
|
||||
RawContentBlockStartEvent,
|
||||
RawContentBlockStopEvent,
|
||||
RawMessageDeltaEvent,
|
||||
RawMessageStartEvent,
|
||||
RawMessageStopEvent,
|
||||
RedactedThinkingBlock,
|
||||
RedactedThinkingBlockParam,
|
||||
SignatureDelta,
|
||||
TextBlock,
|
||||
TextBlockParam,
|
||||
TextDelta,
|
||||
ThinkingBlock,
|
||||
ThinkingBlockParam,
|
||||
ThinkingConfigDisabledParam,
|
||||
ThinkingConfigEnabledParam,
|
||||
ThinkingDelta,
|
||||
ToolParam,
|
||||
ToolResultBlockParam,
|
||||
ToolUseBlock,
|
||||
ToolUseBlockParam,
|
||||
Usage,
|
||||
)
|
||||
from voluptuous_openapi import convert
|
||||
from typing import Literal
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
|
||||
from homeassistant.config_entries import ConfigSubentry
|
||||
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, intent, llm
|
||||
from homeassistant.helpers import intent
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AnthropicConfigEntry
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_PROMPT,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_THINKING_BUDGET,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
MIN_THINKING_BUDGET,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_THINKING_BUDGET,
|
||||
THINKING_MODELS,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
MAX_TOOL_ITERATIONS = 10
|
||||
from .const import CONF_PROMPT, DOMAIN
|
||||
from .entity import AnthropicBaseLLMEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -82,253 +30,10 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
|
||||
def _format_tool(
|
||||
tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
|
||||
) -> ToolParam:
|
||||
"""Format tool specification."""
|
||||
return ToolParam(
|
||||
name=tool.name,
|
||||
description=tool.description or "",
|
||||
input_schema=convert(tool.parameters, custom_serializer=custom_serializer),
|
||||
)
|
||||
|
||||
|
||||
def _convert_content(
|
||||
chat_content: Iterable[conversation.Content],
|
||||
) -> list[MessageParam]:
|
||||
"""Transform HA chat_log content into Anthropic API format."""
|
||||
messages: list[MessageParam] = []
|
||||
|
||||
for content in chat_content:
|
||||
if isinstance(content, conversation.ToolResultContent):
|
||||
tool_result_block = ToolResultBlockParam(
|
||||
type="tool_result",
|
||||
tool_use_id=content.tool_call_id,
|
||||
content=json.dumps(content.tool_result),
|
||||
)
|
||||
if not messages or messages[-1]["role"] != "user":
|
||||
messages.append(
|
||||
MessageParam(
|
||||
role="user",
|
||||
content=[tool_result_block],
|
||||
)
|
||||
)
|
||||
elif isinstance(messages[-1]["content"], str):
|
||||
messages[-1]["content"] = [
|
||||
TextBlockParam(type="text", text=messages[-1]["content"]),
|
||||
tool_result_block,
|
||||
]
|
||||
else:
|
||||
messages[-1]["content"].append(tool_result_block) # type: ignore[attr-defined]
|
||||
elif isinstance(content, conversation.UserContent):
|
||||
# Combine consequent user messages
|
||||
if not messages or messages[-1]["role"] != "user":
|
||||
messages.append(
|
||||
MessageParam(
|
||||
role="user",
|
||||
content=content.content,
|
||||
)
|
||||
)
|
||||
elif isinstance(messages[-1]["content"], str):
|
||||
messages[-1]["content"] = [
|
||||
TextBlockParam(type="text", text=messages[-1]["content"]),
|
||||
TextBlockParam(type="text", text=content.content),
|
||||
]
|
||||
else:
|
||||
messages[-1]["content"].append( # type: ignore[attr-defined]
|
||||
TextBlockParam(type="text", text=content.content)
|
||||
)
|
||||
elif isinstance(content, conversation.AssistantContent):
|
||||
# Combine consequent assistant messages
|
||||
if not messages or messages[-1]["role"] != "assistant":
|
||||
messages.append(
|
||||
MessageParam(
|
||||
role="assistant",
|
||||
content=[],
|
||||
)
|
||||
)
|
||||
|
||||
if content.content:
|
||||
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||
TextBlockParam(type="text", text=content.content)
|
||||
)
|
||||
if content.tool_calls:
|
||||
messages[-1]["content"].extend( # type: ignore[union-attr]
|
||||
[
|
||||
ToolUseBlockParam(
|
||||
type="tool_use",
|
||||
id=tool_call.id,
|
||||
name=tool_call.tool_name,
|
||||
input=tool_call.tool_args,
|
||||
)
|
||||
for tool_call in content.tool_calls
|
||||
]
|
||||
)
|
||||
else:
|
||||
# Note: We don't pass SystemContent here as its passed to the API as the prompt
|
||||
raise TypeError(f"Unexpected content type: {type(content)}")
|
||||
|
||||
return messages
|
||||
|
||||
|
||||
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
|
||||
chat_log: conversation.ChatLog,
|
||||
result: AsyncStream[MessageStreamEvent],
|
||||
messages: list[MessageParam],
|
||||
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
|
||||
"""Transform the response stream into HA format.
|
||||
|
||||
A typical stream of responses might look something like the following:
|
||||
- RawMessageStartEvent with no content
|
||||
- RawContentBlockStartEvent with an empty ThinkingBlock (if extended thinking is enabled)
|
||||
- RawContentBlockDeltaEvent with a ThinkingDelta
|
||||
- RawContentBlockDeltaEvent with a ThinkingDelta
|
||||
- RawContentBlockDeltaEvent with a ThinkingDelta
|
||||
- ...
|
||||
- RawContentBlockDeltaEvent with a SignatureDelta
|
||||
- RawContentBlockStopEvent
|
||||
- RawContentBlockStartEvent with a RedactedThinkingBlock (occasionally)
|
||||
- RawContentBlockStopEvent (RedactedThinkingBlock does not have a delta)
|
||||
- RawContentBlockStartEvent with an empty TextBlock
|
||||
- RawContentBlockDeltaEvent with a TextDelta
|
||||
- RawContentBlockDeltaEvent with a TextDelta
|
||||
- RawContentBlockDeltaEvent with a TextDelta
|
||||
- ...
|
||||
- RawContentBlockStopEvent
|
||||
- RawContentBlockStartEvent with ToolUseBlock specifying the function name
|
||||
- RawContentBlockDeltaEvent with a InputJSONDelta
|
||||
- RawContentBlockDeltaEvent with a InputJSONDelta
|
||||
- ...
|
||||
- RawContentBlockStopEvent
|
||||
- RawMessageDeltaEvent with a stop_reason='tool_use'
|
||||
- RawMessageStopEvent(type='message_stop')
|
||||
|
||||
Each message could contain multiple blocks of the same type.
|
||||
"""
|
||||
if result is None:
|
||||
raise TypeError("Expected a stream of messages")
|
||||
|
||||
current_message: MessageParam | None = None
|
||||
current_block: (
|
||||
TextBlockParam
|
||||
| ToolUseBlockParam
|
||||
| ThinkingBlockParam
|
||||
| RedactedThinkingBlockParam
|
||||
| None
|
||||
) = None
|
||||
current_tool_args: str
|
||||
input_usage: Usage | None = None
|
||||
|
||||
async for response in result:
|
||||
LOGGER.debug("Received response: %s", response)
|
||||
|
||||
if isinstance(response, RawMessageStartEvent):
|
||||
if response.message.role != "assistant":
|
||||
raise ValueError("Unexpected message role")
|
||||
current_message = MessageParam(role=response.message.role, content=[])
|
||||
input_usage = response.message.usage
|
||||
elif isinstance(response, RawContentBlockStartEvent):
|
||||
if isinstance(response.content_block, ToolUseBlock):
|
||||
current_block = ToolUseBlockParam(
|
||||
type="tool_use",
|
||||
id=response.content_block.id,
|
||||
name=response.content_block.name,
|
||||
input="",
|
||||
)
|
||||
current_tool_args = ""
|
||||
elif isinstance(response.content_block, TextBlock):
|
||||
current_block = TextBlockParam(
|
||||
type="text", text=response.content_block.text
|
||||
)
|
||||
yield {"role": "assistant"}
|
||||
if response.content_block.text:
|
||||
yield {"content": response.content_block.text}
|
||||
elif isinstance(response.content_block, ThinkingBlock):
|
||||
current_block = ThinkingBlockParam(
|
||||
type="thinking",
|
||||
thinking=response.content_block.thinking,
|
||||
signature=response.content_block.signature,
|
||||
)
|
||||
elif isinstance(response.content_block, RedactedThinkingBlock):
|
||||
current_block = RedactedThinkingBlockParam(
|
||||
type="redacted_thinking", data=response.content_block.data
|
||||
)
|
||||
LOGGER.debug(
|
||||
"Some of Claude’s internal reasoning has been automatically "
|
||||
"encrypted for safety reasons. This doesn’t affect the quality of "
|
||||
"responses"
|
||||
)
|
||||
elif isinstance(response, RawContentBlockDeltaEvent):
|
||||
if current_block is None:
|
||||
raise ValueError("Unexpected delta without a block")
|
||||
if isinstance(response.delta, InputJSONDelta):
|
||||
current_tool_args += response.delta.partial_json
|
||||
elif isinstance(response.delta, TextDelta):
|
||||
text_block = cast(TextBlockParam, current_block)
|
||||
text_block["text"] += response.delta.text
|
||||
yield {"content": response.delta.text}
|
||||
elif isinstance(response.delta, ThinkingDelta):
|
||||
thinking_block = cast(ThinkingBlockParam, current_block)
|
||||
thinking_block["thinking"] += response.delta.thinking
|
||||
elif isinstance(response.delta, SignatureDelta):
|
||||
thinking_block = cast(ThinkingBlockParam, current_block)
|
||||
thinking_block["signature"] += response.delta.signature
|
||||
elif isinstance(response, RawContentBlockStopEvent):
|
||||
if current_block is None:
|
||||
raise ValueError("Unexpected stop event without a current block")
|
||||
if current_block["type"] == "tool_use":
|
||||
# tool block
|
||||
tool_args = json.loads(current_tool_args) if current_tool_args else {}
|
||||
current_block["input"] = tool_args
|
||||
yield {
|
||||
"tool_calls": [
|
||||
llm.ToolInput(
|
||||
id=current_block["id"],
|
||||
tool_name=current_block["name"],
|
||||
tool_args=tool_args,
|
||||
)
|
||||
]
|
||||
}
|
||||
elif current_block["type"] == "thinking":
|
||||
# thinking block
|
||||
LOGGER.debug("Thinking: %s", current_block["thinking"])
|
||||
|
||||
if current_message is None:
|
||||
raise ValueError("Unexpected stop event without a current message")
|
||||
current_message["content"].append(current_block) # type: ignore[union-attr]
|
||||
current_block = None
|
||||
elif isinstance(response, RawMessageDeltaEvent):
|
||||
if (usage := response.usage) is not None:
|
||||
chat_log.async_trace(_create_token_stats(input_usage, usage))
|
||||
if response.delta.stop_reason == "refusal":
|
||||
raise HomeAssistantError("Potential policy violation detected")
|
||||
elif isinstance(response, RawMessageStopEvent):
|
||||
if current_message is not None:
|
||||
messages.append(current_message)
|
||||
current_message = None
|
||||
|
||||
|
||||
def _create_token_stats(
|
||||
input_usage: Usage | None, response_usage: MessageDeltaUsage
|
||||
) -> dict[str, Any]:
|
||||
"""Create token stats for conversation agent tracing."""
|
||||
input_tokens = 0
|
||||
cached_input_tokens = 0
|
||||
if input_usage:
|
||||
input_tokens = input_usage.input_tokens
|
||||
cached_input_tokens = input_usage.cache_creation_input_tokens or 0
|
||||
output_tokens = response_usage.output_tokens
|
||||
return {
|
||||
"stats": {
|
||||
"input_tokens": input_tokens,
|
||||
"cached_input_tokens": cached_input_tokens,
|
||||
"output_tokens": output_tokens,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class AnthropicConversationEntity(
|
||||
conversation.ConversationEntity, conversation.AbstractConversationAgent
|
||||
conversation.ConversationEntity,
|
||||
conversation.AbstractConversationAgent,
|
||||
AnthropicBaseLLMEntity,
|
||||
):
|
||||
"""Anthropic conversation agent."""
|
||||
|
||||
@@ -336,17 +41,7 @@ class AnthropicConversationEntity(
|
||||
|
||||
def __init__(self, entry: AnthropicConfigEntry, subentry: ConfigSubentry) -> None:
|
||||
"""Initialize the agent."""
|
||||
self.entry = entry
|
||||
self.subentry = subentry
|
||||
self._attr_name = subentry.title
|
||||
self._attr_unique_id = subentry.subentry_id
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, subentry.subentry_id)},
|
||||
name=subentry.title,
|
||||
manufacturer="Anthropic",
|
||||
model="Claude",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
super().__init__(entry, subentry)
|
||||
if self.subentry.data.get(CONF_LLM_HASS_API):
|
||||
self._attr_supported_features = (
|
||||
conversation.ConversationEntityFeature.CONTROL
|
||||
@@ -357,13 +52,6 @@ class AnthropicConversationEntity(
|
||||
"""Return a list of supported languages."""
|
||||
return MATCH_ALL
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""When entity is added to Home Assistant."""
|
||||
await super().async_added_to_hass()
|
||||
self.entry.async_on_unload(
|
||||
self.entry.add_update_listener(self._async_entry_update_listener)
|
||||
)
|
||||
|
||||
async def _async_handle_message(
|
||||
self,
|
||||
user_input: conversation.ConversationInput,
|
||||
@@ -394,77 +82,3 @@ class AnthropicConversationEntity(
|
||||
conversation_id=chat_log.conversation_id,
|
||||
continue_conversation=chat_log.continue_conversation,
|
||||
)
|
||||
|
||||
async def _async_handle_chat_log(
|
||||
self,
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> None:
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.subentry.data
|
||||
|
||||
tools: list[ToolParam] | None = None
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||
for tool in chat_log.llm_api.tools
|
||||
]
|
||||
|
||||
system = chat_log.content[0]
|
||||
if not isinstance(system, conversation.SystemContent):
|
||||
raise TypeError("First message must be a system message")
|
||||
messages = _convert_content(chat_log.content[1:])
|
||||
|
||||
client = self.entry.runtime_data
|
||||
|
||||
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
|
||||
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(MAX_TOOL_ITERATIONS):
|
||||
model_args = {
|
||||
"model": model,
|
||||
"messages": messages,
|
||||
"tools": tools or NOT_GIVEN,
|
||||
"max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
|
||||
"system": system.content,
|
||||
"stream": True,
|
||||
}
|
||||
if model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET:
|
||||
model_args["thinking"] = ThinkingConfigEnabledParam(
|
||||
type="enabled", budget_tokens=thinking_budget
|
||||
)
|
||||
else:
|
||||
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
|
||||
model_args["temperature"] = options.get(
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
)
|
||||
|
||||
try:
|
||||
stream = await client.messages.create(**model_args)
|
||||
except anthropic.AnthropicError as err:
|
||||
raise HomeAssistantError(
|
||||
f"Sorry, I had a problem talking to Anthropic: {err}"
|
||||
) from err
|
||||
|
||||
messages.extend(
|
||||
_convert_content(
|
||||
[
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
self.entity_id,
|
||||
_transform_stream(chat_log, stream, messages),
|
||||
)
|
||||
if not isinstance(content, conversation.AssistantContent)
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
if not chat_log.unresponded_tool_results:
|
||||
break
|
||||
|
||||
async def _async_entry_update_listener(
|
||||
self, hass: HomeAssistant, entry: ConfigEntry
|
||||
) -> None:
|
||||
"""Handle options update."""
|
||||
# Reload as we update device info + entity name + supported features
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
homeassistant/components/anthropic/entity.py (new file, 393 lines)

@@ -0,0 +1,393 @@
|
||||
"""Base entity for Anthropic."""
|
||||
|
||||
from collections.abc import AsyncGenerator, Callable, Iterable
|
||||
import json
|
||||
from typing import Any, cast
|
||||
|
||||
import anthropic
|
||||
from anthropic import AsyncStream
|
||||
from anthropic._types import NOT_GIVEN
|
||||
from anthropic.types import (
|
||||
InputJSONDelta,
|
||||
MessageDeltaUsage,
|
||||
MessageParam,
|
||||
MessageStreamEvent,
|
||||
RawContentBlockDeltaEvent,
|
||||
RawContentBlockStartEvent,
|
||||
RawContentBlockStopEvent,
|
||||
RawMessageDeltaEvent,
|
||||
RawMessageStartEvent,
|
||||
RawMessageStopEvent,
|
||||
RedactedThinkingBlock,
|
||||
RedactedThinkingBlockParam,
|
||||
SignatureDelta,
|
||||
TextBlock,
|
||||
TextBlockParam,
|
||||
TextDelta,
|
||||
ThinkingBlock,
|
||||
ThinkingBlockParam,
|
||||
ThinkingConfigDisabledParam,
|
||||
ThinkingConfigEnabledParam,
|
||||
ThinkingDelta,
|
||||
ToolParam,
|
||||
ToolResultBlockParam,
|
||||
ToolUseBlock,
|
||||
ToolUseBlockParam,
|
||||
Usage,
|
||||
)
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.config_entries import ConfigSubentry
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, llm
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from . import AnthropicConfigEntry
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_THINKING_BUDGET,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
MIN_THINKING_BUDGET,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_THINKING_BUDGET,
|
||||
THINKING_MODELS,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
MAX_TOOL_ITERATIONS = 10
|
||||
|
||||
|
||||
def _format_tool(
|
||||
tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
|
||||
) -> ToolParam:
|
||||
"""Format tool specification."""
|
||||
return ToolParam(
|
||||
name=tool.name,
|
||||
description=tool.description or "",
|
||||
input_schema=convert(tool.parameters, custom_serializer=custom_serializer),
|
||||
)
|
||||
|
||||
|
||||
def _convert_content(
|
||||
chat_content: Iterable[conversation.Content],
|
||||
) -> list[MessageParam]:
|
||||
"""Transform HA chat_log content into Anthropic API format."""
|
||||
messages: list[MessageParam] = []
|
||||
|
||||
for content in chat_content:
|
||||
if isinstance(content, conversation.ToolResultContent):
|
||||
tool_result_block = ToolResultBlockParam(
|
||||
type="tool_result",
|
||||
tool_use_id=content.tool_call_id,
|
||||
content=json.dumps(content.tool_result),
|
||||
)
|
||||
if not messages or messages[-1]["role"] != "user":
|
||||
messages.append(
|
||||
MessageParam(
|
||||
role="user",
|
||||
content=[tool_result_block],
|
||||
)
|
||||
)
|
||||
elif isinstance(messages[-1]["content"], str):
|
||||
messages[-1]["content"] = [
|
||||
TextBlockParam(type="text", text=messages[-1]["content"]),
|
||||
tool_result_block,
|
||||
]
|
||||
else:
|
||||
messages[-1]["content"].append(tool_result_block) # type: ignore[attr-defined]
|
||||
elif isinstance(content, conversation.UserContent):
|
||||
# Combine consequent user messages
|
||||
if not messages or messages[-1]["role"] != "user":
|
||||
messages.append(
|
||||
MessageParam(
|
||||
role="user",
|
||||
content=content.content,
|
||||
)
|
||||
)
|
||||
elif isinstance(messages[-1]["content"], str):
|
||||
messages[-1]["content"] = [
|
||||
TextBlockParam(type="text", text=messages[-1]["content"]),
|
||||
TextBlockParam(type="text", text=content.content),
|
||||
]
|
||||
else:
|
||||
messages[-1]["content"].append( # type: ignore[attr-defined]
|
||||
TextBlockParam(type="text", text=content.content)
|
||||
)
|
||||
elif isinstance(content, conversation.AssistantContent):
|
||||
# Combine consequent assistant messages
|
||||
if not messages or messages[-1]["role"] != "assistant":
|
||||
messages.append(
|
||||
MessageParam(
|
||||
role="assistant",
|
||||
content=[],
|
||||
)
|
||||
)
|
||||
|
||||
if content.content:
|
||||
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||
TextBlockParam(type="text", text=content.content)
|
||||
)
|
||||
if content.tool_calls:
|
||||
messages[-1]["content"].extend( # type: ignore[union-attr]
|
||||
[
|
||||
ToolUseBlockParam(
|
||||
type="tool_use",
|
||||
id=tool_call.id,
|
||||
name=tool_call.tool_name,
|
||||
input=tool_call.tool_args,
|
||||
)
|
||||
for tool_call in content.tool_calls
|
||||
]
|
||||
)
|
||||
else:
|
||||
# Note: We don't pass SystemContent here as its passed to the API as the prompt
|
||||
raise TypeError(f"Unexpected content type: {type(content)}")
|
||||
|
||||
return messages
|
||||
|
||||
|
||||
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
|
||||
chat_log: conversation.ChatLog,
|
||||
result: AsyncStream[MessageStreamEvent],
|
||||
messages: list[MessageParam],
|
||||
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
|
||||
"""Transform the response stream into HA format.
|
||||
|
||||
A typical stream of responses might look something like the following:
|
||||
- RawMessageStartEvent with no content
|
||||
- RawContentBlockStartEvent with an empty ThinkingBlock (if extended thinking is enabled)
|
||||
- RawContentBlockDeltaEvent with a ThinkingDelta
|
||||
- RawContentBlockDeltaEvent with a ThinkingDelta
|
||||
- RawContentBlockDeltaEvent with a ThinkingDelta
|
||||
- ...
|
||||
- RawContentBlockDeltaEvent with a SignatureDelta
|
||||
- RawContentBlockStopEvent
|
||||
- RawContentBlockStartEvent with a RedactedThinkingBlock (occasionally)
|
||||
- RawContentBlockStopEvent (RedactedThinkingBlock does not have a delta)
|
||||
- RawContentBlockStartEvent with an empty TextBlock
|
||||
- RawContentBlockDeltaEvent with a TextDelta
|
||||
- RawContentBlockDeltaEvent with a TextDelta
|
||||
- RawContentBlockDeltaEvent with a TextDelta
|
||||
- ...
|
||||
- RawContentBlockStopEvent
|
||||
- RawContentBlockStartEvent with ToolUseBlock specifying the function name
|
||||
- RawContentBlockDeltaEvent with a InputJSONDelta
|
||||
- RawContentBlockDeltaEvent with a InputJSONDelta
|
||||
- ...
|
||||
- RawContentBlockStopEvent
|
||||
- RawMessageDeltaEvent with a stop_reason='tool_use'
|
||||
- RawMessageStopEvent(type='message_stop')
|
||||
|
||||
Each message could contain multiple blocks of the same type.
|
||||
"""
|
||||
if result is None:
|
||||
raise TypeError("Expected a stream of messages")
|
||||
|
||||
current_message: MessageParam | None = None
|
||||
current_block: (
|
||||
TextBlockParam
|
||||
| ToolUseBlockParam
|
||||
| ThinkingBlockParam
|
||||
| RedactedThinkingBlockParam
|
||||
| None
|
||||
) = None
|
||||
current_tool_args: str
|
||||
input_usage: Usage | None = None
|
||||
|
||||
async for response in result:
|
||||
LOGGER.debug("Received response: %s", response)
|
||||
|
||||
if isinstance(response, RawMessageStartEvent):
|
||||
if response.message.role != "assistant":
|
||||
raise ValueError("Unexpected message role")
|
||||
current_message = MessageParam(role=response.message.role, content=[])
|
||||
input_usage = response.message.usage
|
||||
elif isinstance(response, RawContentBlockStartEvent):
|
||||
if isinstance(response.content_block, ToolUseBlock):
|
||||
current_block = ToolUseBlockParam(
|
||||
type="tool_use",
|
||||
id=response.content_block.id,
|
||||
name=response.content_block.name,
|
||||
input="",
|
||||
)
|
||||
current_tool_args = ""
|
||||
elif isinstance(response.content_block, TextBlock):
|
||||
current_block = TextBlockParam(
|
||||
type="text", text=response.content_block.text
|
||||
)
|
||||
yield {"role": "assistant"}
|
||||
if response.content_block.text:
|
||||
yield {"content": response.content_block.text}
|
||||
elif isinstance(response.content_block, ThinkingBlock):
|
||||
current_block = ThinkingBlockParam(
|
||||
type="thinking",
|
||||
thinking=response.content_block.thinking,
|
||||
signature=response.content_block.signature,
|
||||
)
|
||||
elif isinstance(response.content_block, RedactedThinkingBlock):
|
||||
current_block = RedactedThinkingBlockParam(
|
||||
type="redacted_thinking", data=response.content_block.data
|
||||
)
|
||||
LOGGER.debug(
|
||||
"Some of Claude’s internal reasoning has been automatically "
|
||||
"encrypted for safety reasons. This doesn’t affect the quality of "
|
||||
"responses"
|
||||
)
|
||||
elif isinstance(response, RawContentBlockDeltaEvent):
|
||||
if current_block is None:
|
||||
raise ValueError("Unexpected delta without a block")
|
||||
if isinstance(response.delta, InputJSONDelta):
|
||||
current_tool_args += response.delta.partial_json
|
||||
elif isinstance(response.delta, TextDelta):
|
||||
text_block = cast(TextBlockParam, current_block)
|
||||
text_block["text"] += response.delta.text
|
||||
yield {"content": response.delta.text}
|
||||
elif isinstance(response.delta, ThinkingDelta):
|
||||
thinking_block = cast(ThinkingBlockParam, current_block)
|
||||
thinking_block["thinking"] += response.delta.thinking
|
||||
elif isinstance(response.delta, SignatureDelta):
|
||||
thinking_block = cast(ThinkingBlockParam, current_block)
|
||||
thinking_block["signature"] += response.delta.signature
|
||||
elif isinstance(response, RawContentBlockStopEvent):
|
||||
if current_block is None:
|
||||
raise ValueError("Unexpected stop event without a current block")
|
||||
if current_block["type"] == "tool_use":
|
||||
# tool block
|
||||
tool_args = json.loads(current_tool_args) if current_tool_args else {}
|
||||
current_block["input"] = tool_args
|
||||
yield {
|
||||
"tool_calls": [
|
||||
llm.ToolInput(
|
||||
id=current_block["id"],
|
||||
tool_name=current_block["name"],
|
||||
tool_args=tool_args,
|
||||
)
|
||||
]
|
||||
}
|
||||
elif current_block["type"] == "thinking":
|
||||
# thinking block
|
||||
LOGGER.debug("Thinking: %s", current_block["thinking"])
|
||||
|
||||
if current_message is None:
|
||||
raise ValueError("Unexpected stop event without a current message")
|
||||
current_message["content"].append(current_block) # type: ignore[union-attr]
|
||||
current_block = None
|
||||
elif isinstance(response, RawMessageDeltaEvent):
|
||||
if (usage := response.usage) is not None:
|
||||
chat_log.async_trace(_create_token_stats(input_usage, usage))
|
||||
if response.delta.stop_reason == "refusal":
|
||||
raise HomeAssistantError("Potential policy violation detected")
|
||||
elif isinstance(response, RawMessageStopEvent):
|
||||
if current_message is not None:
|
||||
messages.append(current_message)
|
||||
current_message = None
|
||||
|
||||
|
||||
def _create_token_stats(
|
||||
input_usage: Usage | None, response_usage: MessageDeltaUsage
|
||||
) -> dict[str, Any]:
|
||||
"""Create token stats for conversation agent tracing."""
|
||||
input_tokens = 0
|
||||
cached_input_tokens = 0
|
||||
if input_usage:
|
||||
input_tokens = input_usage.input_tokens
|
||||
cached_input_tokens = input_usage.cache_creation_input_tokens or 0
|
||||
output_tokens = response_usage.output_tokens
|
||||
return {
|
||||
"stats": {
|
||||
"input_tokens": input_tokens,
|
||||
"cached_input_tokens": cached_input_tokens,
|
||||
"output_tokens": output_tokens,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class AnthropicBaseLLMEntity(Entity):
|
||||
"""Anthropic base LLM entity."""
|
||||
|
||||
def __init__(self, entry: AnthropicConfigEntry, subentry: ConfigSubentry) -> None:
|
||||
"""Initialize the entity."""
|
||||
self.entry = entry
|
||||
self.subentry = subentry
|
||||
self._attr_name = subentry.title
|
||||
self._attr_unique_id = subentry.subentry_id
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, subentry.subentry_id)},
|
||||
name=subentry.title,
|
||||
manufacturer="Anthropic",
|
||||
model="Claude",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
|
||||
async def _async_handle_chat_log(
|
||||
self,
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> None:
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.subentry.data
|
||||
|
||||
tools: list[ToolParam] | None = None
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||
for tool in chat_log.llm_api.tools
|
||||
]
|
||||
|
||||
system = chat_log.content[0]
|
||||
if not isinstance(system, conversation.SystemContent):
|
||||
raise TypeError("First message must be a system message")
|
||||
messages = _convert_content(chat_log.content[1:])
|
||||
|
||||
client = self.entry.runtime_data
|
||||
|
||||
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
|
||||
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(MAX_TOOL_ITERATIONS):
|
||||
model_args = {
|
||||
"model": model,
|
||||
"messages": messages,
|
||||
"tools": tools or NOT_GIVEN,
|
||||
"max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
|
||||
"system": system.content,
|
||||
"stream": True,
|
||||
}
|
||||
if model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET:
|
||||
model_args["thinking"] = ThinkingConfigEnabledParam(
|
||||
type="enabled", budget_tokens=thinking_budget
|
||||
)
|
||||
else:
|
||||
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
|
||||
model_args["temperature"] = options.get(
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
)
|
||||
|
||||
try:
|
||||
stream = await client.messages.create(**model_args)
|
||||
except anthropic.AnthropicError as err:
|
||||
raise HomeAssistantError(
|
||||
f"Sorry, I had a problem talking to Anthropic: {err}"
|
||||
) from err
|
||||
|
||||
messages.extend(
|
||||
_convert_content(
|
||||
[
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
self.entity_id,
|
||||
_transform_stream(chat_log, stream, messages),
|
||||
)
|
||||
if not isinstance(content, conversation.AssistantContent)
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
if not chat_log.unresponded_tool_results:
|
||||
break
|
||||
@@ -71,9 +71,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
cv.make_entity_service_schema(
|
||||
{
|
||||
vol.Optional("message"): str,
|
||||
vol.Optional("media_id"): str,
|
||||
vol.Optional("media_id"): _media_id_validator,
|
||||
vol.Optional("preannounce"): bool,
|
||||
vol.Optional("preannounce_media_id"): str,
|
||||
vol.Optional("preannounce_media_id"): _media_id_validator,
|
||||
}
|
||||
),
|
||||
cv.has_at_least_one_key("message", "media_id"),
|
||||
@@ -81,15 +81,16 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"async_internal_announce",
|
||||
[AssistSatelliteEntityFeature.ANNOUNCE],
|
||||
)
|
||||
|
||||
component.async_register_entity_service(
|
||||
"start_conversation",
|
||||
vol.All(
|
||||
cv.make_entity_service_schema(
|
||||
{
|
||||
vol.Optional("start_message"): str,
|
||||
vol.Optional("start_media_id"): str,
|
||||
vol.Optional("start_media_id"): _media_id_validator,
|
||||
vol.Optional("preannounce"): bool,
|
||||
vol.Optional("preannounce_media_id"): str,
|
||||
vol.Optional("preannounce_media_id"): _media_id_validator,
|
||||
vol.Optional("extra_system_prompt"): str,
|
||||
}
|
||||
),
|
||||
@@ -135,9 +136,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
{
|
||||
vol.Required(ATTR_ENTITY_ID): cv.entity_domain(DOMAIN),
|
||||
vol.Optional("question"): str,
|
||||
vol.Optional("question_media_id"): str,
|
||||
vol.Optional("question_media_id"): _media_id_validator,
|
||||
vol.Optional("preannounce"): bool,
|
||||
vol.Optional("preannounce_media_id"): str,
|
||||
vol.Optional("preannounce_media_id"): _media_id_validator,
|
||||
vol.Optional("answers"): [
|
||||
{
|
||||
vol.Required("id"): str,
|
||||
@@ -204,3 +205,20 @@ def has_one_non_empty_item(value: list[str]) -> list[str]:
         raise vol.Invalid("sentences cannot be empty")

     return value
+
+
+# Validator for media_id fields that accepts both string and media selector format
+_media_id_validator = vol.Any(
+    cv.string,  # Plain string format
+    vol.All(
+        vol.Schema(
+            {
+                vol.Required("media_content_id"): cv.string,
+                vol.Required("media_content_type"): cv.string,
+                vol.Remove("metadata"): dict,  # Ignore metadata if present
+            }
+        ),
+        # Extract media_content_id from media selector format
+        lambda x: x["media_content_id"],
+    ),
+)
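The _media_id_validator added above accepts either a plain media-content-ID string or a media-selector dictionary and normalizes both to a string, dropping any metadata key. A small usage sketch; it assumes the validator above is in scope, and the media IDs are invented examples:

```python
# Illustrative only: _media_id_validator is the validator from the hunk above;
# the media IDs below are made-up examples.
MEDIA_ID = "media-source://media_source/local/doorbell.mp3"

# Plain string form passes straight through.
assert _media_id_validator(MEDIA_ID) == MEDIA_ID

# Media-selector form is reduced to its media_content_id; metadata is dropped.
assert (
    _media_id_validator(
        {
            "media_content_id": MEDIA_ID,
            "media_content_type": "audio/mpeg",
            "metadata": {"title": "Doorbell"},  # removed by vol.Remove("metadata")
        }
    )
    == MEDIA_ID
)
```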
@@ -14,7 +14,9 @@ announce:
|
||||
media_id:
|
||||
required: false
|
||||
selector:
|
||||
text:
|
||||
media:
|
||||
accept:
|
||||
- audio/*
|
||||
preannounce:
|
||||
required: false
|
||||
default: true
|
||||
@@ -23,7 +25,9 @@ announce:
|
||||
preannounce_media_id:
required: false
selector:
text:
media:
accept:
- audio/*
start_conversation:
target:
entity:
@@ -40,7 +44,9 @@ start_conversation:
start_media_id:
required: false
selector:
text:
media:
accept:
- audio/*
extra_system_prompt:
required: false
selector:
@@ -53,7 +59,9 @@ start_conversation:
preannounce_media_id:
required: false
selector:
text:
media:
accept:
- audio/*
ask_question:
fields:
entity_id:
@@ -72,7 +80,9 @@ ask_question:
question_media_id:
required: false
selector:
text:
media:
accept:
- audio/*
preannounce:
required: false
default: true
@@ -81,7 +91,9 @@ ask_question:
preannounce_media_id:
required: false
selector:
text:
media:
accept:
- audio/*
answers:
required: false
selector:

@@ -2,9 +2,9 @@

from homeassistant.config_entries import SOURCE_SYSTEM
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, discovery_flow
from homeassistant.helpers.backup import DATA_BACKUP
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.typing import ConfigType

@@ -37,7 +37,6 @@ from .manager import (
IdleEvent,
IncorrectPasswordError,
ManagerBackup,
ManagerStateEvent,
NewBackup,
RestoreBackupEvent,
RestoreBackupStage,
@@ -72,12 +71,12 @@ __all__ = [
"IncorrectPasswordError",
"LocalBackupAgent",
"ManagerBackup",
"ManagerStateEvent",
"NewBackup",
"RestoreBackupEvent",
"RestoreBackupStage",
"RestoreBackupState",
"WrittenBackup",
"async_get_manager",
"suggested_filename",
"suggested_filename_from_name_date",
]
@@ -104,13 +103,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

backup_manager = BackupManager(hass, reader_writer)
hass.data[DATA_MANAGER] = backup_manager
try:
await backup_manager.async_setup()
except Exception as err:
hass.data[DATA_BACKUP].manager_ready.set_exception(err)
raise
else:
hass.data[DATA_BACKUP].manager_ready.set_result(None)
await backup_manager.async_setup()

async_register_websocket_handlers(hass, with_hassio)

@@ -143,3 +136,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bo
async def async_unload_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

@callback
def async_get_manager(hass: HomeAssistant) -> BackupManager:
"""Get the backup manager instance.

Raises HomeAssistantError if the backup integration is not available.
"""
if DATA_MANAGER not in hass.data:
raise HomeAssistantError("Backup integration is not available")

return hass.data[DATA_MANAGER]
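The hunk above ends with the new `async_get_manager` helper, which now raises `HomeAssistantError` instead of waiting on a readiness future when the backup integration is missing. A minimal sketch of how a caller might consume it; the helper and its behaviour come from the diff, while the surrounding function is illustrative only:

```python
from homeassistant.components.backup import async_get_manager
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError


@callback
def describe_last_backup_event(hass: HomeAssistant) -> str:
    """Return a short description of the latest backup event, if any."""
    try:
        manager = async_get_manager(hass)
    except HomeAssistantError:
        # The backup integration is not set up; callers must handle this case.
        return "backup integration not available"
    return f"last event: {manager.last_event}"
```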
@@ -1,38 +0,0 @@
"""Websocket commands for the Backup integration."""

from typing import Any

import voluptuous as vol

from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.backup import async_subscribe_events

from .const import DATA_MANAGER
from .manager import ManagerStateEvent

@callback
def async_register_websocket_handlers(hass: HomeAssistant) -> None:
"""Register websocket commands."""
websocket_api.async_register_command(hass, handle_subscribe_events)

@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
@websocket_api.async_response
async def handle_subscribe_events(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Subscribe to backup events."""

def on_event(event: ManagerStateEvent) -> None:
connection.send_message(websocket_api.event_message(msg["id"], event))

if DATA_MANAGER in hass.data:
manager = hass.data[DATA_MANAGER]
on_event(manager.last_event)
connection.subscriptions[msg["id"]] = async_subscribe_events(hass, on_event)
connection.send_result(msg["id"])
@@ -8,10 +8,6 @@ from datetime import datetime

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.backup import (
async_subscribe_events,
async_subscribe_platform_events,
)
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN, LOGGER
@@ -56,8 +52,8 @@ class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]):
update_interval=None,
)
self.unsubscribe: list[Callable[[], None]] = [
async_subscribe_events(hass, self._on_event),
async_subscribe_platform_events(hass, self._on_event),
backup_manager.async_subscribe_events(self._on_event),
backup_manager.async_subscribe_platform_events(self._on_event),
]

self.backup_manager = backup_manager

@@ -36,7 +36,6 @@ from homeassistant.helpers import (
issue_registry as ir,
start,
)
from homeassistant.helpers.backup import DATA_BACKUP
from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util, json as json_util

@@ -372,12 +371,10 @@ class BackupManager:
# Latest backup event and backup event subscribers
self.last_event: ManagerStateEvent = BlockedEvent()
self.last_action_event: ManagerStateEvent | None = None
self._backup_event_subscriptions = hass.data[
DATA_BACKUP
].backup_event_subscriptions
self._backup_platform_event_subscriptions = hass.data[
DATA_BACKUP
].backup_platform_event_subscriptions
self._backup_event_subscriptions: list[Callable[[ManagerStateEvent], None]] = []
self._backup_platform_event_subscriptions: list[
Callable[[BackupPlatformEvent], None]
] = []

async def async_setup(self) -> None:
"""Set up the backup manager."""
@@ -1385,6 +1382,32 @@ class BackupManager:
for subscription in self._backup_event_subscriptions:
subscription(event)

@callback
def async_subscribe_events(
self,
on_event: Callable[[ManagerStateEvent], None],
) -> Callable[[], None]:
"""Subscribe events."""

def remove_subscription() -> None:
self._backup_event_subscriptions.remove(on_event)

self._backup_event_subscriptions.append(on_event)
return remove_subscription

@callback
def async_subscribe_platform_events(
self,
on_event: Callable[[BackupPlatformEvent], None],
) -> Callable[[], None]:
"""Subscribe to backup platform events."""

def remove_subscription() -> None:
self._backup_platform_event_subscriptions.remove(on_event)

self._backup_platform_event_subscriptions.append(on_event)
return remove_subscription

def _create_automatic_backup_failed_issue(
self, translation_key: str, translation_placeholders: dict[str, str] | None
) -> None:
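The new `async_subscribe_events` / `async_subscribe_platform_events` methods keep the subscriber lists on the manager itself and hand back a remover callable. A stripped-down, self-contained sketch of that subscribe/unsubscribe pattern; the class and event type here are placeholders, not the real Home Assistant objects:

```python
from collections.abc import Callable


class EventHub:
    """Minimal stand-in for the manager's subscription bookkeeping."""

    def __init__(self) -> None:
        self._subscriptions: list[Callable[[str], None]] = []

    def subscribe(self, on_event: Callable[[str], None]) -> Callable[[], None]:
        """Register a listener and return a callable that removes it again."""
        self._subscriptions.append(on_event)

        def remove_subscription() -> None:
            self._subscriptions.remove(on_event)

        return remove_subscription

    def emit(self, event: str) -> None:
        """Notify all currently registered listeners."""
        for on_event in list(self._subscriptions):
            on_event(event)


hub = EventHub()
unsubscribe = hub.subscribe(lambda event: print("got", event))
hub.emit("backup_created")   # listener fires
unsubscribe()
hub.emit("backup_deleted")   # no listener left, nothing printed
```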
@@ -19,9 +19,14 @@ from homeassistant.components.onboarding import (
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.backup import async_get_manager as async_get_backup_manager

from . import BackupManager, Folder, IncorrectPasswordError, http as backup_http
from . import (
BackupManager,
Folder,
IncorrectPasswordError,
async_get_manager,
http as backup_http,
)

if TYPE_CHECKING:
from homeassistant.components.onboarding import OnboardingStoreData
@@ -54,7 +59,7 @@ def with_backup_manager[_ViewT: BaseOnboardingView, **_P](
if self._data["done"]:
raise HTTPUnauthorized

manager = await async_get_backup_manager(request.app[KEY_HASS])
manager = async_get_manager(request.app[KEY_HASS])
return await func(self, manager, request, *args, **kwargs)

return with_backup

@@ -10,7 +10,11 @@ from homeassistant.helpers import config_validation as cv

from .config import Day, ScheduleRecurrence
from .const import DATA_MANAGER, LOGGER
from .manager import DecryptOnDowloadNotSupported, IncorrectPasswordError
from .manager import (
DecryptOnDowloadNotSupported,
IncorrectPasswordError,
ManagerStateEvent,
)
from .models import BackupNotFound, Folder

@@ -30,6 +34,7 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) ->
websocket_api.async_register_command(hass, handle_create_with_automatic_settings)
websocket_api.async_register_command(hass, handle_delete)
websocket_api.async_register_command(hass, handle_restore)
websocket_api.async_register_command(hass, handle_subscribe_events)

websocket_api.async_register_command(hass, handle_config_info)
websocket_api.async_register_command(hass, handle_config_update)
@@ -417,3 +422,22 @@ def handle_config_update(
changes.pop("type")
manager.config.update(**changes)
connection.send_result(msg["id"])

@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
@websocket_api.async_response
async def handle_subscribe_events(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Subscribe to backup events."""

def on_event(event: ManagerStateEvent) -> None:
connection.send_message(websocket_api.event_message(msg["id"], event))

manager = hass.data[DATA_MANAGER]
on_event(manager.last_event)
connection.subscriptions[msg["id"]] = manager.async_subscribe_events(on_event)
connection.send_result(msg["id"])

@@ -19,7 +19,7 @@
"bleak-retry-connector==3.9.0",
"bluetooth-adapters==0.21.4",
"bluetooth-auto-recovery==1.5.2",
"bluetooth-data-tools==1.28.1",
"bluetooth-data-tools==1.28.2",
"dbus-fast==2.43.0",
"habluetooth==3.49.0"
]

@@ -14,7 +14,7 @@ from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.typing import ConfigType

from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN
from .services import setup_services
from .services import async_setup_services
from .types import BoschAlarmConfigEntry

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@@ -29,7 +29,7 @@ PLATFORMS: list[Platform] = [

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up bosch alarm services."""
setup_services(hass)
async_setup_services(hass)
return True

@@ -9,7 +9,7 @@ from typing import Any
import voluptuous as vol

from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.util import dt as dt_util
@@ -66,7 +66,8 @@ async def async_set_panel_date(call: ServiceCall) -> None:
) from err

def setup_services(hass: HomeAssistant) -> None:
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up the services for the bosch alarm integration."""

hass.services.async_register(
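Several hunks in this range (bosch_alarm, dynalite, guardian, HEOS, Home Connect) rename their service-setup helpers to `async_setup_services` and decorate them with `@callback`. A hedged sketch of that registration pattern for a hypothetical `example` domain; the service name, schema, and handler below are illustrative, not taken from any of these integrations:

```python
import logging

import voluptuous as vol

from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv

_LOGGER = logging.getLogger(__name__)

DOMAIN = "example"  # hypothetical domain, for illustration only

SET_VALUE_SCHEMA = vol.Schema({vol.Required("value"): cv.positive_int})


async def _async_set_value(call: ServiceCall) -> None:
    """Handle the hypothetical example.set_value service call."""
    # A real handler would act on a config entry or device here.
    _LOGGER.debug("example.set_value called with %s", call.data["value"])


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Register services; marked @callback because it does no I/O."""
    hass.services.async_register(
        DOMAIN, "set_value", _async_set_value, schema=SET_VALUE_SCHEMA
    )
```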
@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==0.104.0"],
"requirements": ["hass-nabucasa==0.105.0"],
"single_config_entry": true
}

@@ -6,11 +6,18 @@ from operator import itemgetter
import numpy as np
import voluptuous as vol

from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.sensor import (
CONF_STATE_CLASS,
DEVICE_CLASSES_SCHEMA as SENSOR_DEVICE_CLASSES_SCHEMA,
DOMAIN as SENSOR_DOMAIN,
STATE_CLASSES_SCHEMA as SENSOR_STATE_CLASSES_SCHEMA,
)
from homeassistant.const import (
CONF_ATTRIBUTE,
CONF_DEVICE_CLASS,
CONF_MAXIMUM,
CONF_MINIMUM,
CONF_NAME,
CONF_SOURCE,
CONF_UNIQUE_ID,
CONF_UNIT_OF_MEASUREMENT,
@@ -50,20 +57,23 @@ def datapoints_greater_than_degree(value: dict) -> dict:

COMPENSATION_SCHEMA = vol.Schema(
{
vol.Required(CONF_SOURCE): cv.entity_id,
vol.Optional(CONF_ATTRIBUTE): cv.string,
vol.Required(CONF_DATAPOINTS): [
vol.ExactSequence([vol.Coerce(float), vol.Coerce(float)])
],
vol.Optional(CONF_UNIQUE_ID): cv.string,
vol.Optional(CONF_ATTRIBUTE): cv.string,
vol.Optional(CONF_UPPER_LIMIT, default=False): cv.boolean,
vol.Optional(CONF_LOWER_LIMIT, default=False): cv.boolean,
vol.Optional(CONF_PRECISION, default=DEFAULT_PRECISION): cv.positive_int,
vol.Optional(CONF_DEGREE, default=DEFAULT_DEGREE): vol.All(
vol.Coerce(int),
vol.Range(min=1, max=7),
),
vol.Optional(CONF_DEVICE_CLASS): SENSOR_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_LOWER_LIMIT, default=False): cv.boolean,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_PRECISION, default=DEFAULT_PRECISION): cv.positive_int,
vol.Required(CONF_SOURCE): cv.entity_id,
vol.Optional(CONF_STATE_CLASS): SENSOR_STATE_CLASSES_SCHEMA,
vol.Optional(CONF_UNIQUE_ID): cv.string,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_UPPER_LIMIT, default=False): cv.boolean,
}
)

@@ -7,15 +7,23 @@ from typing import Any

import numpy as np

from homeassistant.components.sensor import SensorEntity
from homeassistant.components.sensor import (
ATTR_STATE_CLASS,
CONF_STATE_CLASS,
SensorEntity,
)
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_UNIT_OF_MEASUREMENT,
CONF_ATTRIBUTE,
CONF_DEVICE_CLASS,
CONF_MAXIMUM,
CONF_MINIMUM,
CONF_NAME,
CONF_SOURCE,
CONF_UNIQUE_ID,
CONF_UNIT_OF_MEASUREMENT,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import (
@@ -59,24 +67,13 @@ async def async_setup_platform(

source: str = conf[CONF_SOURCE]
attribute: str | None = conf.get(CONF_ATTRIBUTE)
name = f"{DEFAULT_NAME} {source}"
if attribute is not None:
name = f"{name} {attribute}"
if not (name := conf.get(CONF_NAME)):
name = f"{DEFAULT_NAME} {source}"
if attribute is not None:
name = f"{name} {attribute}"

async_add_entities(
[
CompensationSensor(
conf.get(CONF_UNIQUE_ID),
name,
source,
attribute,
conf[CONF_PRECISION],
conf[CONF_POLYNOMIAL],
conf.get(CONF_UNIT_OF_MEASUREMENT),
conf[CONF_MINIMUM],
conf[CONF_MAXIMUM],
)
]
[CompensationSensor(conf.get(CONF_UNIQUE_ID), name, source, attribute, conf)]
)

@@ -91,23 +88,27 @@ class CompensationSensor(SensorEntity):
name: str,
source: str,
attribute: str | None,
precision: int,
polynomial: np.poly1d,
unit_of_measurement: str | None,
minimum: tuple[float, float] | None,
maximum: tuple[float, float] | None,
config: dict[str, Any],
) -> None:
"""Initialize the Compensation sensor."""

self._attr_name = name
self._source_entity_id = source
self._precision = precision
self._source_attribute = attribute
self._attr_native_unit_of_measurement = unit_of_measurement

self._precision = config[CONF_PRECISION]
self._attr_native_unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT)

polynomial: np.poly1d = config[CONF_POLYNOMIAL]
self._poly = polynomial
self._coefficients = polynomial.coefficients.tolist()

self._attr_unique_id = unique_id
self._attr_name = name
self._minimum = minimum
self._maximum = maximum
self._minimum = config[CONF_MINIMUM]
self._maximum = config[CONF_MAXIMUM]

self._attr_device_class = config.get(CONF_DEVICE_CLASS)
self._attr_state_class = config.get(CONF_STATE_CLASS)

async def async_added_to_hass(self) -> None:
"""Handle added to Hass."""
@@ -137,13 +138,40 @@ class CompensationSensor(SensorEntity):
"""Handle sensor state changes."""
new_state: State | None
if (new_state := event.data["new_state"]) is None:
_LOGGER.warning(
"While updating compensation %s, the new_state is None", self.name
)
self._attr_native_value = None
self.async_write_ha_state()
return

if new_state.state == STATE_UNKNOWN:
self._attr_native_value = None
self.async_write_ha_state()
return

if new_state.state == STATE_UNAVAILABLE:
self._attr_available = False
self.async_write_ha_state()
return

self._attr_available = True

if self.native_unit_of_measurement is None and self._source_attribute is None:
self._attr_native_unit_of_measurement = new_state.attributes.get(
ATTR_UNIT_OF_MEASUREMENT
)

if self._attr_device_class is None and (
device_class := new_state.attributes.get(ATTR_DEVICE_CLASS)
):
self._attr_device_class = device_class

if self._attr_state_class is None and (
state_class := new_state.attributes.get(ATTR_STATE_CLASS)
):
self._attr_state_class = state_class

if self._source_attribute:
value = new_state.attributes.get(self._source_attribute)
else:
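The compensation hunks above pass the full YAML config into the entity and, when `device_class`, `state_class`, or the unit are not configured, copy them from the source entity's state attributes on the first update. A reduced, self-contained sketch of that fallback; plain dicts stand in for `homeassistant.core.State.attributes`:

```python
from typing import Any


def resolve_attribute(
    configured: Any | None, source_attributes: dict[str, Any], key: str
) -> Any | None:
    """Prefer the configured value, else inherit it from the source entity."""
    if configured is not None:
        return configured
    return source_attributes.get(key)


source = {"device_class": "temperature", "state_class": "measurement"}
assert resolve_attribute(None, source, "device_class") == "temperature"
assert resolve_attribute("humidity", source, "device_class") == "humidity"
```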
@@ -5,8 +5,9 @@ from pycoolmasternet_async import CoolMasterNet
from homeassistant.const import CONF_HOST, CONF_PORT, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr

from .const import CONF_SWING_SUPPORT
from .const import CONF_SWING_SUPPORT, DOMAIN
from .coordinator import CoolmasterConfigEntry, CoolmasterDataUpdateCoordinator

PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CLIMATE, Platform.SENSOR]
@@ -48,3 +49,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: CoolmasterConfigEntry) -
async def async_unload_entry(hass: HomeAssistant, entry: CoolmasterConfigEntry) -> bool:
"""Unload a Coolmaster config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

async def async_remove_config_entry_device(
hass: HomeAssistant,
config_entry: CoolmasterConfigEntry,
device_entry: dr.DeviceEntry,
) -> bool:
"""Remove a config entry from a device."""
return not device_entry.identifiers.intersection(
(DOMAIN, unit_id) for unit_id in config_entry.runtime_data.data
)

@@ -19,10 +19,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
SUPPORT_MINIMAL_SERVICES = VacuumEntityFeature.TURN_ON | VacuumEntityFeature.TURN_OFF

SUPPORT_BASIC_SERVICES = (
VacuumEntityFeature.STATE
| VacuumEntityFeature.START
| VacuumEntityFeature.STOP
| VacuumEntityFeature.BATTERY
VacuumEntityFeature.STATE | VacuumEntityFeature.START | VacuumEntityFeature.STOP
)

SUPPORT_MOST_SERVICES = (
@@ -31,7 +28,6 @@ SUPPORT_MOST_SERVICES = (
| VacuumEntityFeature.STOP
| VacuumEntityFeature.PAUSE
| VacuumEntityFeature.RETURN_HOME
| VacuumEntityFeature.BATTERY
| VacuumEntityFeature.FAN_SPEED
)

@@ -46,7 +42,6 @@ SUPPORT_ALL_SERVICES = (
| VacuumEntityFeature.SEND_COMMAND
| VacuumEntityFeature.LOCATE
| VacuumEntityFeature.STATUS
| VacuumEntityFeature.BATTERY
| VacuumEntityFeature.LOCATE
| VacuumEntityFeature.MAP
| VacuumEntityFeature.CLEAN_SPOT
@@ -90,12 +85,6 @@ class StateDemoVacuum(StateVacuumEntity):
self._attr_activity = VacuumActivity.DOCKED
self._fan_speed = FAN_SPEEDS[1]
self._cleaned_area: float = 0
self._battery_level = 100

@property
def battery_level(self) -> int:
"""Return the current battery level of the vacuum."""
return max(0, min(100, self._battery_level))

@property
def fan_speed(self) -> str:
@@ -117,7 +106,6 @@ class StateDemoVacuum(StateVacuumEntity):
if self._attr_activity != VacuumActivity.CLEANING:
self._attr_activity = VacuumActivity.CLEANING
self._cleaned_area += 1.32
self._battery_level -= 1
self.schedule_update_ha_state()

def pause(self) -> None:
@@ -142,7 +130,6 @@ class StateDemoVacuum(StateVacuumEntity):
"""Perform a spot clean-up."""
self._attr_activity = VacuumActivity.CLEANING
self._cleaned_area += 1.32
self._battery_level -= 1
self.schedule_update_ha_state()

def set_fan_speed(self, fan_speed: str, **kwargs: Any) -> None:

@@ -6,5 +6,5 @@
"iot_class": "local_polling",
"loggers": ["pydoods"],
"quality_scale": "legacy",
"requirements": ["pydoods==1.0.2", "Pillow==11.2.1"]
"requirements": ["pydoods==1.0.2", "Pillow==11.3.0"]
}

@@ -92,7 +92,7 @@ SENSORS: list[DROPSensorEntityDescription] = [
native_unit_of_measurement=UnitOfVolume.GALLONS,
suggested_display_precision=1,
value_fn=lambda device: device.drop_api.water_used_today(),
state_class=SensorStateClass.TOTAL,
state_class=SensorStateClass.TOTAL_INCREASING,
),
DROPSensorEntityDescription(
key=AVERAGE_WATER_USED,

@@ -12,7 +12,7 @@ from .bridge import DynaliteBridge
from .const import DOMAIN, LOGGER, PLATFORMS
from .convert_config import convert_config
from .panel import async_register_dynalite_frontend
from .services import setup_services
from .services import async_setup_services

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

@@ -21,7 +21,7 @@ type DynaliteConfigEntry = ConfigEntry[DynaliteBridge]

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Dynalite platform."""
setup_services(hass)
async_setup_services(hass)

await async_register_dynalite_frontend(hass)

@@ -50,7 +50,7 @@ async def _request_channel_level(service_call: ServiceCall) -> None:

@callback
def setup_services(hass: HomeAssistant) -> None:
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up the Dynalite platform."""
hass.services.async_register(
DOMAIN,

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.11", "deebot-client==13.4.0"]
"requirements": ["py-sucks==0.9.11", "deebot-client==13.5.0"]
}

@@ -8,7 +8,7 @@
"iot_class": "local_polling",
"loggers": ["eheimdigital"],
"quality_scale": "bronze",
"requirements": ["eheimdigital==1.2.0"],
"requirements": ["eheimdigital==1.3.0"],
"zeroconf": [
{ "type": "_http._tcp.local.", "name": "eheimdigital._http._tcp.local." }
]

@@ -16,7 +16,12 @@ from homeassistant.config_entries import (
from homeassistant.const import CONF_API_KEY, CONF_URL
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import selector
from homeassistant.helpers.selector import (
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
selector,
)

from .const import (
CONF_MESSAGE,
@@ -26,6 +31,9 @@ from .const import (
FEED_ID,
FEED_NAME,
FEED_TAG,
SYNC_MODE,
SYNC_MODE_AUTO,
SYNC_MODE_MANUAL,
)

@@ -102,6 +110,17 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN):
"mode": "dropdown",
"multiple": True,
}
if user_input.get(SYNC_MODE) == SYNC_MODE_AUTO:
return self.async_create_entry(
title=sensor_name(self.url),
data={
CONF_URL: self.url,
CONF_API_KEY: self.api_key,
CONF_ONLY_INCLUDE_FEEDID: [
feed[FEED_ID] for feed in result[CONF_MESSAGE]
],
},
)
return await self.async_step_choose_feeds()
return self.async_show_form(
step_id="user",
@@ -110,6 +129,15 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN):
{
vol.Required(CONF_URL): str,
vol.Required(CONF_API_KEY): str,
vol.Required(
SYNC_MODE, default=SYNC_MODE_MANUAL
): SelectSelector(
SelectSelectorConfig(
options=[SYNC_MODE_MANUAL, SYNC_MODE_AUTO],
mode=SelectSelectorMode.DROPDOWN,
translation_key=SYNC_MODE,
)
),
}
),
user_input,

@@ -14,6 +14,9 @@ EMONCMS_UUID_DOC_URL = (
FEED_ID = "id"
FEED_NAME = "name"
FEED_TAG = "tag"
SYNC_MODE = "sync_mode"
SYNC_MODE_AUTO = "auto"
SYNC_MODE_MANUAL = "manual"

LOGGER = logging.getLogger(__package__)

@@ -7,7 +7,8 @@
"user": {
"data": {
"url": "[%key:common::config_flow::data::url%]",
"api_key": "[%key:common::config_flow::data::api_key%]"
"api_key": "[%key:common::config_flow::data::api_key%]",
"sync_mode": "Synchronization mode"
},
"data_description": {
"url": "Server URL starting with the protocol (http or https)",
@@ -24,6 +25,14 @@
"already_configured": "This server is already configured"
}
},
"selector": {
"sync_mode": {
"options": {
"auto": "Synchronize all available Feeds",
"manual": "Select which Feeds to synchronize"
}
}
},
"entity": {
"sensor": {
"energy": {

@@ -363,7 +363,7 @@
"discharging": "[%key:common::state::discharging%]",
"idle": "[%key:common::state::idle%]",
"charging": "[%key:common::state::charging%]",
"full": "Full"
"full": "[%key:common::state::full%]"
}
},
"acb_available_energy": {

@@ -281,7 +281,7 @@ class EsphomeEntity(EsphomeBaseEntity, Generic[_InfoT, _StateT]):

_static_info: _InfoT
_state: _StateT
_has_state: bool
_has_state: bool = False
unique_id: str

def __init__(

@@ -2,7 +2,7 @@
"domain": "esphome",
"name": "ESPHome",
"after_dependencies": ["hassio", "zeroconf", "tag"],
"codeowners": ["@OttoWinter", "@jesserockz", "@kbx81", "@bdraco"],
"codeowners": ["@jesserockz", "@kbx81", "@bdraco"],
"config_flow": true,
"dependencies": ["assist_pipeline", "bluetooth", "intent", "ffmpeg", "http"],
"dhcp": [
@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==33.1.1",
"aioesphomeapi==34.1.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==2.16.0"
],

@@ -81,6 +81,7 @@ class EsphomeSensor(EsphomeEntity[SensorInfo, SensorState], SensorEntity):
# if the string is empty
if unit_of_measurement := static_info.unit_of_measurement:
self._attr_native_unit_of_measurement = unit_of_measurement
self._attr_suggested_display_precision = static_info.accuracy_decimals
self._attr_device_class = try_parse_enum(
SensorDeviceClass, static_info.device_class
)
@@ -97,7 +98,7 @@ class EsphomeSensor(EsphomeEntity[SensorInfo, SensorState], SensorEntity):
self._attr_state_class = _STATE_CLASSES.from_esphome(state_class)

@property
def native_value(self) -> datetime | str | None:
def native_value(self) -> datetime | int | float | None:
"""Return the state of the entity."""
if not self._has_state or (state := self._state).missing_state:
return None
@@ -106,7 +107,7 @@ class EsphomeSensor(EsphomeEntity[SensorInfo, SensorState], SensorEntity):
return None
if self.device_class is SensorDeviceClass.TIMESTAMP:
return dt_util.utc_from_timestamp(state_float)
return f"{state_float:.{self._static_info.accuracy_decimals}f}"
return state_float

class EsphomeTextSensor(EsphomeEntity[TextSensorInfo, TextSensorState], SensorEntity):

@@ -19,7 +19,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="chlorine",
translation_key="chlorine",
native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT,
native_unit_of_measurement="mg/L",
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250627.0"]
"requirements": ["home-assistant-frontend==20250702.0"]
}

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/generic",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["av==13.1.0", "Pillow==11.2.1"]
"requirements": ["av==13.1.0", "Pillow==11.3.0"]
}
@@ -207,6 +207,8 @@ async def async_setup_entry(

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

entry.async_on_unload(entry.add_update_listener(async_update_options))

return True

@@ -220,6 +222,13 @@ async def async_unload_entry(
return True

async def async_update_options(
hass: HomeAssistant, entry: GoogleGenerativeAIConfigEntry
) -> None:
"""Update options."""
await hass.config_entries.async_reload(entry.entry_id)

async def async_migrate_integration(hass: HomeAssistant) -> None:
"""Migrate integration entry structure."""

@@ -299,4 +308,50 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
title=DEFAULT_TITLE,
options={},
version=2,
minor_version=2,
)

async def async_migrate_entry(
hass: HomeAssistant, entry: GoogleGenerativeAIConfigEntry
) -> bool:
"""Migrate entry."""
LOGGER.debug("Migrating from version %s:%s", entry.version, entry.minor_version)

if entry.version > 2:
# This means the user has downgraded from a future version
return False

if entry.version == 2 and entry.minor_version == 1:
# Add TTS subentry which was missing in 2025.7.0b0
if not any(
subentry.subentry_type == "tts" for subentry in entry.subentries.values()
):
hass.config_entries.async_add_subentry(
entry,
ConfigSubentry(
data=MappingProxyType(RECOMMENDED_TTS_OPTIONS),
subentry_type="tts",
title=DEFAULT_TTS_NAME,
unique_id=None,
),
)

# Correct broken device migration in Home Assistant Core 2025.7.0b0-2025.7.0b1
device_registry = dr.async_get(hass)
for device in dr.async_entries_for_config_entry(
device_registry, entry.entry_id
):
device_registry.async_update_device(
device.id,
remove_config_entry_id=entry.entry_id,
remove_config_subentry_id=None,
)

hass.config_entries.async_update_entry(entry, minor_version=2)

LOGGER.debug(
"Migration to version %s:%s successful", entry.version, entry.minor_version
)

return True

@@ -92,6 +92,7 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Google Generative AI Conversation."""

VERSION = 2
MINOR_VERSION = 2

async def async_step_api(
self, user_input: dict[str, Any] | None = None
@@ -329,13 +330,14 @@ async def google_generative_ai_config_option_schema(
api_models = [api_model async for api_model in api_models_pager]
models = [
SelectOptionDict(
label=api_model.display_name,
label=api_model.name.lstrip("models/"),
value=api_model.name,
)
for api_model in sorted(api_models, key=lambda x: x.display_name or "")
for api_model in sorted(
api_models, key=lambda x: x.name.lstrip("models/") or ""
)
if (
api_model.display_name
and api_model.name
api_model.name
and ("tts" in api_model.name) == (subentry_type == "tts")
and "vision" not in api_model.name
and api_model.supported_actions

@@ -61,9 +61,6 @@ class GoogleGenerativeAIConversationEntity(
self.hass, "conversation", self.entry.entry_id, self.entity_id
)
conversation.async_set_agent(self.hass, self.entry, self)
self.entry.async_on_unload(
self.entry.add_update_listener(self._async_entry_update_listener)
)

async def async_will_remove_from_hass(self) -> None:
"""When entity will be removed from Home Assistant."""
@@ -103,10 +100,3 @@ class GoogleGenerativeAIConversationEntity(
conversation_id=chat_log.conversation_id,
continue_conversation=chat_log.continue_conversation,
)

async def _async_entry_update_listener(
self, hass: HomeAssistant, entry: ConfigEntry
) -> None:
"""Handle options update."""
# Reload as we update device info + entity name + supported features
await hass.config_entries.async_reload(entry.entry_id)

@@ -27,7 +27,7 @@ from .const import (
SIGNAL_PAIRED_SENSOR_COORDINATOR_ADDED,
)
from .coordinator import GuardianDataUpdateCoordinator
from .services import setup_services
from .services import async_setup_services

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

@@ -55,7 +55,7 @@ class GuardianData:

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Elexa Guardian component."""
setup_services(hass)
async_setup_services(hass)
return True

@@ -122,8 +122,9 @@ async def async_upgrade_firmware(call: ServiceCall, data: GuardianData) -> None:
)

def setup_services(hass: HomeAssistant) -> None:
"""Register the Renault services."""
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register the guardian services."""
for service_name, schema, method in (
(
SERVICE_NAME_PAIR_SENSOR,

@@ -48,13 +48,13 @@ from homeassistant.components.backup import (
RestoreBackupStage,
RestoreBackupState,
WrittenBackup,
async_get_manager as async_get_backup_manager,
suggested_filename as suggested_backup_filename,
suggested_filename_from_name_date,
)
from homeassistant.const import __version__ as HAVERSION
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.backup import async_get_manager as async_get_backup_manager
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util import dt as dt_util
from homeassistant.util.enum import try_parse_enum
@@ -839,7 +839,7 @@ async def backup_addon_before_update(

async def backup_core_before_update(hass: HomeAssistant) -> None:
"""Prepare for updating core."""
backup_manager = await async_get_backup_manager(hass)
backup_manager = async_get_backup_manager(hass)
client = get_supervisor_client(hass)

try:

@@ -11,6 +11,7 @@ from urllib.parse import quote

import aiohttp
from aiohttp import ClientTimeout, ClientWebSocketResponse, hdrs, web
from aiohttp.helpers import must_be_empty_body
from aiohttp.web_exceptions import HTTPBadGateway, HTTPBadRequest
from multidict import CIMultiDict
from yarl import URL
@@ -184,13 +185,16 @@ class HassIOIngress(HomeAssistantView):
content_type = "application/octet-stream"

# Simple request
if result.status in (204, 304) or (
if (empty_body := must_be_empty_body(result.method, result.status)) or (
content_length is not UNDEFINED
and (content_length_int := int(content_length))
<= MAX_SIMPLE_RESPONSE_SIZE
):
# Return Response
body = await result.read()
if empty_body:
body = None
else:
body = await result.read()
simple_response = web.Response(
headers=headers,
status=result.status,
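The ingress hunk above switches from a hard-coded `(204, 304)` status check to aiohttp's `must_be_empty_body(method, status)` helper (the call signature is taken from the diff itself) and only reads the proxied body when one is allowed. A small sketch of the decision it encodes:

```python
from aiohttp.helpers import must_be_empty_body

# 204/304 responses and replies to HEAD requests must not carry a payload,
# so the proxy sets body=None instead of reading the upstream response.
for method, status in (("GET", 200), ("GET", 204), ("GET", 304), ("HEAD", 200)):
    print(method, status, "empty body required:", must_be_empty_body(method, status))
```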
@@ -9,9 +9,9 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.typing import ConfigType

from . import services
from .const import DOMAIN
from .coordinator import HeosConfigEntry, HeosCoordinator
from .services import async_setup_services

PLATFORMS = [Platform.MEDIA_PLAYER]

@@ -22,7 +22,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the HEOS component."""
services.register(hass)
async_setup_services(hass)
return True

@@ -9,7 +9,7 @@ import voluptuous as vol

from homeassistant.components.media_player import ATTR_MEDIA_VOLUME_LEVEL
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse
from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import (
config_validation as cv,
@@ -44,7 +44,8 @@ HEOS_SIGN_IN_SCHEMA = vol.Schema(
HEOS_SIGN_OUT_SCHEMA = vol.Schema({})

def register(hass: HomeAssistant) -> None:
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register HEOS services."""
hass.services.async_register(
DOMAIN,

@@ -23,7 +23,7 @@ from homeassistant.helpers.typing import ConfigType
from .api import AsyncConfigEntryAuth
from .const import DOMAIN, OLD_NEW_UNIQUE_ID_SUFFIX_MAP
from .coordinator import HomeConnectConfigEntry, HomeConnectCoordinator
from .services import register_actions
from .services import async_setup_services

_LOGGER = logging.getLogger(__name__)

@@ -43,7 +43,7 @@ PLATFORMS = [

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up Home Connect component."""
register_actions(hass)
async_setup_services(hass)
return True

@@ -18,7 +18,7 @@ from aiohomeconnect.model.error import HomeConnectError
import voluptuous as vol

from homeassistant.const import ATTR_DEVICE_ID
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
@@ -522,7 +522,8 @@ async def async_service_start_program(call: ServiceCall) -> None:
await _async_service_program(call, True)

def register_actions(hass: HomeAssistant) -> None:
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register custom actions."""

hass.services.async_register(

@@ -27,6 +27,7 @@ from homeassistant.config_entries import (
)
from homeassistant.core import callback
from homeassistant.data_entry_flow import AbortFlow
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.hassio import is_hassio

@@ -67,6 +68,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
self.addon_start_task: asyncio.Task | None = None
self.addon_uninstall_task: asyncio.Task | None = None
self.firmware_install_task: asyncio.Task | None = None
self.installing_firmware_name: str | None = None

def _get_translation_placeholders(self) -> dict[str, str]:
"""Shared translation placeholders."""
@@ -152,8 +154,12 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
assert self._device is not None

if not self.firmware_install_task:
# We 100% need to install new firmware only if the wrong firmware is
# currently installed
# Keep track of the firmware we're working with, for error messages
self.installing_firmware_name = firmware_name

# Installing new firmware is only truly required if the wrong type is
# installed: upgrading to the latest release of the current firmware type
# isn't strictly necessary for functionality.
firmware_install_required = self._probed_firmware_info is None or (
self._probed_firmware_info.firmware_type
!= expected_installed_firmware_type
@@ -167,7 +173,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
fw_manifest = next(
fw for fw in manifest.firmwares if fw.filename.startswith(fw_type)
)
except (StopIteration, TimeoutError, ClientError, ManifestMissing) as err:
except (StopIteration, TimeoutError, ClientError, ManifestMissing):
_LOGGER.warning(
"Failed to fetch firmware update manifest", exc_info=True
)
@@ -179,13 +185,9 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
)
return self.async_show_progress_done(next_step_id=next_step_id)

raise AbortFlow(
"fw_download_failed",
description_placeholders={
**self._get_translation_placeholders(),
"firmware_name": firmware_name,
},
) from err
return self.async_show_progress_done(
next_step_id="firmware_download_failed"
)

if not firmware_install_required:
assert self._probed_firmware_info is not None
@@ -205,7 +207,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):

try:
fw_data = await client.async_fetch_firmware(fw_manifest)
except (TimeoutError, ClientError, ValueError) as err:
except (TimeoutError, ClientError, ValueError):
_LOGGER.warning("Failed to fetch firmware update", exc_info=True)

# If we cannot download new firmware, we shouldn't block setup
@@ -216,13 +218,9 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
return self.async_show_progress_done(next_step_id=next_step_id)

# Otherwise, fail
raise AbortFlow(
"fw_download_failed",
description_placeholders={
**self._get_translation_placeholders(),
"firmware_name": firmware_name,
},
) from err
return self.async_show_progress_done(
next_step_id="firmware_download_failed"
)

self.firmware_install_task = self.hass.async_create_task(
async_flash_silabs_firmware(
@@ -249,8 +247,40 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
progress_task=self.firmware_install_task,
)

try:
await self.firmware_install_task
except HomeAssistantError:
_LOGGER.exception("Failed to flash firmware")
return self.async_show_progress_done(next_step_id="firmware_install_failed")

return self.async_show_progress_done(next_step_id=next_step_id)

async def async_step_firmware_download_failed(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Abort when firmware download failed."""
assert self.installing_firmware_name is not None
return self.async_abort(
reason="fw_download_failed",
description_placeholders={
**self._get_translation_placeholders(),
"firmware_name": self.installing_firmware_name,
},
)

async def async_step_firmware_install_failed(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Abort when firmware install failed."""
assert self.installing_firmware_name is not None
return self.async_abort(
reason="fw_install_failed",
description_placeholders={
**self._get_translation_placeholders(),
"firmware_name": self.installing_firmware_name,
},
)

async def async_step_pick_firmware_zigbee(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:

@@ -37,7 +37,8 @@
"zha_still_using_stick": "This {model} is in use by the Zigbee Home Automation integration. Please migrate your Zigbee network to another adapter or delete the integration and try again.",
"otbr_still_using_stick": "This {model} is in use by the OpenThread Border Router add-on. If you use the Thread network, make sure you have alternative border routers. Uninstall the add-on and try again.",
"unsupported_firmware": "The radio firmware on your {model} could not be determined. Make sure that no other integration or add-on is currently trying to communicate with the device. If you are running Home Assistant OS in a virtual machine or in Docker, please make sure that permissions are set correctly for the device.",
"fw_download_failed": "{firmware_name} firmware for your {model} failed to download. Make sure Home Assistant has internet access and try again."
"fw_download_failed": "{firmware_name} firmware for your {model} failed to download. Make sure Home Assistant has internet access and try again.",
"fw_install_failed": "{firmware_name} firmware failed to install, check Home Assistant logs for more information."
},
"progress": {
"install_firmware": "Please wait while {firmware_name} firmware is installed to your {model}, this will take a few minutes. Do not make any changes to your hardware or software until this finishes."

@@ -93,7 +93,8 @@
"zha_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::zha_still_using_stick%]",
"otbr_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_still_using_stick%]",
"unsupported_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::unsupported_firmware%]",
"fw_download_failed": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::fw_download_failed%]"
"fw_download_failed": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::fw_download_failed%]",
"fw_install_failed": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::fw_install_failed%]"
},
"progress": {
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
@@ -147,7 +148,8 @@
"zha_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::zha_still_using_stick%]",
"otbr_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_still_using_stick%]",
"unsupported_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::unsupported_firmware%]",
"fw_download_failed": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::fw_download_failed%]"
"fw_download_failed": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::fw_download_failed%]",
"fw_install_failed": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::fw_install_failed%]"
},
"progress": {
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",

@@ -118,7 +118,8 @@
"zha_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::zha_still_using_stick%]",
"otbr_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_still_using_stick%]",
"unsupported_firmware": "The radio firmware on your {model} could not be determined. Make sure that no other integration or add-on is currently trying to communicate with the device.",
"fw_download_failed": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::fw_download_failed%]"
"fw_download_failed": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::fw_download_failed%]",
"fw_install_failed": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::fw_install_failed%]"
},
"progress": {
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",

@@ -9,17 +9,9 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import (
async_create_clientsession,
async_get_clientsession,
)
from homeassistant.helpers.aiohttp_client import async_create_clientsession

from .const import (
_LOGGER,
CONF_COOL_AWAY_TEMPERATURE,
CONF_HEAT_AWAY_TEMPERATURE,
DOMAIN,
)
from .const import _LOGGER, CONF_COOL_AWAY_TEMPERATURE, CONF_HEAT_AWAY_TEMPERATURE

UPDATE_LOOP_SLEEP_TIME = 5
PLATFORMS = [Platform.CLIMATE, Platform.HUMIDIFIER, Platform.SENSOR, Platform.SWITCH]
@@ -56,11 +48,11 @@ async def async_setup_entry(
username = config_entry.data[CONF_USERNAME]
password = config_entry.data[CONF_PASSWORD]

if len(hass.config_entries.async_entries(DOMAIN)) > 1:
session = async_create_clientsession(hass)
else:
session = async_get_clientsession(hass)

# Always create a new session for Honeywell to prevent cookie injection
# issues. Even with response_url handling in aiosomecomfort 0.0.33+,
# cookies can still leak into other integrations when using the shared
# session. See issue #147395.
session = async_create_clientsession(hass)
client = aiosomecomfort.AIOSomeComfort(username, password, session=session)
try:
await client.login()
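The Honeywell hunks now always build the client on a dedicated aiohttp session so vendor cookies cannot leak into the shared session (see the comment about issue #147395 in the diff). A hedged sketch of that design choice in isolation; the `needs_private_cookies` flag is illustrative and not part of the diff:

```python
from aiohttp import ClientSession

from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import (
    async_create_clientsession,
    async_get_clientsession,
)


def pick_session(hass: HomeAssistant, *, needs_private_cookies: bool) -> ClientSession:
    """Return a dedicated session for cookie-heavy clients, else the shared one."""
    if needs_private_cookies:
        # Cookies set by this vendor stay in this session and cannot be
        # replayed by requests that other integrations make via the shared one.
        return async_create_clientsession(hass)
    return async_get_clientsession(hass)
```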
@@ -16,7 +16,7 @@ from homeassistant.config_entries import (
)
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.aiohttp_client import async_create_clientsession

from .const import (
CONF_COOL_AWAY_TEMPERATURE,
@@ -114,10 +114,14 @@ class HoneywellConfigFlow(ConfigFlow, domain=DOMAIN):

async def is_valid(self, **kwargs) -> bool:
"""Check if login credentials are valid."""
# Always create a new session for Honeywell to prevent cookie injection
# issues. Even with response_url handling in aiosomecomfort 0.0.33+,
# cookies can still leak into other integrations when using the shared
# session. See issue #147395.
client = aiosomecomfort.AIOSomeComfort(
kwargs[CONF_USERNAME],
kwargs[CONF_PASSWORD],
session=async_get_clientsession(self.hass),
session=async_create_clientsession(self.hass),
)

await client.login()

@@ -63,8 +63,8 @@ from .utils import get_device_macs, non_verifying_requests_session
_LOGGER = logging.getLogger(__name__)

class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle Huawei LTE config flow."""
class HuaweiLteConfigFlow(ConfigFlow, domain=DOMAIN):
"""Huawei LTE config flow."""

VERSION = 3

@@ -75,9 +75,9 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
@callback
def async_get_options_flow(
config_entry: ConfigEntry,
) -> OptionsFlowHandler:
) -> HuaweiLteOptionsFlow:
"""Get options flow."""
return OptionsFlowHandler()
return HuaweiLteOptionsFlow()

async def _async_show_user_form(
self,
@@ -354,7 +354,7 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
return self.async_update_reload_and_abort(entry, data=new_data)

class OptionsFlowHandler(OptionsFlow):
class HuaweiLteOptionsFlow(OptionsFlow):
"""Huawei LTE options flow."""

async def async_step_init(

@@ -73,7 +73,6 @@ class AutomowerCalendarEntity(AutomowerBaseEntity, CalendarEntity):
schedule = self.mower_attributes.calendar
cursor = schedule.timeline.active_after(dt_util.now())
program_event = next(cursor, None)
_LOGGER.debug("program_event %s", program_event)
if not program_event:
return None
work_area_name = None

@@ -1,7 +1,19 @@
"""The constants for the Husqvarna Automower integration."""

from aioautomower.model import MowerStates

DOMAIN = "husqvarna_automower"
EXECUTION_TIME_DELAY = 5
NAME = "Husqvarna Automower"
OAUTH2_AUTHORIZE = "https://api.authentication.husqvarnagroup.dev/v1/oauth2/authorize"
OAUTH2_TOKEN = "https://api.authentication.husqvarnagroup.dev/v1/oauth2/token"

ERROR_STATES = [
MowerStates.ERROR_AT_POWER_UP,
MowerStates.ERROR,
MowerStates.FATAL_ERROR,
MowerStates.OFF,
MowerStates.STOPPED,
MowerStates.WAIT_POWER_UP,
MowerStates.WAIT_UPDATING,
]

@@ -18,7 +18,7 @@ from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AutomowerConfigEntry
from .const import DOMAIN
from .const import DOMAIN, ERROR_STATES
from .coordinator import AutomowerDataUpdateCoordinator
from .entity import AutomowerAvailableEntity, handle_sending_exception

@@ -108,18 +108,28 @@ class AutomowerLawnMowerEntity(AutomowerAvailableEntity, LawnMowerEntity):
def activity(self) -> LawnMowerActivity:
"""Return the state of the mower."""
mower_attributes = self.mower_attributes
if mower_attributes.mower.state in ERROR_STATES:
return LawnMowerActivity.ERROR
if mower_attributes.mower.state in PAUSED_STATES:
return LawnMowerActivity.PAUSED
if (mower_attributes.mower.state == "RESTRICTED") or (
mower_attributes.mower.activity in DOCKED_ACTIVITIES
if mower_attributes.mower.activity == MowerActivities.GOING_HOME:
return LawnMowerActivity.RETURNING
if (
mower_attributes.mower.state is MowerStates.RESTRICTED
or mower_attributes.mower.activity in DOCKED_ACTIVITIES
):
return LawnMowerActivity.DOCKED
if mower_attributes.mower.state in MowerStates.IN_OPERATION:
if mower_attributes.mower.activity == MowerActivities.GOING_HOME:
return LawnMowerActivity.RETURNING
return LawnMowerActivity.MOWING
return LawnMowerActivity.ERROR

@property
def available(self) -> bool:
"""Return the available attribute of the entity."""
return (
super().available and self.mower_attributes.mower.state != MowerStates.OFF
)

@property
def work_areas(self) -> dict[int, WorkArea] | None:
"""Return the work areas of the mower."""

@@ -7,13 +7,7 @@ import logging
from operator import attrgetter
from typing import TYPE_CHECKING, Any

from aioautomower.model import (
MowerAttributes,
MowerModes,
MowerStates,
RestrictedReasons,
WorkArea,
)
from aioautomower.model import MowerAttributes, MowerModes, RestrictedReasons, WorkArea

from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -27,6 +21,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType

from . import AutomowerConfigEntry
from .const import ERROR_STATES
from .coordinator import AutomowerDataUpdateCoordinator
from .entity import (
AutomowerBaseEntity,
@@ -166,15 +161,6 @@ ERROR_KEYS = [
"zone_generator_problem",
]

ERROR_STATES = [
MowerStates.ERROR_AT_POWER_UP,
MowerStates.ERROR,
MowerStates.FATAL_ERROR,
MowerStates.OFF,
MowerStates.STOPPED,
MowerStates.WAIT_POWER_UP,
MowerStates.WAIT_UPDATING,
]

ERROR_KEY_LIST = list(
dict.fromkeys(ERROR_KEYS + [state.lower() for state in ERROR_STATES])

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/image_upload",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["Pillow==11.2.1"]
"requirements": ["Pillow==11.3.0"]
}

@@ -10,4 +10,8 @@ OHM = "Ω"
DISCOVERY_SVC_UUID = "9eae1000-9d0d-48c5-aa55-33e27f9bc533"

MAX_TEMP: int = 450
MAX_TEMP_F: int = 850
MIN_TEMP: int = 10
MIN_TEMP_F: int = 50
MIN_BOOST_TEMP: int = 250
MIN_BOOST_TEMP_F: int = 480

@@ -168,7 +168,9 @@ class IronOSSettingsCoordinator(IronOSBaseCoordinator[SettingsDataResponse]):

if self.device.is_connected and characteristics:
try:
return await self.device.get_settings(list(characteristics))
return await self.device.get_settings(
list(characteristics | {CharSetting.TEMP_UNIT})
)
except CommunicationError as e:
_LOGGER.debug("Failed to fetch settings", exc_info=e)

@@ -209,6 +209,12 @@
"state": {
"off": "mdi:card-bulleted-off-outline"
}
},
"boost": {
"default": "mdi:thermometer-high",
"state": {
"off": "mdi:thermometer-off"
}
}
}
}

@@ -6,10 +6,9 @@ from collections.abc import Callable
from dataclasses import dataclass
from enum import StrEnum

from pynecil import CharSetting, LiveDataResponse, SettingsDataResponse
from pynecil import CharSetting, LiveDataResponse, SettingsDataResponse, TempUnit

from homeassistant.components.number import (
DEFAULT_MAX_VALUE,
NumberDeviceClass,
NumberEntity,
NumberEntityDescription,
@@ -24,9 +23,17 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.unit_conversion import TemperatureConverter

from . import IronOSConfigEntry
from .const import MAX_TEMP, MIN_TEMP
from .const import (
MAX_TEMP,
MAX_TEMP_F,
MIN_BOOST_TEMP,
MIN_BOOST_TEMP_F,
MIN_TEMP,
MIN_TEMP_F,
)
from .coordinator import IronOSCoordinators
from .entity import IronOSBaseEntity

@@ -38,9 +45,10 @@ class IronOSNumberEntityDescription(NumberEntityDescription):
"""Describes IronOS number entity."""

value_fn: Callable[[LiveDataResponse, SettingsDataResponse], float | int | None]
max_value_fn: Callable[[LiveDataResponse], float | int] | None = None
characteristic: CharSetting
raw_value_fn: Callable[[float], float | int] | None = None
native_max_value_f: float | None = None
native_min_value_f: float | None = None

class PinecilNumber(StrEnum):
@@ -74,44 +82,6 @@ def multiply(value: float | None, multiplier: float) -> float | None:

PINECIL_NUMBER_DESCRIPTIONS: tuple[IronOSNumberEntityDescription, ...] = (
|
||||
IronOSNumberEntityDescription(
|
||||
key=PinecilNumber.SETPOINT_TEMP,
|
||||
translation_key=PinecilNumber.SETPOINT_TEMP,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
value_fn=lambda data, _: data.setpoint_temp,
|
||||
characteristic=CharSetting.SETPOINT_TEMP,
|
||||
mode=NumberMode.BOX,
|
||||
native_min_value=MIN_TEMP,
|
||||
native_step=5,
|
||||
max_value_fn=lambda data: min(data.max_tip_temp_ability or MAX_TEMP, MAX_TEMP),
|
||||
),
|
||||
IronOSNumberEntityDescription(
|
||||
key=PinecilNumber.SLEEP_TEMP,
|
||||
translation_key=PinecilNumber.SLEEP_TEMP,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
value_fn=lambda _, settings: settings.get("sleep_temp"),
|
||||
characteristic=CharSetting.SLEEP_TEMP,
|
||||
mode=NumberMode.BOX,
|
||||
native_min_value=MIN_TEMP,
|
||||
native_max_value=MAX_TEMP,
|
||||
native_step=10,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
IronOSNumberEntityDescription(
|
||||
key=PinecilNumber.BOOST_TEMP,
|
||||
translation_key=PinecilNumber.BOOST_TEMP,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
value_fn=lambda _, settings: settings.get("boost_temp"),
|
||||
characteristic=CharSetting.BOOST_TEMP,
|
||||
mode=NumberMode.BOX,
|
||||
native_min_value=0,
|
||||
native_max_value=MAX_TEMP,
|
||||
native_step=10,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
IronOSNumberEntityDescription(
|
||||
key=PinecilNumber.QC_MAX_VOLTAGE,
|
||||
translation_key=PinecilNumber.QC_MAX_VOLTAGE,
|
||||
@@ -296,32 +266,6 @@ PINECIL_NUMBER_DESCRIPTIONS: tuple[IronOSNumberEntityDescription, ...] = (
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
IronOSNumberEntityDescription(
|
||||
key=PinecilNumber.TEMP_INCREMENT_SHORT,
|
||||
translation_key=PinecilNumber.TEMP_INCREMENT_SHORT,
|
||||
value_fn=(lambda _, settings: settings.get("temp_increment_short")),
|
||||
characteristic=CharSetting.TEMP_INCREMENT_SHORT,
|
||||
raw_value_fn=lambda value: value,
|
||||
mode=NumberMode.BOX,
|
||||
native_min_value=1,
|
||||
native_max_value=50,
|
||||
native_step=1,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
),
|
||||
IronOSNumberEntityDescription(
|
||||
key=PinecilNumber.TEMP_INCREMENT_LONG,
|
||||
translation_key=PinecilNumber.TEMP_INCREMENT_LONG,
|
||||
value_fn=(lambda _, settings: settings.get("temp_increment_long")),
|
||||
characteristic=CharSetting.TEMP_INCREMENT_LONG,
|
||||
raw_value_fn=lambda value: value,
|
||||
mode=NumberMode.BOX,
|
||||
native_min_value=5,
|
||||
native_max_value=90,
|
||||
native_step=5,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
),
|
||||
)
|
||||
|
||||
PINECIL_NUMBER_DESCRIPTIONS_V223: tuple[IronOSNumberEntityDescription, ...] = (
|
||||
@@ -341,6 +285,82 @@ PINECIL_NUMBER_DESCRIPTIONS_V223: tuple[IronOSNumberEntityDescription, ...] = (
|
||||
),
|
||||
)
|
||||
|
||||
"""
|
||||
The `device_class` attribute was removed from the `setpoint_temperature`, `sleep_temperature`, and `boost_temp` entities.
|
||||
These entities represent user-defined input values, not measured temperatures, and their
|
||||
interpretation depends on the device's current unit configuration. Applying a device_class
|
||||
results in automatic unit conversions, which introduce rounding errors due to the use of integers.
|
||||
This can prevent the correct value from being set, as the input is modified during synchronization with the device.
|
||||
"""
|
||||
PINECIL_TEMP_NUMBER_DESCRIPTIONS: tuple[IronOSNumberEntityDescription, ...] = (
|
||||
IronOSNumberEntityDescription(
|
||||
key=PinecilNumber.SLEEP_TEMP,
|
||||
translation_key=PinecilNumber.SLEEP_TEMP,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda _, settings: settings.get("sleep_temp"),
|
||||
characteristic=CharSetting.SLEEP_TEMP,
|
||||
mode=NumberMode.BOX,
|
||||
native_min_value=MIN_TEMP,
|
||||
native_max_value=MAX_TEMP,
|
||||
native_min_value_f=MIN_TEMP_F,
|
||||
native_max_value_f=MAX_TEMP_F,
|
||||
native_step=10,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
IronOSNumberEntityDescription(
|
||||
key=PinecilNumber.BOOST_TEMP,
|
||||
translation_key=PinecilNumber.BOOST_TEMP,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda _, settings: settings.get("boost_temp"),
|
||||
characteristic=CharSetting.BOOST_TEMP,
|
||||
mode=NumberMode.BOX,
|
||||
native_min_value=MIN_BOOST_TEMP,
|
||||
native_min_value_f=MIN_BOOST_TEMP_F,
|
||||
native_max_value=MAX_TEMP,
|
||||
native_max_value_f=MAX_TEMP_F,
|
||||
native_step=10,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
IronOSNumberEntityDescription(
|
||||
key=PinecilNumber.TEMP_INCREMENT_SHORT,
|
||||
translation_key=PinecilNumber.TEMP_INCREMENT_SHORT,
|
||||
value_fn=(lambda _, settings: settings.get("temp_increment_short")),
|
||||
characteristic=CharSetting.TEMP_INCREMENT_SHORT,
|
||||
raw_value_fn=lambda value: value,
|
||||
mode=NumberMode.BOX,
|
||||
native_min_value=1,
|
||||
native_max_value=50,
|
||||
native_step=1,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
IronOSNumberEntityDescription(
|
||||
key=PinecilNumber.TEMP_INCREMENT_LONG,
|
||||
translation_key=PinecilNumber.TEMP_INCREMENT_LONG,
|
||||
value_fn=(lambda _, settings: settings.get("temp_increment_long")),
|
||||
characteristic=CharSetting.TEMP_INCREMENT_LONG,
|
||||
raw_value_fn=lambda value: value,
|
||||
mode=NumberMode.BOX,
|
||||
native_min_value=5,
|
||||
native_max_value=90,
|
||||
native_step=5,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
)
|
||||
|
||||
PINECIL_SETPOINT_NUMBER_DESCRIPTION = IronOSNumberEntityDescription(
|
||||
key=PinecilNumber.SETPOINT_TEMP,
|
||||
translation_key=PinecilNumber.SETPOINT_TEMP,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data, _: data.setpoint_temp,
|
||||
characteristic=CharSetting.SETPOINT_TEMP,
|
||||
mode=NumberMode.BOX,
|
||||
native_min_value=MIN_TEMP,
|
||||
native_max_value=MAX_TEMP,
|
||||
native_min_value_f=MIN_TEMP_F,
|
||||
native_max_value_f=MAX_TEMP_F,
|
||||
native_step=5,
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -354,9 +374,18 @@ async def async_setup_entry(
|
||||
if coordinators.live_data.v223_features:
|
||||
descriptions += PINECIL_NUMBER_DESCRIPTIONS_V223
|
||||
|
||||
async_add_entities(
|
||||
entities = [
|
||||
IronOSNumberEntity(coordinators, description) for description in descriptions
|
||||
]
|
||||
|
||||
entities.extend(
|
||||
IronOSTemperatureNumberEntity(coordinators, description)
|
||||
for description in PINECIL_TEMP_NUMBER_DESCRIPTIONS
|
||||
)
|
||||
entities.append(
|
||||
IronOSSetpointNumberEntity(coordinators, PINECIL_SETPOINT_NUMBER_DESCRIPTION)
|
||||
)
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class IronOSNumberEntity(IronOSBaseEntity, NumberEntity):
|
||||
@@ -388,15 +417,6 @@ class IronOSNumberEntity(IronOSBaseEntity, NumberEntity):
|
||||
self.coordinator.data, self.settings.data
|
||||
)
|
||||
|
||||
@property
|
||||
def native_max_value(self) -> float:
|
||||
"""Return sensor state."""
|
||||
|
||||
if self.entity_description.max_value_fn is not None:
|
||||
return self.entity_description.max_value_fn(self.coordinator.data)
|
||||
|
||||
return self.entity_description.native_max_value or DEFAULT_MAX_VALUE
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
|
||||
@@ -407,3 +427,70 @@ class IronOSNumberEntity(IronOSBaseEntity, NumberEntity):
|
||||
)
|
||||
)
|
||||
await self.settings.async_request_refresh()
|
||||
|
||||
|
||||
class IronOSTemperatureNumberEntity(IronOSNumberEntity):
|
||||
"""Implementation of a IronOS temperature number entity."""
|
||||
|
||||
@property
|
||||
def native_unit_of_measurement(self) -> str | None:
|
||||
"""Return the unit of measurement of the sensor, if any."""
|
||||
|
||||
return (
|
||||
UnitOfTemperature.FAHRENHEIT
|
||||
if self.settings.data.get("temp_unit") is TempUnit.FAHRENHEIT
|
||||
else UnitOfTemperature.CELSIUS
|
||||
)
|
||||
|
||||
@property
|
||||
def native_min_value(self) -> float:
|
||||
"""Return the minimum value."""
|
||||
|
||||
return (
|
||||
self.entity_description.native_min_value_f
|
||||
if self.entity_description.native_min_value_f
|
||||
and self.native_unit_of_measurement is UnitOfTemperature.FAHRENHEIT
|
||||
else super().native_min_value
|
||||
)
|
||||
|
||||
@property
|
||||
def native_max_value(self) -> float:
|
||||
"""Return the maximum value."""
|
||||
|
||||
return (
|
||||
self.entity_description.native_max_value_f
|
||||
if self.entity_description.native_max_value_f
|
||||
and self.native_unit_of_measurement is UnitOfTemperature.FAHRENHEIT
|
||||
else super().native_max_value
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
if (
|
||||
self.entity_description.key is PinecilNumber.BOOST_TEMP
|
||||
and self.native_value == 0
|
||||
):
|
||||
return False
|
||||
return super().available
|
||||
|
||||
|
||||
class IronOSSetpointNumberEntity(IronOSTemperatureNumberEntity):
|
||||
"""IronOS setpoint temperature entity."""
|
||||
|
||||
@property
|
||||
def native_max_value(self) -> float:
|
||||
"""Return the maximum value."""
|
||||
|
||||
return (
|
||||
min(
|
||||
TemperatureConverter.convert(
|
||||
float(max_tip_c),
|
||||
UnitOfTemperature.CELSIUS,
|
||||
self.native_unit_of_measurement,
|
||||
),
|
||||
super().native_max_value,
|
||||
)
|
||||
if (max_tip_c := self.coordinator.data.max_tip_temp_ability) is not None
|
||||
else super().native_max_value
|
||||
)
|
||||
|
||||
@@ -278,6 +278,9 @@
|
||||
},
|
||||
"calibrate_cjc": {
|
||||
"name": "Calibrate CJC"
|
||||
},
|
||||
"boost": {
|
||||
"name": "Boost"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -7,7 +7,7 @@ from dataclasses import dataclass
|
||||
from enum import StrEnum
|
||||
from typing import Any
|
||||
|
||||
from pynecil import CharSetting, SettingsDataResponse
|
||||
from pynecil import CharSetting, SettingsDataResponse, TempUnit
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
@@ -15,6 +15,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import IronOSConfigEntry
|
||||
from .const import MIN_BOOST_TEMP, MIN_BOOST_TEMP_F
|
||||
from .coordinator import IronOSCoordinators
|
||||
from .entity import IronOSBaseEntity
|
||||
|
||||
@@ -39,6 +40,7 @@ class IronOSSwitch(StrEnum):
|
||||
INVERT_BUTTONS = "invert_buttons"
|
||||
DISPLAY_INVERT = "display_invert"
|
||||
CALIBRATE_CJC = "calibrate_cjc"
|
||||
BOOST = "boost"
|
||||
|
||||
|
||||
SWITCH_DESCRIPTIONS: tuple[IronOSSwitchEntityDescription, ...] = (
|
||||
@@ -94,6 +96,13 @@ SWITCH_DESCRIPTIONS: tuple[IronOSSwitchEntityDescription, ...] = (
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
IronOSSwitchEntityDescription(
|
||||
key=IronOSSwitch.BOOST,
|
||||
translation_key=IronOSSwitch.BOOST,
|
||||
characteristic=CharSetting.BOOST_TEMP,
|
||||
is_on_fn=lambda x: bool(x.get("boost_temp")),
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -136,7 +145,15 @@ class IronOSSwitchEntity(IronOSBaseEntity, SwitchEntity):
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on."""
|
||||
await self.settings.write(self.entity_description.characteristic, True)
|
||||
if self.entity_description.key is IronOSSwitch.BOOST:
|
||||
await self.settings.write(
|
||||
self.entity_description.characteristic,
|
||||
MIN_BOOST_TEMP_F
|
||||
if self.settings.data.get("temp_unit") is TempUnit.FAHRENHEIT
|
||||
else MIN_BOOST_TEMP,
|
||||
)
|
||||
else:
|
||||
await self.settings.write(self.entity_description.characteristic, True)
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on."""
|
||||
|
||||
@@ -108,22 +108,22 @@ def get_statistics(
|
||||
if monthly_consumptions := get_consumptions(data, value_type):
|
||||
return [
|
||||
{
|
||||
"value": as_number(
|
||||
get_values_by_type(
|
||||
consumptions=consumptions,
|
||||
consumption_type=consumption_type,
|
||||
).get(
|
||||
"additionalValue"
|
||||
if value_type == IstaValueType.ENERGY
|
||||
else "value"
|
||||
)
|
||||
),
|
||||
"value": as_number(value),
|
||||
"date": consumptions["date"],
|
||||
}
|
||||
for consumptions in monthly_consumptions
|
||||
if get_values_by_type(
|
||||
consumptions=consumptions,
|
||||
consumption_type=consumption_type,
|
||||
).get("additionalValue" if value_type == IstaValueType.ENERGY else "value")
|
||||
if (
|
||||
value := (
|
||||
consumption := get_values_by_type(
|
||||
consumptions=consumptions,
|
||||
consumption_type=consumption_type,
|
||||
)
|
||||
).get(
|
||||
"additionalValue"
|
||||
if value_type == IstaValueType.ENERGY
|
||||
and consumption.get("additionalValue") is not None
|
||||
else "value"
|
||||
)
|
||||
)
|
||||
]
|
||||
return None
|
||||
|
||||
@@ -91,7 +91,7 @@ from .schema import (
|
||||
TimeSchema,
|
||||
WeatherSchema,
|
||||
)
|
||||
from .services import register_knx_services
|
||||
from .services import async_setup_services
|
||||
from .storage.config_store import STORAGE_KEY as CONFIG_STORAGE_KEY, KNXConfigStore
|
||||
from .telegrams import STORAGE_KEY as TELEGRAMS_STORAGE_KEY, Telegrams
|
||||
from .websocket import register_panel
|
||||
@@ -138,7 +138,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
if (conf := config.get(DOMAIN)) is not None:
|
||||
hass.data[_KNX_YAML_CONFIG] = dict(conf)
|
||||
|
||||
register_knx_services(hass)
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -39,7 +39,8 @@ from .const import (
|
||||
KNX_MODULE_KEY,
|
||||
)
|
||||
from .entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity
|
||||
from .storage.const import CONF_ENTITY, CONF_GA_PASSIVE, CONF_GA_SENSOR, CONF_GA_STATE
|
||||
from .storage.const import CONF_ENTITY, CONF_GA_SENSOR
|
||||
from .storage.util import ConfigExtractor
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -146,17 +147,17 @@ class KnxUiBinarySensor(_KnxBinarySensor, KnxUiEntity):
|
||||
unique_id=unique_id,
|
||||
entity_config=config[CONF_ENTITY],
|
||||
)
|
||||
knx_conf = ConfigExtractor(config[DOMAIN])
|
||||
self._device = XknxBinarySensor(
|
||||
xknx=knx_module.xknx,
|
||||
name=config[CONF_ENTITY][CONF_NAME],
|
||||
group_address_state=[
|
||||
config[DOMAIN][CONF_GA_SENSOR][CONF_GA_STATE],
|
||||
*config[DOMAIN][CONF_GA_SENSOR][CONF_GA_PASSIVE],
|
||||
],
|
||||
sync_state=config[DOMAIN][CONF_SYNC_STATE],
|
||||
invert=config[DOMAIN].get(CONF_INVERT, False),
|
||||
ignore_internal_state=config[DOMAIN].get(CONF_IGNORE_INTERNAL_STATE, False),
|
||||
context_timeout=config[DOMAIN].get(CONF_CONTEXT_TIMEOUT),
|
||||
reset_after=config[DOMAIN].get(CONF_RESET_AFTER),
|
||||
group_address_state=knx_conf.get_state_and_passive(CONF_GA_SENSOR),
|
||||
sync_state=knx_conf.get(CONF_SYNC_STATE),
|
||||
invert=knx_conf.get(CONF_INVERT, default=False),
|
||||
ignore_internal_state=knx_conf.get(
|
||||
CONF_IGNORE_INTERNAL_STATE, default=False
|
||||
),
|
||||
context_timeout=knx_conf.get(CONF_CONTEXT_TIMEOUT),
|
||||
reset_after=knx_conf.get(CONF_RESET_AFTER),
|
||||
)
|
||||
self._attr_force_update = self._device.ignore_internal_state
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Literal
|
||||
from typing import Any
|
||||
|
||||
from xknx import XKNX
|
||||
from xknx.devices import Cover as XknxCover
|
||||
@@ -35,15 +35,13 @@ from .schema import CoverSchema
|
||||
from .storage.const import (
|
||||
CONF_ENTITY,
|
||||
CONF_GA_ANGLE,
|
||||
CONF_GA_PASSIVE,
|
||||
CONF_GA_POSITION_SET,
|
||||
CONF_GA_POSITION_STATE,
|
||||
CONF_GA_STATE,
|
||||
CONF_GA_STEP,
|
||||
CONF_GA_STOP,
|
||||
CONF_GA_UP_DOWN,
|
||||
CONF_GA_WRITE,
|
||||
)
|
||||
from .storage.util import ConfigExtractor
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -230,38 +228,24 @@ class KnxYamlCover(_KnxCover, KnxYamlEntity):
|
||||
def _create_ui_cover(xknx: XKNX, knx_config: ConfigType, name: str) -> XknxCover:
|
||||
"""Return a KNX Light device to be used within XKNX."""
|
||||
|
||||
def get_address(
|
||||
key: str, address_type: Literal["write", "state"] = CONF_GA_WRITE
|
||||
) -> str | None:
|
||||
"""Get a single group address for given key."""
|
||||
return knx_config[key][address_type] if key in knx_config else None
|
||||
|
||||
def get_addresses(
|
||||
key: str, address_type: Literal["write", "state"] = CONF_GA_STATE
|
||||
) -> list[Any] | None:
|
||||
"""Get group address including passive addresses as list."""
|
||||
return (
|
||||
[knx_config[key][address_type], *knx_config[key][CONF_GA_PASSIVE]]
|
||||
if key in knx_config
|
||||
else None
|
||||
)
|
||||
conf = ConfigExtractor(knx_config)
|
||||
|
||||
return XknxCover(
|
||||
xknx=xknx,
|
||||
name=name,
|
||||
group_address_long=get_addresses(CONF_GA_UP_DOWN, CONF_GA_WRITE),
|
||||
group_address_short=get_addresses(CONF_GA_STEP, CONF_GA_WRITE),
|
||||
group_address_stop=get_addresses(CONF_GA_STOP, CONF_GA_WRITE),
|
||||
group_address_position=get_addresses(CONF_GA_POSITION_SET, CONF_GA_WRITE),
|
||||
group_address_position_state=get_addresses(CONF_GA_POSITION_STATE),
|
||||
group_address_angle=get_address(CONF_GA_ANGLE),
|
||||
group_address_angle_state=get_addresses(CONF_GA_ANGLE),
|
||||
travel_time_down=knx_config[CoverConf.TRAVELLING_TIME_DOWN],
|
||||
travel_time_up=knx_config[CoverConf.TRAVELLING_TIME_UP],
|
||||
invert_updown=knx_config.get(CoverConf.INVERT_UPDOWN, False),
|
||||
invert_position=knx_config.get(CoverConf.INVERT_POSITION, False),
|
||||
invert_angle=knx_config.get(CoverConf.INVERT_ANGLE, False),
|
||||
sync_state=knx_config[CONF_SYNC_STATE],
|
||||
group_address_long=conf.get_write_and_passive(CONF_GA_UP_DOWN),
|
||||
group_address_short=conf.get_write_and_passive(CONF_GA_STEP),
|
||||
group_address_stop=conf.get_write_and_passive(CONF_GA_STOP),
|
||||
group_address_position=conf.get_write_and_passive(CONF_GA_POSITION_SET),
|
||||
group_address_position_state=conf.get_state_and_passive(CONF_GA_POSITION_STATE),
|
||||
group_address_angle=conf.get_write(CONF_GA_ANGLE),
|
||||
group_address_angle_state=conf.get_state_and_passive(CONF_GA_ANGLE),
|
||||
travel_time_down=conf.get(CoverConf.TRAVELLING_TIME_DOWN),
|
||||
travel_time_up=conf.get(CoverConf.TRAVELLING_TIME_UP),
|
||||
invert_updown=conf.get(CoverConf.INVERT_UPDOWN, default=False),
|
||||
invert_position=conf.get(CoverConf.INVERT_POSITION, default=False),
|
||||
invert_angle=conf.get(CoverConf.INVERT_ANGLE, default=False),
|
||||
sync_state=conf.get(CONF_SYNC_STATE),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -35,7 +35,6 @@ from .schema import LightSchema
|
||||
from .storage.const import (
|
||||
CONF_COLOR_TEMP_MAX,
|
||||
CONF_COLOR_TEMP_MIN,
|
||||
CONF_DPT,
|
||||
CONF_ENTITY,
|
||||
CONF_GA_BLUE_BRIGHTNESS,
|
||||
CONF_GA_BLUE_SWITCH,
|
||||
@@ -45,17 +44,15 @@ from .storage.const import (
|
||||
CONF_GA_GREEN_BRIGHTNESS,
|
||||
CONF_GA_GREEN_SWITCH,
|
||||
CONF_GA_HUE,
|
||||
CONF_GA_PASSIVE,
|
||||
CONF_GA_RED_BRIGHTNESS,
|
||||
CONF_GA_RED_SWITCH,
|
||||
CONF_GA_SATURATION,
|
||||
CONF_GA_STATE,
|
||||
CONF_GA_SWITCH,
|
||||
CONF_GA_WHITE_BRIGHTNESS,
|
||||
CONF_GA_WHITE_SWITCH,
|
||||
CONF_GA_WRITE,
|
||||
)
|
||||
from .storage.entity_store_schema import LightColorMode
|
||||
from .storage.util import ConfigExtractor
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -203,94 +200,92 @@ def _create_yaml_light(xknx: XKNX, config: ConfigType) -> XknxLight:
|
||||
def _create_ui_light(xknx: XKNX, knx_config: ConfigType, name: str) -> XknxLight:
|
||||
"""Return a KNX Light device to be used within XKNX."""
|
||||
|
||||
def get_write(key: str) -> str | None:
|
||||
"""Get the write group address."""
|
||||
return knx_config[key][CONF_GA_WRITE] if key in knx_config else None
|
||||
|
||||
def get_state(key: str) -> list[Any] | None:
|
||||
"""Get the state group address."""
|
||||
return (
|
||||
[knx_config[key][CONF_GA_STATE], *knx_config[key][CONF_GA_PASSIVE]]
|
||||
if key in knx_config
|
||||
else None
|
||||
)
|
||||
|
||||
def get_dpt(key: str) -> str | None:
|
||||
"""Get the DPT."""
|
||||
return knx_config[key].get(CONF_DPT) if key in knx_config else None
|
||||
conf = ConfigExtractor(knx_config)
|
||||
|
||||
group_address_tunable_white = None
|
||||
group_address_tunable_white_state = None
|
||||
group_address_color_temp = None
|
||||
group_address_color_temp_state = None
|
||||
|
||||
color_temperature_type = ColorTemperatureType.UINT_2_BYTE
|
||||
if ga_color_temp := knx_config.get(CONF_GA_COLOR_TEMP):
|
||||
if ga_color_temp[CONF_DPT] == ColorTempModes.RELATIVE.value:
|
||||
group_address_tunable_white = ga_color_temp[CONF_GA_WRITE]
|
||||
group_address_tunable_white_state = [
|
||||
ga_color_temp[CONF_GA_STATE],
|
||||
*ga_color_temp[CONF_GA_PASSIVE],
|
||||
]
|
||||
if _color_temp_dpt := conf.get_dpt(CONF_GA_COLOR_TEMP):
|
||||
if _color_temp_dpt == ColorTempModes.RELATIVE.value:
|
||||
group_address_tunable_white = conf.get_write(CONF_GA_COLOR_TEMP)
|
||||
group_address_tunable_white_state = conf.get_state_and_passive(
|
||||
CONF_GA_COLOR_TEMP
|
||||
)
|
||||
else:
|
||||
# absolute uint or float
|
||||
group_address_color_temp = ga_color_temp[CONF_GA_WRITE]
|
||||
group_address_color_temp_state = [
|
||||
ga_color_temp[CONF_GA_STATE],
|
||||
*ga_color_temp[CONF_GA_PASSIVE],
|
||||
]
|
||||
if ga_color_temp[CONF_DPT] == ColorTempModes.ABSOLUTE_FLOAT.value:
|
||||
group_address_color_temp = conf.get_write(CONF_GA_COLOR_TEMP)
|
||||
group_address_color_temp_state = conf.get_state_and_passive(
|
||||
CONF_GA_COLOR_TEMP
|
||||
)
|
||||
if _color_temp_dpt == ColorTempModes.ABSOLUTE_FLOAT.value:
|
||||
color_temperature_type = ColorTemperatureType.FLOAT_2_BYTE
|
||||
|
||||
_color_dpt = get_dpt(CONF_GA_COLOR)
|
||||
color_dpt = conf.get_dpt(CONF_GA_COLOR)
|
||||
|
||||
return XknxLight(
|
||||
xknx,
|
||||
name=name,
|
||||
group_address_switch=get_write(CONF_GA_SWITCH),
|
||||
group_address_switch_state=get_state(CONF_GA_SWITCH),
|
||||
group_address_brightness=get_write(CONF_GA_BRIGHTNESS),
|
||||
group_address_brightness_state=get_state(CONF_GA_BRIGHTNESS),
|
||||
group_address_color=get_write(CONF_GA_COLOR)
|
||||
if _color_dpt == LightColorMode.RGB
|
||||
group_address_switch=conf.get_write(CONF_GA_SWITCH),
|
||||
group_address_switch_state=conf.get_state_and_passive(CONF_GA_SWITCH),
|
||||
group_address_brightness=conf.get_write(CONF_GA_BRIGHTNESS),
|
||||
group_address_brightness_state=conf.get_state_and_passive(CONF_GA_BRIGHTNESS),
|
||||
group_address_color=conf.get_write(CONF_GA_COLOR)
|
||||
if color_dpt == LightColorMode.RGB
|
||||
else None,
|
||||
group_address_color_state=get_state(CONF_GA_COLOR)
|
||||
if _color_dpt == LightColorMode.RGB
|
||||
group_address_color_state=conf.get_state_and_passive(CONF_GA_COLOR)
|
||||
if color_dpt == LightColorMode.RGB
|
||||
else None,
|
||||
group_address_rgbw=get_write(CONF_GA_COLOR)
|
||||
if _color_dpt == LightColorMode.RGBW
|
||||
group_address_rgbw=conf.get_write(CONF_GA_COLOR)
|
||||
if color_dpt == LightColorMode.RGBW
|
||||
else None,
|
||||
group_address_rgbw_state=get_state(CONF_GA_COLOR)
|
||||
if _color_dpt == LightColorMode.RGBW
|
||||
group_address_rgbw_state=conf.get_state_and_passive(CONF_GA_COLOR)
|
||||
if color_dpt == LightColorMode.RGBW
|
||||
else None,
|
||||
group_address_hue=get_write(CONF_GA_HUE),
|
||||
group_address_hue_state=get_state(CONF_GA_HUE),
|
||||
group_address_saturation=get_write(CONF_GA_SATURATION),
|
||||
group_address_saturation_state=get_state(CONF_GA_SATURATION),
|
||||
group_address_xyy_color=get_write(CONF_GA_COLOR)
|
||||
if _color_dpt == LightColorMode.XYY
|
||||
group_address_hue=conf.get_write(CONF_GA_HUE),
|
||||
group_address_hue_state=conf.get_state_and_passive(CONF_GA_HUE),
|
||||
group_address_saturation=conf.get_write(CONF_GA_SATURATION),
|
||||
group_address_saturation_state=conf.get_state_and_passive(CONF_GA_SATURATION),
|
||||
group_address_xyy_color=conf.get_write(CONF_GA_COLOR)
|
||||
if color_dpt == LightColorMode.XYY
|
||||
else None,
|
||||
group_address_xyy_color_state=get_write(CONF_GA_COLOR)
|
||||
if _color_dpt == LightColorMode.XYY
|
||||
group_address_xyy_color_state=conf.get_write(CONF_GA_COLOR)
|
||||
if color_dpt == LightColorMode.XYY
|
||||
else None,
|
||||
group_address_tunable_white=group_address_tunable_white,
|
||||
group_address_tunable_white_state=group_address_tunable_white_state,
|
||||
group_address_color_temperature=group_address_color_temp,
|
||||
group_address_color_temperature_state=group_address_color_temp_state,
|
||||
group_address_switch_red=get_write(CONF_GA_RED_SWITCH),
|
||||
group_address_switch_red_state=get_state(CONF_GA_RED_SWITCH),
|
||||
group_address_brightness_red=get_write(CONF_GA_RED_BRIGHTNESS),
|
||||
group_address_brightness_red_state=get_state(CONF_GA_RED_BRIGHTNESS),
|
||||
group_address_switch_green=get_write(CONF_GA_GREEN_SWITCH),
|
||||
group_address_switch_green_state=get_state(CONF_GA_GREEN_SWITCH),
|
||||
group_address_brightness_green=get_write(CONF_GA_GREEN_BRIGHTNESS),
|
||||
group_address_brightness_green_state=get_state(CONF_GA_GREEN_BRIGHTNESS),
|
||||
group_address_switch_blue=get_write(CONF_GA_BLUE_SWITCH),
|
||||
group_address_switch_blue_state=get_state(CONF_GA_BLUE_SWITCH),
|
||||
group_address_brightness_blue=get_write(CONF_GA_BLUE_BRIGHTNESS),
|
||||
group_address_brightness_blue_state=get_state(CONF_GA_BLUE_BRIGHTNESS),
|
||||
group_address_switch_white=get_write(CONF_GA_WHITE_SWITCH),
|
||||
group_address_switch_white_state=get_state(CONF_GA_WHITE_SWITCH),
|
||||
group_address_brightness_white=get_write(CONF_GA_WHITE_BRIGHTNESS),
|
||||
group_address_brightness_white_state=get_state(CONF_GA_WHITE_BRIGHTNESS),
|
||||
group_address_switch_red=conf.get_write(CONF_GA_RED_SWITCH),
|
||||
group_address_switch_red_state=conf.get_state_and_passive(CONF_GA_RED_SWITCH),
|
||||
group_address_brightness_red=conf.get_write(CONF_GA_RED_BRIGHTNESS),
|
||||
group_address_brightness_red_state=conf.get_state_and_passive(
|
||||
CONF_GA_RED_BRIGHTNESS
|
||||
),
|
||||
group_address_switch_green=conf.get_write(CONF_GA_GREEN_SWITCH),
|
||||
group_address_switch_green_state=conf.get_state_and_passive(
|
||||
CONF_GA_GREEN_SWITCH
|
||||
),
|
||||
group_address_brightness_green=conf.get_write(CONF_GA_GREEN_BRIGHTNESS),
|
||||
group_address_brightness_green_state=conf.get_state_and_passive(
|
||||
CONF_GA_GREEN_BRIGHTNESS
|
||||
),
|
||||
group_address_switch_blue=conf.get_write(CONF_GA_BLUE_SWITCH),
|
||||
group_address_switch_blue_state=conf.get_state_and_passive(CONF_GA_BLUE_SWITCH),
|
||||
group_address_brightness_blue=conf.get_write(CONF_GA_BLUE_BRIGHTNESS),
|
||||
group_address_brightness_blue_state=conf.get_state_and_passive(
|
||||
CONF_GA_BLUE_BRIGHTNESS
|
||||
),
|
||||
group_address_switch_white=conf.get_write(CONF_GA_WHITE_SWITCH),
|
||||
group_address_switch_white_state=conf.get_state_and_passive(
|
||||
CONF_GA_WHITE_SWITCH
|
||||
),
|
||||
group_address_brightness_white=conf.get_write(CONF_GA_WHITE_BRIGHTNESS),
|
||||
group_address_brightness_white_state=conf.get_state_and_passive(
|
||||
CONF_GA_WHITE_BRIGHTNESS
|
||||
),
|
||||
color_temperature_type=color_temperature_type,
|
||||
min_kelvin=knx_config[CONF_COLOR_TEMP_MIN],
|
||||
max_kelvin=knx_config[CONF_COLOR_TEMP_MAX],
|
||||
|
||||
@@ -41,7 +41,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@callback
|
||||
def register_knx_services(hass: HomeAssistant) -> None:
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register KNX integration services."""
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
|
||||
51
homeassistant/components/knx/storage/util.py
Normal file
51
homeassistant/components/knx/storage/util.py
Normal file
@@ -0,0 +1,51 @@
|
||||
"""Utility functions for the KNX integration."""
|
||||
|
||||
from functools import partial
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import CONF_DPT, CONF_GA_PASSIVE, CONF_GA_STATE, CONF_GA_WRITE
|
||||
|
||||
|
||||
def nested_get(dic: ConfigType, *keys: str, default: Any | None = None) -> Any:
|
||||
"""Get the value from a nested dictionary."""
|
||||
for key in keys:
|
||||
if key not in dic:
|
||||
return default
|
||||
dic = dic[key]
|
||||
return dic
|
||||
|
||||
|
||||
class ConfigExtractor:
|
||||
"""Helper class for extracting values from a knx config store dictionary."""
|
||||
|
||||
__slots__ = ("get",)
|
||||
|
||||
def __init__(self, config: ConfigType) -> None:
|
||||
"""Initialize the extractor."""
|
||||
self.get = partial(nested_get, config)
|
||||
|
||||
def get_write(self, *path: str) -> str | None:
|
||||
"""Get the write group address."""
|
||||
return self.get(*path, CONF_GA_WRITE) # type: ignore[no-any-return]
|
||||
|
||||
def get_state(self, *path: str) -> str | None:
|
||||
"""Get the state group address."""
|
||||
return self.get(*path, CONF_GA_STATE) # type: ignore[no-any-return]
|
||||
|
||||
def get_write_and_passive(self, *path: str) -> list[Any | None]:
|
||||
"""Get the group addresses of write and passive."""
|
||||
write = self.get(*path, CONF_GA_WRITE)
|
||||
passive = self.get(*path, CONF_GA_PASSIVE)
|
||||
return [write, *passive] if passive else [write]
|
||||
|
||||
def get_state_and_passive(self, *path: str) -> list[Any | None]:
|
||||
"""Get the group addresses of state and passive."""
|
||||
state = self.get(*path, CONF_GA_STATE)
|
||||
passive = self.get(*path, CONF_GA_PASSIVE)
|
||||
return [state, *passive] if passive else [state]
|
||||
|
||||
def get_dpt(self, *path: str) -> str | None:
|
||||
"""Get the data point type of a group address config key."""
|
||||
return self.get(*path, CONF_DPT) # type: ignore[no-any-return]
|
||||
@@ -36,13 +36,8 @@ from .const import (
|
||||
)
|
||||
from .entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity
|
||||
from .schema import SwitchSchema
|
||||
from .storage.const import (
|
||||
CONF_ENTITY,
|
||||
CONF_GA_PASSIVE,
|
||||
CONF_GA_STATE,
|
||||
CONF_GA_SWITCH,
|
||||
CONF_GA_WRITE,
|
||||
)
|
||||
from .storage.const import CONF_ENTITY, CONF_GA_SWITCH
|
||||
from .storage.util import ConfigExtractor
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -142,15 +137,13 @@ class KnxUiSwitch(_KnxSwitch, KnxUiEntity):
|
||||
unique_id=unique_id,
|
||||
entity_config=config[CONF_ENTITY],
|
||||
)
|
||||
knx_conf = ConfigExtractor(config[DOMAIN])
|
||||
self._device = XknxSwitch(
|
||||
knx_module.xknx,
|
||||
name=config[CONF_ENTITY][CONF_NAME],
|
||||
group_address=config[DOMAIN][CONF_GA_SWITCH][CONF_GA_WRITE],
|
||||
group_address_state=[
|
||||
config[DOMAIN][CONF_GA_SWITCH][CONF_GA_STATE],
|
||||
*config[DOMAIN][CONF_GA_SWITCH][CONF_GA_PASSIVE],
|
||||
],
|
||||
respond_to_read=config[DOMAIN][CONF_RESPOND_TO_READ],
|
||||
sync_state=config[DOMAIN][CONF_SYNC_STATE],
|
||||
invert=config[DOMAIN][CONF_INVERT],
|
||||
group_address=knx_conf.get_write(CONF_GA_SWITCH),
|
||||
group_address_state=knx_conf.get_state_and_passive(CONF_GA_SWITCH),
|
||||
respond_to_read=knx_conf.get(CONF_RESPOND_TO_READ),
|
||||
sync_state=knx_conf.get(CONF_SYNC_STATE),
|
||||
invert=knx_conf.get(CONF_INVERT),
|
||||
)
|
||||
|
||||
@@ -104,7 +104,11 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: LcnConfigEntry) -
|
||||
) as ex:
|
||||
await lcn_connection.async_close()
|
||||
raise ConfigEntryNotReady(
|
||||
f"Unable to connect to {config_entry.title}: {ex}"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_connect",
|
||||
translation_placeholders={
|
||||
"config_entry_title": config_entry.title,
|
||||
},
|
||||
) from ex
|
||||
|
||||
_LOGGER.info('LCN connected to "%s"', config_entry.title)
|
||||
|
||||
@@ -26,6 +26,7 @@ from homeassistant.const import (
|
||||
CONF_SWITCHES,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
@@ -100,7 +101,11 @@ def get_resource(domain_name: str, domain_data: ConfigType) -> str:
|
||||
return cast(str, domain_data["setpoint"])
|
||||
if domain_name == "scene":
|
||||
return f"{domain_data['register']}{domain_data['scene']}"
|
||||
raise ValueError("Unknown domain")
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_domain",
|
||||
translation_placeholders={CONF_DOMAIN: domain_name},
|
||||
)
|
||||
|
||||
|
||||
def generate_unique_id(
|
||||
@@ -304,6 +309,8 @@ def get_device_config(
|
||||
def is_states_string(states_string: str) -> list[str]:
|
||||
"""Validate the given states string and return states list."""
|
||||
if len(states_string) != 8:
|
||||
raise ValueError("Invalid length of states string")
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="invalid_length_of_states_string"
|
||||
)
|
||||
states = {"1": "ON", "0": "OFF", "T": "TOGGLE", "-": "NOCHANGE"}
|
||||
return [states[state_string] for state_string in states_string]
|
||||
|
||||
@@ -9,5 +9,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pypck"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pypck==0.8.9", "lcn-frontend==0.2.5"]
|
||||
"requirements": ["pypck==0.8.10", "lcn-frontend==0.2.5"]
|
||||
}
|
||||
|
||||
@@ -19,7 +19,7 @@ rules:
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
|
||||
@@ -330,8 +330,9 @@ class SendKeys(LcnServiceCall):
|
||||
if (delay_time := service.data[CONF_TIME]) != 0:
|
||||
hit = pypck.lcn_defs.SendKeyCommand.HIT
|
||||
if pypck.lcn_defs.SendKeyCommand[service.data[CONF_STATE]] != hit:
|
||||
raise ValueError(
|
||||
"Only hit command is allowed when sending deferred keys."
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_send_keys_action",
|
||||
)
|
||||
delay_unit = pypck.lcn_defs.TimeUnit.parse(service.data[CONF_TIME_UNIT])
|
||||
await device_connection.send_keys_hit_deferred(keys, delay_time, delay_unit)
|
||||
@@ -368,8 +369,9 @@ class LockKeys(LcnServiceCall):
|
||||
|
||||
if (delay_time := service.data[CONF_TIME]) != 0:
|
||||
if table_id != 0:
|
||||
raise ValueError(
|
||||
"Only table A is allowed when locking keys for a specific time."
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_lock_keys_table",
|
||||
)
|
||||
delay_unit = pypck.lcn_defs.TimeUnit.parse(service.data[CONF_TIME_UNIT])
|
||||
await device_connection.lock_keys_tab_a_temporary(
|
||||
|
||||
@@ -414,11 +414,23 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"invalid_address": {
|
||||
"message": "LCN device for given address has not been configured."
|
||||
"cannot_connect": {
|
||||
"message": "Unable to connect to {config_entry_title}."
|
||||
},
|
||||
"invalid_device_id": {
|
||||
"message": "LCN device for given device ID has not been configured."
|
||||
"message": "LCN device for given device ID {device_id} has not been configured."
|
||||
},
|
||||
"invalid_domain": {
|
||||
"message": "Invalid domain {domain}."
|
||||
},
|
||||
"invalid_send_keys_action": {
|
||||
"message": "Invalid state for sending keys. Only 'hit' allowed for deferred sending."
|
||||
},
|
||||
"invalid_lock_keys_table": {
|
||||
"message": "Invalid table for locking keys. Only table A allowed when locking for a specific time."
|
||||
},
|
||||
"invalid_length_of_states_string": {
|
||||
"message": "Invalid length of states string. Expected 8 characters."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/ld2410_ble",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["bluetooth-data-tools==1.28.1", "ld2410-ble==0.1.1"]
|
||||
"requirements": ["bluetooth-data-tools==1.28.2", "ld2410-ble==0.1.1"]
|
||||
}
|
||||
|
||||
@@ -35,5 +35,5 @@
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/led_ble",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["bluetooth-data-tools==1.28.1", "led-ble==1.1.7"]
|
||||
"requirements": ["bluetooth-data-tools==1.28.2", "led-ble==1.1.7"]
|
||||
}
|
||||
|
||||
@@ -780,10 +780,10 @@
|
||||
"battery_level": {
|
||||
"name": "Battery",
|
||||
"state": {
|
||||
"high": "Full",
|
||||
"high": "[%key:common::state::full%]",
|
||||
"mid": "[%key:common::state::medium%]",
|
||||
"low": "[%key:common::state::low%]",
|
||||
"warning": "Empty"
|
||||
"warning": "[%key:common::state::empty%]"
|
||||
}
|
||||
},
|
||||
"relative_to_start": {
|
||||
|
||||
@@ -70,7 +70,7 @@
|
||||
"motor_fault_short": "Motor shorted",
|
||||
"motor_ot_amps": "Motor overtorqued",
|
||||
"motor_disconnected": "Motor disconnected",
|
||||
"empty": "Empty"
|
||||
"empty": "[%key:common::state::empty%]"
|
||||
}
|
||||
},
|
||||
"last_seen": {
|
||||
|
||||
@@ -200,7 +200,7 @@ async def async_remove_config_entry_device(
|
||||
hass: HomeAssistant, entry: LookinConfigEntry, device_entry: dr.DeviceEntry
|
||||
) -> bool:
|
||||
"""Remove lookin config entry from a device."""
|
||||
data: LookinData = hass.data[DOMAIN][entry.entry_id]
|
||||
data = entry.runtime_data
|
||||
all_identifiers: set[tuple[str, str]] = {
|
||||
(DOMAIN, data.lookin_device.id),
|
||||
*((DOMAIN, remote["UUID"]) for remote in data.devices),
|
||||
|
||||
@@ -45,7 +45,7 @@ from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.json import JsonObjectType, load_json_object
|
||||
|
||||
from .const import ATTR_FORMAT, ATTR_IMAGES, CONF_ROOMS_REGEX, DOMAIN, FORMAT_HTML
|
||||
from .services import register_services
|
||||
from .services import async_setup_services
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -128,7 +128,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
config[CONF_COMMANDS],
|
||||
)
|
||||
|
||||
register_services(hass)
|
||||
async_setup_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["matrix_client"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["matrix-nio==0.25.2", "Pillow==11.2.1"]
|
||||
"requirements": ["matrix-nio==0.25.2", "Pillow==11.3.0"]
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ from typing import TYPE_CHECKING
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.notify import ATTR_DATA, ATTR_MESSAGE, ATTR_TARGET
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
from .const import (
|
||||
@@ -50,7 +50,8 @@ async def _handle_send_message(call: ServiceCall) -> None:
|
||||
await matrix_bot.handle_send_message(call)
|
||||
|
||||
|
||||
def register_services(hass: HomeAssistant) -> None:
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the Matrix bot component."""
|
||||
|
||||
hass.services.async_register(
|
||||
|
||||
@@ -7,6 +7,6 @@
|
||||
"dependencies": ["websocket_api"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/matter",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["python-matter-server==7.0.0"],
|
||||
"requirements": ["python-matter-server==8.0.0"],
|
||||
"zeroconf": ["_matter._tcp.local.", "_matterc._udp.local."]
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ from typing import Any, cast
|
||||
|
||||
from chip.clusters import Objects as clusters
|
||||
from chip.clusters.ClusterObjects import ClusterAttributeDescriptor, ClusterCommand
|
||||
from matter_server.client.models import device_types
|
||||
from matter_server.common import custom_clusters
|
||||
|
||||
from homeassistant.components.number import (
|
||||
@@ -18,6 +19,7 @@ from homeassistant.components.number import (
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
PERCENTAGE,
|
||||
EntityCategory,
|
||||
Platform,
|
||||
UnitOfLength,
|
||||
@@ -123,6 +125,31 @@ class MatterRangeNumber(MatterEntity, NumberEntity):
|
||||
)
|
||||
|
||||
|
||||
class MatterLevelControlNumber(MatterEntity, NumberEntity):
|
||||
"""Representation of a Matter Attribute as a Number entity."""
|
||||
|
||||
entity_description: MatterNumberEntityDescription
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Set level value."""
|
||||
send_value = int(value)
|
||||
if value_convert := self.entity_description.ha_to_native_value:
|
||||
send_value = value_convert(value)
|
||||
await self.send_device_command(
|
||||
clusters.LevelControl.Commands.MoveToLevel(
|
||||
level=send_value,
|
||||
)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update_from_device(self) -> None:
|
||||
"""Update from device."""
|
||||
value = self.get_matter_attribute_value(self._entity_info.primary_attribute)
|
||||
if value_convert := self.entity_description.measurement_to_ha:
|
||||
value = value_convert(value)
|
||||
self._attr_native_value = value
|
||||
|
||||
|
||||
# Discovery schema(s) to map Matter Attributes to HA entities
|
||||
DISCOVERY_SCHEMAS = [
|
||||
MatterDiscoverySchema(
|
||||
@@ -239,6 +266,26 @@ DISCOVERY_SCHEMAS = [
|
||||
),
|
||||
vendor_id=(4874,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.NUMBER,
|
||||
entity_description=MatterNumberEntityDescription(
|
||||
key="pump_setpoint",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
translation_key="pump_setpoint",
|
||||
native_max_value=100,
|
||||
native_min_value=0.5,
|
||||
native_step=0.5,
|
||||
measurement_to_ha=(
|
||||
lambda x: None if x is None else x / 2 # Matter range (1-200)
|
||||
),
|
||||
ha_to_native_value=lambda x: round(x * 2), # HA range 0.5–100.0%
|
||||
mode=NumberMode.SLIDER,
|
||||
),
|
||||
entity_class=MatterLevelControlNumber,
|
||||
required_attributes=(clusters.LevelControl.Attributes.CurrentLevel,),
|
||||
device_type=(device_types.Pump,),
|
||||
allow_multi=True,
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.NUMBER,
|
||||
entity_description=MatterNumberEntityDescription(
|
||||
|
||||
@@ -180,6 +180,9 @@
|
||||
"altitude": {
|
||||
"name": "Altitude above sea level"
|
||||
},
|
||||
"pump_setpoint": {
|
||||
"name": "Setpoint"
|
||||
},
|
||||
"temperature_offset": {
|
||||
"name": "Temperature offset"
|
||||
},
|
||||
|
||||
@@ -32,11 +32,18 @@ class ModelContextServerProtocolConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
llm_apis = {api.id: api.name for api in llm.async_get_apis(self.hass)}
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(
|
||||
title=llm_apis[user_input[CONF_LLM_HASS_API]], data=user_input
|
||||
)
|
||||
if not user_input[CONF_LLM_HASS_API]:
|
||||
errors[CONF_LLM_HASS_API] = "llm_api_required"
|
||||
else:
|
||||
return self.async_create_entry(
|
||||
title=", ".join(
|
||||
llm_apis[api_id] for api_id in user_input[CONF_LLM_HASS_API]
|
||||
),
|
||||
data=user_input,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
@@ -44,7 +51,7 @@ class ModelContextServerProtocolConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_LLM_HASS_API,
|
||||
default=llm.LLM_API_ASSIST,
|
||||
default=[llm.LLM_API_ASSIST],
|
||||
): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[
|
||||
@@ -53,10 +60,12 @@ class ModelContextServerProtocolConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
value=llm_api_id,
|
||||
)
|
||||
for llm_api_id, name in llm_apis.items()
|
||||
]
|
||||
],
|
||||
multiple=True,
|
||||
)
|
||||
),
|
||||
}
|
||||
),
|
||||
description_placeholders={"more_info_url": MORE_INFO_URL},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user