forked from home-assistant/core
Compare commits
274 Commits
climate-to
...
config_sub
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c0ef7212b2 | ||
|
|
92dad98b14 | ||
|
|
f6214a22c1 | ||
|
|
8541e6e9cd | ||
|
|
1fbf929819 | ||
|
|
07734239b4 | ||
|
|
82b13d6b75 | ||
|
|
3ef9f7360f | ||
|
|
fa6cc8edfe | ||
|
|
a12a42710f | ||
|
|
a4653bb8dc | ||
|
|
d2bf58e1ba | ||
|
|
53fd84a5a4 | ||
|
|
6b4c27e700 | ||
|
|
04754ac83a | ||
|
|
42e465a4f3 | ||
|
|
6fca5022b1 | ||
|
|
68f8c3e9ed | ||
|
|
90b2504d5a | ||
|
|
875727ed27 | ||
|
|
f1c62000e1 | ||
|
|
e38f21c4ef | ||
|
|
00c052bb22 | ||
|
|
111ef13a3f | ||
|
|
89c73f56b1 | ||
|
|
d13c14eedb | ||
|
|
9532e98166 | ||
|
|
6884d790ca | ||
|
|
6ab45f8c9e | ||
|
|
7009a96711 | ||
|
|
a47fa08a9b | ||
|
|
4eb23f3039 | ||
|
|
1c314b5c02 | ||
|
|
edee58f114 | ||
|
|
ef652e57d1 | ||
|
|
b956aa68da | ||
|
|
75ce89dc41 | ||
|
|
a9540e893f | ||
|
|
dd5625436b | ||
|
|
7a484ee0ae | ||
|
|
e5c5d1bcfd | ||
|
|
56a9cd010e | ||
|
|
b7b5577f0c | ||
|
|
0787257cc0 | ||
|
|
54263f1325 | ||
|
|
14d2f2c589 | ||
|
|
c533f63a87 | ||
|
|
cd30f75be9 | ||
|
|
527775a5f1 | ||
|
|
99d7f462a0 | ||
|
|
67e2379d2b | ||
|
|
fb0047ead0 | ||
|
|
9764d704bd | ||
|
|
3690d7c2b4 | ||
|
|
204b5989e0 | ||
|
|
3892f6d8f3 | ||
|
|
140ff50eaf | ||
|
|
5ef06b1f33 | ||
|
|
9638bee8de | ||
|
|
cd88a8cebd | ||
|
|
d896b4e66a | ||
|
|
e4eb414be8 | ||
|
|
fce5be928e | ||
|
|
c4455c709b | ||
|
|
2c7a1446b8 | ||
|
|
20cf21d88e | ||
|
|
eafbf1d1fd | ||
|
|
acd95975e4 | ||
|
|
bc22e34fc3 | ||
|
|
bf0cf1c30f | ||
|
|
e95bfe438b | ||
|
|
0a457979ec | ||
|
|
2f295efb3f | ||
|
|
74613ae0c4 | ||
|
|
4d4cfabfba | ||
|
|
7ae81bae4c | ||
|
|
7ec10bfd6f | ||
|
|
d662a4465c | ||
|
|
66b4b24612 | ||
|
|
a2077405e2 | ||
|
|
f0a1a6c2ad | ||
|
|
32b7b5aa66 | ||
|
|
871a7d0dc1 | ||
|
|
da807001ab | ||
|
|
a104799893 | ||
|
|
45d1624d70 | ||
|
|
1059cf3f07 | ||
|
|
dd34a10934 | ||
|
|
d4f3dd2335 | ||
|
|
0ecb1ea8cf | ||
|
|
3d5a42749d | ||
|
|
a2c2d37eb1 | ||
|
|
f68c16586d | ||
|
|
11d80065ef | ||
|
|
7012648bf8 | ||
|
|
d96b2499e2 | ||
|
|
a41bdfe0cc | ||
|
|
0d3872a4c7 | ||
|
|
65d8d071dd | ||
|
|
bb97a16756 | ||
|
|
c9a607aa45 | ||
|
|
c7993eff99 | ||
|
|
8a880d6134 | ||
|
|
cc0fb80481 | ||
|
|
276806d3e1 | ||
|
|
0589df7d95 | ||
|
|
aab676a313 | ||
|
|
7f473b8260 | ||
|
|
fea4a00424 | ||
|
|
7d146ddae0 | ||
|
|
8f06e0903f | ||
|
|
677ba3a6a6 | ||
|
|
a322deaab8 | ||
|
|
584439cade | ||
|
|
baa13debcc | ||
|
|
1d42890748 | ||
|
|
622d23cadd | ||
|
|
ebeb2ecb09 | ||
|
|
b3cb2928fc | ||
|
|
b639466453 | ||
|
|
69241e4ca6 | ||
|
|
80371a865e | ||
|
|
c9dbb205dd | ||
|
|
197ff932af | ||
|
|
287b7eec13 | ||
|
|
e6da6d9612 | ||
|
|
d4f38099ae | ||
|
|
9f2cb7bf56 | ||
|
|
8a84abd50f | ||
|
|
b15e08ca9c | ||
|
|
3fb980901e | ||
|
|
bd3a3fd26c | ||
|
|
dfcb977a1d | ||
|
|
94ad6ae814 | ||
|
|
97aa93f92b | ||
|
|
ee025198e8 | ||
|
|
90265e2afd | ||
|
|
a53554dad3 | ||
|
|
2b6ad84cf5 | ||
|
|
92655fd640 | ||
|
|
e43f72c452 | ||
|
|
9320ccfa4f | ||
|
|
336af8b551 | ||
|
|
8a2f8dc736 | ||
|
|
dc048bfcf5 | ||
|
|
fb474827b5 | ||
|
|
eec5fb2133 | ||
|
|
8ad7c522f4 | ||
|
|
c7f6630718 | ||
|
|
afa95293dc | ||
|
|
36582f9ac2 | ||
|
|
19852ecc24 | ||
|
|
5726d090b0 | ||
|
|
add401ffcf | ||
|
|
fd12ae2ccd | ||
|
|
e15eda3aa2 | ||
|
|
cc0adcf47f | ||
|
|
06580ce10f | ||
|
|
b78e39da2d | ||
|
|
46824a2a53 | ||
|
|
ee01289ee8 | ||
|
|
0bd22eabc7 | ||
|
|
c901352bef | ||
|
|
23ed62c1bc | ||
|
|
0ef254bc9a | ||
|
|
629d108078 | ||
|
|
6f3544fa47 | ||
|
|
cb389d29ea | ||
|
|
ac26ca2da5 | ||
|
|
d5bcb73d33 | ||
|
|
e6a18357db | ||
|
|
13ec0659ff | ||
|
|
a7fb20ab58 | ||
|
|
657da47458 | ||
|
|
a4708876a9 | ||
|
|
4239c5b557 | ||
|
|
836354bb99 | ||
|
|
a7af042e57 | ||
|
|
09476ade82 | ||
|
|
25937d7868 | ||
|
|
4e74d14beb | ||
|
|
309b7eb436 | ||
|
|
cf238cd8f7 | ||
|
|
ee46edffa3 | ||
|
|
876b3423ba | ||
|
|
2752a35e23 | ||
|
|
9e8df72c0d | ||
|
|
5439613bff | ||
|
|
3b5455bc49 | ||
|
|
104151d322 | ||
|
|
a329828bdf | ||
|
|
aa9e721e8b | ||
|
|
1b49f88be9 | ||
|
|
c345f2d548 | ||
|
|
1d731875ae | ||
|
|
0c3489c1b3 | ||
|
|
c5865c6d18 | ||
|
|
e1a0fb2f1a | ||
|
|
d725cdae13 | ||
|
|
e1bd82ea32 | ||
|
|
4bcc551b61 | ||
|
|
08019e76d8 | ||
|
|
0b32342bf0 | ||
|
|
add4e1a708 | ||
|
|
fb3105bdc0 | ||
|
|
3845acd0ce | ||
|
|
b45c68554c | ||
|
|
8a45aa4c42 | ||
|
|
51ccba12af | ||
|
|
c8699dc066 | ||
|
|
87454babfa | ||
|
|
c9ff575628 | ||
|
|
877d16273b | ||
|
|
dc5bfba902 | ||
|
|
5e7a405f34 | ||
|
|
5228f3d85c | ||
|
|
2efc75fdf5 | ||
|
|
a435fd12f0 | ||
|
|
a5d0c3528c | ||
|
|
5e981d00a4 | ||
|
|
97dc72a6e2 | ||
|
|
088b097a03 | ||
|
|
85c94e6403 | ||
|
|
a2ef1604af | ||
|
|
55dc4b0d2c | ||
|
|
18e8a3b185 | ||
|
|
3a68a0a67f | ||
|
|
7ab2d2e07a | ||
|
|
809629c0e2 | ||
|
|
2be578a33f | ||
|
|
5cff79ce50 | ||
|
|
513c8487c5 | ||
|
|
031de8da51 | ||
|
|
2e1463b9e9 | ||
|
|
9a58440296 | ||
|
|
26e0fcdb08 | ||
|
|
e835e41d59 | ||
|
|
c53c0a13be | ||
|
|
8098122dfe | ||
|
|
1d6ecbd1d5 | ||
|
|
c8276ec325 | ||
|
|
ddfad614ab | ||
|
|
8eb21749b5 | ||
|
|
a6ba25d3d4 | ||
|
|
1e70a0060b | ||
|
|
6c47f03d17 | ||
|
|
2054988790 | ||
|
|
f1ad3040b8 | ||
|
|
53ca31c112 | ||
|
|
23459a0355 | ||
|
|
a8bfe285bf | ||
|
|
0888d1a169 | ||
|
|
8b20272272 | ||
|
|
06b33e5589 | ||
|
|
9348569f90 | ||
|
|
4a9d545ffe | ||
|
|
277ee03145 | ||
|
|
6c9c17f129 | ||
|
|
bf59241dab | ||
|
|
57b7635b70 | ||
|
|
4b96266647 | ||
|
|
6266a4153d | ||
|
|
a9949a0aab | ||
|
|
428a74fa48 | ||
|
|
9f1023b195 | ||
|
|
256fc54aa1 | ||
|
|
94c1b9a434 | ||
|
|
275c15e2ae | ||
|
|
9cdbcd93cd | ||
|
|
f2e856b8a2 | ||
|
|
820f04e1e1 | ||
|
|
b7541f098c | ||
|
|
a345e80368 | ||
|
|
7a3d9a9345 |
8
.github/workflows/wheels.yml
vendored
8
.github/workflows/wheels.yml
vendored
@@ -166,7 +166,7 @@ jobs:
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
|
||||
apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-ng-dev"
|
||||
skip-binary: aiohttp;multidict;propcache;yarl;SQLAlchemy
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
@@ -234,7 +234,7 @@ jobs:
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
@@ -248,7 +248,7 @@ jobs:
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
@@ -262,7 +262,7 @@ jobs:
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.8.3
|
||||
rev: v0.8.6
|
||||
hooks:
|
||||
- id: ruff
|
||||
args:
|
||||
|
||||
@@ -311,6 +311,7 @@ homeassistant.components.manual.*
|
||||
homeassistant.components.mastodon.*
|
||||
homeassistant.components.matrix.*
|
||||
homeassistant.components.matter.*
|
||||
homeassistant.components.mcp_server.*
|
||||
homeassistant.components.mealie.*
|
||||
homeassistant.components.media_extractor.*
|
||||
homeassistant.components.media_player.*
|
||||
@@ -364,6 +365,7 @@ homeassistant.components.otbr.*
|
||||
homeassistant.components.overkiz.*
|
||||
homeassistant.components.overseerr.*
|
||||
homeassistant.components.p1_monitor.*
|
||||
homeassistant.components.pandora.*
|
||||
homeassistant.components.panel_custom.*
|
||||
homeassistant.components.peblar.*
|
||||
homeassistant.components.peco.*
|
||||
@@ -381,6 +383,7 @@ homeassistant.components.pure_energie.*
|
||||
homeassistant.components.purpleair.*
|
||||
homeassistant.components.pushbullet.*
|
||||
homeassistant.components.pvoutput.*
|
||||
homeassistant.components.python_script.*
|
||||
homeassistant.components.qnap_qsw.*
|
||||
homeassistant.components.rabbitair.*
|
||||
homeassistant.components.radarr.*
|
||||
|
||||
@@ -637,6 +637,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/homeassistant_sky_connect/ @home-assistant/core
|
||||
/homeassistant/components/homeassistant_yellow/ @home-assistant/core
|
||||
/tests/components/homeassistant_yellow/ @home-assistant/core
|
||||
/homeassistant/components/homee/ @Taraman17
|
||||
/tests/components/homee/ @Taraman17
|
||||
/homeassistant/components/homekit/ @bdraco
|
||||
/tests/components/homekit/ @bdraco
|
||||
/homeassistant/components/homekit_controller/ @Jc2k @bdraco
|
||||
@@ -686,6 +688,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/icloud/ @Quentame @nzapponi
|
||||
/homeassistant/components/idasen_desk/ @abmantis
|
||||
/tests/components/idasen_desk/ @abmantis
|
||||
/homeassistant/components/igloohome/ @keithle888
|
||||
/tests/components/igloohome/ @keithle888
|
||||
/homeassistant/components/ign_sismologia/ @exxamalte
|
||||
/tests/components/ign_sismologia/ @exxamalte
|
||||
/homeassistant/components/image/ @home-assistant/core
|
||||
@@ -887,6 +891,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/matrix/ @PaarthShah
|
||||
/homeassistant/components/matter/ @home-assistant/matter
|
||||
/tests/components/matter/ @home-assistant/matter
|
||||
/homeassistant/components/mcp_server/ @allenporter
|
||||
/tests/components/mcp_server/ @allenporter
|
||||
/homeassistant/components/mealie/ @joostlek @andrew-codechimp
|
||||
/tests/components/mealie/ @joostlek @andrew-codechimp
|
||||
/homeassistant/components/meater/ @Sotolotl @emontnemery
|
||||
|
||||
@@ -89,7 +89,7 @@ from .helpers import (
|
||||
)
|
||||
from .helpers.dispatcher import async_dispatcher_send_internal
|
||||
from .helpers.storage import get_internal_store_manager
|
||||
from .helpers.system_info import async_get_system_info, is_official_image
|
||||
from .helpers.system_info import async_get_system_info
|
||||
from .helpers.typing import ConfigType
|
||||
from .setup import (
|
||||
# _setup_started is marked as protected to make it clear
|
||||
@@ -106,6 +106,7 @@ from .util.async_ import create_eager_task
|
||||
from .util.hass_dict import HassKey
|
||||
from .util.logging import async_activate_log_queue_handler
|
||||
from .util.package import async_get_user_site, is_docker_env, is_virtual_env
|
||||
from .util.system_info import is_official_image
|
||||
|
||||
with contextlib.suppress(ImportError):
|
||||
# Ensure anyio backend is imported to avoid it being imported in the event loop
|
||||
|
||||
@@ -34,17 +34,17 @@
|
||||
"services": {
|
||||
"capture_image": {
|
||||
"name": "Capture image",
|
||||
"description": "Request a new image capture from a camera device.",
|
||||
"description": "Requests a new image capture from a camera device.",
|
||||
"fields": {
|
||||
"entity_id": {
|
||||
"name": "Entity",
|
||||
"description": "Entity id of the camera to request an image."
|
||||
"description": "Entity ID of the camera to request an image from."
|
||||
}
|
||||
}
|
||||
},
|
||||
"change_setting": {
|
||||
"name": "Change setting",
|
||||
"description": "Change an Abode system setting.",
|
||||
"description": "Changes an Abode system setting.",
|
||||
"fields": {
|
||||
"setting": {
|
||||
"name": "Setting",
|
||||
@@ -58,11 +58,11 @@
|
||||
},
|
||||
"trigger_automation": {
|
||||
"name": "Trigger automation",
|
||||
"description": "Trigger an Abode automation.",
|
||||
"description": "Triggers an Abode automation.",
|
||||
"fields": {
|
||||
"entity_id": {
|
||||
"name": "Entity",
|
||||
"description": "Entity id of the automation to trigger."
|
||||
"description": "Entity ID of the automation to trigger."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,5 +26,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aioacaia"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioacaia==0.1.11"]
|
||||
"requirements": ["aioacaia==0.1.13"]
|
||||
}
|
||||
|
||||
@@ -39,45 +39,54 @@ SENSORS: dict[str, SensorEntityDescription] = {
|
||||
key="temp",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"humidity": SensorEntityDescription(
|
||||
key="humidity",
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"pressure": SensorEntityDescription(
|
||||
key="pressure",
|
||||
device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
|
||||
native_unit_of_measurement=UnitOfPressure.MBAR,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"battery": SensorEntityDescription(
|
||||
key="battery",
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"co2": SensorEntityDescription(
|
||||
key="co2",
|
||||
device_class=SensorDeviceClass.CO2,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"voc": SensorEntityDescription(
|
||||
key="voc",
|
||||
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"light": SensorEntityDescription(
|
||||
key="light",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
translation_key="light",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"virusRisk": SensorEntityDescription(
|
||||
key="virusRisk",
|
||||
translation_key="virus_risk",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"mold": SensorEntityDescription(
|
||||
key="mold",
|
||||
translation_key="mold",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"rssi": SensorEntityDescription(
|
||||
key="rssi",
|
||||
@@ -85,16 +94,19 @@ SENSORS: dict[str, SensorEntityDescription] = {
|
||||
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"pm1": SensorEntityDescription(
|
||||
key="pm1",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM1,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"pm25": SensorEntityDescription(
|
||||
key="pm25",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM25,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"invalid_unique_id": "Impossible to determine a valid unique id for the device"
|
||||
"invalid_unique_id": "Impossible to determine a valid unique ID for the device"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
@@ -38,17 +38,17 @@
|
||||
}
|
||||
},
|
||||
"apps": {
|
||||
"title": "Configure Android Apps",
|
||||
"description": "Configure application id {app_id}",
|
||||
"title": "Configure Android apps",
|
||||
"description": "Configure application ID {app_id}",
|
||||
"data": {
|
||||
"app_name": "Application Name",
|
||||
"app_name": "Application name",
|
||||
"app_id": "Application ID",
|
||||
"app_delete": "Check to delete this application"
|
||||
}
|
||||
},
|
||||
"rules": {
|
||||
"title": "Configure Android state detection rules",
|
||||
"description": "Configure detection rule for application id {rule_id}",
|
||||
"description": "Configure detection rule for application ID {rule_id}",
|
||||
"data": {
|
||||
"rule_id": "[%key:component::androidtv::options::step::apps::data::app_id%]",
|
||||
"rule_values": "List of state detection rules (see documentation)",
|
||||
|
||||
@@ -44,12 +44,12 @@
|
||||
}
|
||||
},
|
||||
"apps": {
|
||||
"title": "Configure Android Apps",
|
||||
"description": "Configure application id {app_id}",
|
||||
"title": "Configure Android apps",
|
||||
"description": "Configure application ID {app_id}",
|
||||
"data": {
|
||||
"app_name": "Application Name",
|
||||
"app_name": "Application name",
|
||||
"app_id": "Application ID",
|
||||
"app_icon": "Application Icon",
|
||||
"app_icon": "Application icon",
|
||||
"app_delete": "Check to delete this application"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -98,7 +98,6 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
VERSION = 1
|
||||
|
||||
scan_filter: str | None = None
|
||||
all_identifiers: set[str]
|
||||
atv: BaseConfig | None = None
|
||||
atv_identifiers: list[str] | None = None
|
||||
_host: str # host in zeroconf discovery info, should not be accessed by other flows
|
||||
@@ -118,6 +117,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
def __init__(self) -> None:
|
||||
"""Initialize a new AppleTVConfigFlow."""
|
||||
self.credentials: dict[int, str | None] = {} # Protocol -> credentials
|
||||
self.all_identifiers: set[str] = set()
|
||||
|
||||
@property
|
||||
def device_identifier(self) -> str | None:
|
||||
|
||||
@@ -120,6 +120,8 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
|
||||
"""Wait for the client to be ready."""
|
||||
|
||||
if not self.data or Attribute.MAC_ADDRESS not in self.data:
|
||||
await self.client.read_mac_address()
|
||||
|
||||
data = await self.client.wait_for_response(
|
||||
FunctionalDomain.IDENTIFICATION, 2, WAIT_TIMEOUT
|
||||
)
|
||||
@@ -130,12 +132,9 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
|
||||
|
||||
return False
|
||||
|
||||
if not self.data or Attribute.NAME not in self.data:
|
||||
await self.client.wait_for_response(
|
||||
FunctionalDomain.IDENTIFICATION, 4, WAIT_TIMEOUT
|
||||
)
|
||||
|
||||
if not self.data or Attribute.THERMOSTAT_MODES not in self.data:
|
||||
await self.client.read_thermostat_iaq_available()
|
||||
|
||||
await self.client.wait_for_response(
|
||||
FunctionalDomain.CONTROL, 7, WAIT_TIMEOUT
|
||||
)
|
||||
@@ -144,10 +143,16 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
|
||||
not self.data
|
||||
or Attribute.INDOOR_TEMPERATURE_CONTROLLING_SENSOR_STATUS not in self.data
|
||||
):
|
||||
await self.client.read_sensors()
|
||||
|
||||
await self.client.wait_for_response(
|
||||
FunctionalDomain.SENSORS, 2, WAIT_TIMEOUT
|
||||
)
|
||||
|
||||
await self.client.read_thermostat_status()
|
||||
|
||||
await self.client.read_iaq_status()
|
||||
|
||||
await ready_callback(True)
|
||||
|
||||
return True
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pyaprilaire"],
|
||||
"requirements": ["pyaprilaire==0.7.4"]
|
||||
"requirements": ["pyaprilaire==0.7.7"]
|
||||
}
|
||||
|
||||
@@ -29,6 +29,8 @@ class ApSystemsSensorData:
|
||||
class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
|
||||
"""Coordinator used for all sensors."""
|
||||
|
||||
device_version: str
|
||||
|
||||
def __init__(self, hass: HomeAssistant, api: APsystemsEZ1M) -> None:
|
||||
"""Initialize my coordinator."""
|
||||
super().__init__(
|
||||
@@ -46,6 +48,7 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
|
||||
raise UpdateFailed from None
|
||||
self.api.max_power = device_info.maxPower
|
||||
self.api.min_power = device_info.minPower
|
||||
self.device_version = device_info.devVer
|
||||
|
||||
async def _async_update_data(self) -> ApSystemsSensorData:
|
||||
try:
|
||||
|
||||
@@ -21,7 +21,8 @@ class ApSystemsEntity(Entity):
|
||||
"""Initialize the APsystems entity."""
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, data.device_id)},
|
||||
serial_number=data.device_id,
|
||||
manufacturer="APsystems",
|
||||
model="EZ1-M",
|
||||
serial_number=data.device_id,
|
||||
sw_version=data.coordinator.device_version.split(" ")[1],
|
||||
)
|
||||
|
||||
@@ -19,5 +19,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/aranet",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["aranet4==2.4.0"]
|
||||
"requirements": ["aranet4==2.5.0"]
|
||||
}
|
||||
|
||||
@@ -22,6 +22,7 @@ from homeassistant.components.sensor import (
|
||||
)
|
||||
from homeassistant.const import (
|
||||
ATTR_MANUFACTURER,
|
||||
ATTR_MODEL,
|
||||
ATTR_NAME,
|
||||
ATTR_SW_VERSION,
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
@@ -142,6 +143,7 @@ def _sensor_device_info_to_hass(
|
||||
if adv.readings and adv.readings.name:
|
||||
hass_device_info[ATTR_NAME] = adv.readings.name
|
||||
hass_device_info[ATTR_MANUFACTURER] = ARANET_MANUFACTURER_NAME
|
||||
hass_device_info[ATTR_MODEL] = adv.readings.type.model
|
||||
if adv.manufacturer_data:
|
||||
hass_device_info[ATTR_SW_VERSION] = str(adv.manufacturer_data.version)
|
||||
return hass_device_info
|
||||
|
||||
@@ -90,7 +90,7 @@ class ArubaDeviceScanner(DeviceScanner):
|
||||
"""Retrieve data from Aruba Access Point and return parsed result."""
|
||||
|
||||
connect = f"ssh {self.username}@{self.host} -o HostKeyAlgorithms=ssh-rsa"
|
||||
ssh = pexpect.spawn(connect)
|
||||
ssh: pexpect.spawn[str] = pexpect.spawn(connect, encoding="utf-8")
|
||||
query = ssh.expect(
|
||||
[
|
||||
"password:",
|
||||
@@ -125,12 +125,12 @@ class ArubaDeviceScanner(DeviceScanner):
|
||||
ssh.expect("#")
|
||||
ssh.sendline("show clients")
|
||||
ssh.expect("#")
|
||||
devices_result = ssh.before.split(b"\r\n")
|
||||
devices_result = (ssh.before or "").splitlines()
|
||||
ssh.sendline("exit")
|
||||
|
||||
devices: dict[str, dict[str, str]] = {}
|
||||
for device in devices_result:
|
||||
if match := _DEVICES_REGEX.search(device.decode("utf-8")):
|
||||
if match := _DEVICES_REGEX.search(device):
|
||||
devices[match.group("ip")] = {
|
||||
"ip": match.group("ip"),
|
||||
"mac": match.group("mac").upper(),
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pexpect", "ptyprocess"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["pexpect==4.6.0"]
|
||||
"requirements": ["pexpect==4.9.0"]
|
||||
}
|
||||
|
||||
@@ -108,6 +108,7 @@ async def async_pipeline_from_audio_stream(
|
||||
device_id: str | None = None,
|
||||
start_stage: PipelineStage = PipelineStage.STT,
|
||||
end_stage: PipelineStage = PipelineStage.TTS,
|
||||
conversation_extra_system_prompt: str | None = None,
|
||||
) -> None:
|
||||
"""Create an audio pipeline from an audio stream.
|
||||
|
||||
@@ -119,6 +120,7 @@ async def async_pipeline_from_audio_stream(
|
||||
stt_metadata=stt_metadata,
|
||||
stt_stream=stt_stream,
|
||||
wake_word_phrase=wake_word_phrase,
|
||||
conversation_extra_system_prompt=conversation_extra_system_prompt,
|
||||
run=PipelineRun(
|
||||
hass,
|
||||
context=context,
|
||||
|
||||
@@ -1010,7 +1010,11 @@ class PipelineRun:
|
||||
self.intent_agent = agent_info.id
|
||||
|
||||
async def recognize_intent(
|
||||
self, intent_input: str, conversation_id: str | None, device_id: str | None
|
||||
self,
|
||||
intent_input: str,
|
||||
conversation_id: str | None,
|
||||
device_id: str | None,
|
||||
conversation_extra_system_prompt: str | None,
|
||||
) -> str:
|
||||
"""Run intent recognition portion of pipeline. Returns text to speak."""
|
||||
if self.intent_agent is None:
|
||||
@@ -1045,6 +1049,7 @@ class PipelineRun:
|
||||
device_id=device_id,
|
||||
language=input_language,
|
||||
agent_id=self.intent_agent,
|
||||
extra_system_prompt=conversation_extra_system_prompt,
|
||||
)
|
||||
processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT
|
||||
|
||||
@@ -1392,8 +1397,13 @@ class PipelineInput:
|
||||
"""Input for text-to-speech. Required when start_stage = tts."""
|
||||
|
||||
conversation_id: str | None = None
|
||||
"""Identifier for the conversation."""
|
||||
|
||||
conversation_extra_system_prompt: str | None = None
|
||||
"""Extra prompt information for the conversation agent."""
|
||||
|
||||
device_id: str | None = None
|
||||
"""Identifier of the device that is processing the input/output of the pipeline."""
|
||||
|
||||
async def execute(self) -> None:
|
||||
"""Run pipeline."""
|
||||
@@ -1483,6 +1493,7 @@ class PipelineInput:
|
||||
intent_input,
|
||||
self.conversation_id,
|
||||
self.device_id,
|
||||
self.conversation_extra_system_prompt,
|
||||
)
|
||||
if tts_input.strip():
|
||||
current_stage = PipelineStage.TTS
|
||||
|
||||
@@ -75,7 +75,7 @@ class AudioBuffer:
|
||||
class VoiceCommandSegmenter:
|
||||
"""Segments an audio stream into voice commands."""
|
||||
|
||||
speech_seconds: float = 0.1
|
||||
speech_seconds: float = 0.3
|
||||
"""Seconds of speech before voice command has started."""
|
||||
|
||||
command_seconds: float = 1.0
|
||||
|
||||
@@ -31,8 +31,8 @@
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"abort": {
|
||||
"invalid_unique_id": "Impossible to determine a valid unique id for the device",
|
||||
"no_unique_id": "A device without a valid unique id is already configured. Configuration of multiple instance is not possible"
|
||||
"invalid_unique_id": "Impossible to determine a valid unique ID for the device",
|
||||
"no_unique_id": "A device without a valid unique ID is already configured. Configuration of multiple instances is not possible"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
@@ -42,7 +42,7 @@
|
||||
"consider_home": "Seconds to wait before considering a device away",
|
||||
"track_unknown": "Track unknown / unnamed devices",
|
||||
"interface": "The interface that you want statistics from (e.g. eth0, eth1 etc)",
|
||||
"dnsmasq": "The location in the router of the dnsmasq.leases files",
|
||||
"dnsmasq": "The location of the dnsmasq.leases file in the router",
|
||||
"require_ip": "Devices must have IP (for access point mode)"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,8 +21,10 @@ from .manager import (
|
||||
BackupManager,
|
||||
BackupPlatformProtocol,
|
||||
BackupReaderWriter,
|
||||
BackupReaderWriterError,
|
||||
CoreBackupReaderWriter,
|
||||
CreateBackupEvent,
|
||||
IncorrectPasswordError,
|
||||
ManagerBackup,
|
||||
NewBackup,
|
||||
WrittenBackup,
|
||||
@@ -39,8 +41,10 @@ __all__ = [
|
||||
"BackupAgentPlatformProtocol",
|
||||
"BackupPlatformProtocol",
|
||||
"BackupReaderWriter",
|
||||
"BackupReaderWriterError",
|
||||
"CreateBackupEvent",
|
||||
"Folder",
|
||||
"IncorrectPasswordError",
|
||||
"LocalBackupAgent",
|
||||
"NewBackup",
|
||||
"WrittenBackup",
|
||||
|
||||
@@ -17,7 +17,7 @@ from homeassistant.helpers.typing import UNDEFINED, UndefinedType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import LOGGER
|
||||
from .models import Folder
|
||||
from .models import BackupManagerError, Folder
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .manager import BackupManager, ManagerBackup
|
||||
@@ -124,6 +124,7 @@ class BackupConfig:
|
||||
def load(self, stored_config: StoredBackupConfig) -> None:
|
||||
"""Load config."""
|
||||
self.data = BackupConfigData.from_dict(stored_config)
|
||||
self.data.retention.apply(self._manager)
|
||||
self.data.schedule.apply(self._manager)
|
||||
|
||||
async def update(
|
||||
@@ -160,8 +161,13 @@ class RetentionConfig:
|
||||
def apply(self, manager: BackupManager) -> None:
|
||||
"""Apply backup retention configuration."""
|
||||
if self.days is not None:
|
||||
LOGGER.debug(
|
||||
"Scheduling next automatic delete of backups older than %s in 1 day",
|
||||
self.days,
|
||||
)
|
||||
self._schedule_next(manager)
|
||||
else:
|
||||
LOGGER.debug("Unscheduling next automatic delete")
|
||||
self._unschedule_next(manager)
|
||||
|
||||
def to_dict(self) -> StoredRetentionConfig:
|
||||
@@ -318,9 +324,9 @@ class BackupSchedule:
|
||||
password=config_data.create_backup.password,
|
||||
with_automatic_settings=True,
|
||||
)
|
||||
except BackupManagerError as err:
|
||||
LOGGER.error("Error creating backup: %s", err)
|
||||
except Exception: # noqa: BLE001
|
||||
# another more specific exception will be added
|
||||
# and handled in the future
|
||||
LOGGER.exception("Unexpected error creating automatic backup")
|
||||
|
||||
manager.remove_next_backup_event = async_track_point_in_time(
|
||||
|
||||
@@ -46,15 +46,11 @@ from .const import (
|
||||
EXCLUDE_FROM_BACKUP,
|
||||
LOGGER,
|
||||
)
|
||||
from .models import AgentBackup, Folder
|
||||
from .models import AgentBackup, BackupManagerError, Folder
|
||||
from .store import BackupStore
|
||||
from .util import make_backup_dir, read_backup, validate_password
|
||||
|
||||
|
||||
class IncorrectPasswordError(HomeAssistantError):
|
||||
"""Raised when the password is incorrect."""
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True, slots=True)
|
||||
class NewBackup:
|
||||
"""New backup class."""
|
||||
@@ -245,6 +241,14 @@ class BackupReaderWriter(abc.ABC):
|
||||
"""Restore a backup."""
|
||||
|
||||
|
||||
class BackupReaderWriterError(HomeAssistantError):
|
||||
"""Backup reader/writer error."""
|
||||
|
||||
|
||||
class IncorrectPasswordError(BackupReaderWriterError):
|
||||
"""Raised when the password is incorrect."""
|
||||
|
||||
|
||||
class BackupManager:
|
||||
"""Define the format that backup managers can have."""
|
||||
|
||||
@@ -373,7 +377,9 @@ class BackupManager:
|
||||
)
|
||||
for result in pre_backup_results:
|
||||
if isinstance(result, Exception):
|
||||
raise result
|
||||
raise BackupManagerError(
|
||||
f"Error during pre-backup: {result}"
|
||||
) from result
|
||||
|
||||
async def async_post_backup_actions(self) -> None:
|
||||
"""Perform post backup actions."""
|
||||
@@ -386,7 +392,9 @@ class BackupManager:
|
||||
)
|
||||
for result in post_backup_results:
|
||||
if isinstance(result, Exception):
|
||||
raise result
|
||||
raise BackupManagerError(
|
||||
f"Error during post-backup: {result}"
|
||||
) from result
|
||||
|
||||
async def load_platforms(self) -> None:
|
||||
"""Load backup platforms."""
|
||||
@@ -422,11 +430,22 @@ class BackupManager:
|
||||
return_exceptions=True,
|
||||
)
|
||||
for idx, result in enumerate(sync_backup_results):
|
||||
if isinstance(result, Exception):
|
||||
if isinstance(result, BackupReaderWriterError):
|
||||
# writer errors will affect all agents
|
||||
# no point in continuing
|
||||
raise BackupManagerError(str(result)) from result
|
||||
if isinstance(result, BackupAgentError):
|
||||
LOGGER.error("Error uploading to %s: %s", agent_ids[idx], result)
|
||||
agent_errors[agent_ids[idx]] = result
|
||||
LOGGER.exception(
|
||||
"Error during backup upload - %s", result, exc_info=result
|
||||
)
|
||||
continue
|
||||
if isinstance(result, Exception):
|
||||
# trap bugs from agents
|
||||
agent_errors[agent_ids[idx]] = result
|
||||
LOGGER.error("Unexpected error: %s", result, exc_info=result)
|
||||
continue
|
||||
if isinstance(result, BaseException):
|
||||
raise result
|
||||
|
||||
return agent_errors
|
||||
|
||||
async def async_get_backups(
|
||||
@@ -449,7 +468,7 @@ class BackupManager:
|
||||
agent_errors[agent_ids[idx]] = result
|
||||
continue
|
||||
if isinstance(result, BaseException):
|
||||
raise result
|
||||
raise result # unexpected error
|
||||
for agent_backup in result:
|
||||
if (backup_id := agent_backup.backup_id) not in backups:
|
||||
if known_backup := self.known_backups.get(backup_id):
|
||||
@@ -499,7 +518,7 @@ class BackupManager:
|
||||
agent_errors[agent_ids[idx]] = result
|
||||
continue
|
||||
if isinstance(result, BaseException):
|
||||
raise result
|
||||
raise result # unexpected error
|
||||
if not result:
|
||||
continue
|
||||
if backup is None:
|
||||
@@ -563,7 +582,7 @@ class BackupManager:
|
||||
agent_errors[agent_ids[idx]] = result
|
||||
continue
|
||||
if isinstance(result, BaseException):
|
||||
raise result
|
||||
raise result # unexpected error
|
||||
|
||||
if not agent_errors:
|
||||
self.known_backups.remove(backup_id)
|
||||
@@ -578,7 +597,7 @@ class BackupManager:
|
||||
) -> None:
|
||||
"""Receive and store a backup file from upload."""
|
||||
if self.state is not BackupManagerState.IDLE:
|
||||
raise HomeAssistantError(f"Backup manager busy: {self.state}")
|
||||
raise BackupManagerError(f"Backup manager busy: {self.state}")
|
||||
self.async_on_backup_event(
|
||||
ReceiveBackupEvent(stage=None, state=ReceiveBackupState.IN_PROGRESS)
|
||||
)
|
||||
@@ -652,6 +671,7 @@ class BackupManager:
|
||||
include_homeassistant=include_homeassistant,
|
||||
name=name,
|
||||
password=password,
|
||||
raise_task_error=True,
|
||||
with_automatic_settings=with_automatic_settings,
|
||||
)
|
||||
assert self._backup_finish_task
|
||||
@@ -669,11 +689,12 @@ class BackupManager:
|
||||
include_homeassistant: bool,
|
||||
name: str | None,
|
||||
password: str | None,
|
||||
raise_task_error: bool = False,
|
||||
with_automatic_settings: bool = False,
|
||||
) -> NewBackup:
|
||||
"""Initiate generating a backup."""
|
||||
if self.state is not BackupManagerState.IDLE:
|
||||
raise HomeAssistantError(f"Backup manager busy: {self.state}")
|
||||
raise BackupManagerError(f"Backup manager busy: {self.state}")
|
||||
|
||||
if with_automatic_settings:
|
||||
self.config.data.last_attempted_automatic_backup = dt_util.now()
|
||||
@@ -692,6 +713,7 @@ class BackupManager:
|
||||
include_homeassistant=include_homeassistant,
|
||||
name=name,
|
||||
password=password,
|
||||
raise_task_error=raise_task_error,
|
||||
with_automatic_settings=with_automatic_settings,
|
||||
)
|
||||
except Exception:
|
||||
@@ -714,57 +736,81 @@ class BackupManager:
|
||||
include_homeassistant: bool,
|
||||
name: str | None,
|
||||
password: str | None,
|
||||
raise_task_error: bool,
|
||||
with_automatic_settings: bool,
|
||||
) -> NewBackup:
|
||||
"""Initiate generating a backup."""
|
||||
if not agent_ids:
|
||||
raise HomeAssistantError("At least one agent must be selected")
|
||||
if any(agent_id not in self.backup_agents for agent_id in agent_ids):
|
||||
raise HomeAssistantError("Invalid agent selected")
|
||||
raise BackupManagerError("At least one agent must be selected")
|
||||
if invalid_agents := [
|
||||
agent_id for agent_id in agent_ids if agent_id not in self.backup_agents
|
||||
]:
|
||||
raise BackupManagerError(f"Invalid agents selected: {invalid_agents}")
|
||||
if include_all_addons and include_addons:
|
||||
raise HomeAssistantError(
|
||||
raise BackupManagerError(
|
||||
"Cannot include all addons and specify specific addons"
|
||||
)
|
||||
|
||||
backup_name = (
|
||||
name
|
||||
or f"{"Automatic" if with_automatic_settings else "Custom"} {HAVERSION}"
|
||||
or f"{"Automatic" if with_automatic_settings else "Custom"} backup {HAVERSION}"
|
||||
)
|
||||
new_backup, self._backup_task = await self._reader_writer.async_create_backup(
|
||||
agent_ids=agent_ids,
|
||||
backup_name=backup_name,
|
||||
extra_metadata={
|
||||
"instance_id": await instance_id.async_get(self.hass),
|
||||
"with_automatic_settings": with_automatic_settings,
|
||||
},
|
||||
include_addons=include_addons,
|
||||
include_all_addons=include_all_addons,
|
||||
include_database=include_database,
|
||||
include_folders=include_folders,
|
||||
include_homeassistant=include_homeassistant,
|
||||
on_progress=self.async_on_backup_event,
|
||||
password=password,
|
||||
)
|
||||
self._backup_finish_task = self.hass.async_create_task(
|
||||
|
||||
try:
|
||||
(
|
||||
new_backup,
|
||||
self._backup_task,
|
||||
) = await self._reader_writer.async_create_backup(
|
||||
agent_ids=agent_ids,
|
||||
backup_name=backup_name,
|
||||
extra_metadata={
|
||||
"instance_id": await instance_id.async_get(self.hass),
|
||||
"with_automatic_settings": with_automatic_settings,
|
||||
},
|
||||
include_addons=include_addons,
|
||||
include_all_addons=include_all_addons,
|
||||
include_database=include_database,
|
||||
include_folders=include_folders,
|
||||
include_homeassistant=include_homeassistant,
|
||||
on_progress=self.async_on_backup_event,
|
||||
password=password,
|
||||
)
|
||||
except BackupReaderWriterError as err:
|
||||
raise BackupManagerError(str(err)) from err
|
||||
|
||||
backup_finish_task = self._backup_finish_task = self.hass.async_create_task(
|
||||
self._async_finish_backup(agent_ids, with_automatic_settings),
|
||||
name="backup_manager_finish_backup",
|
||||
)
|
||||
if not raise_task_error:
|
||||
|
||||
def log_finish_task_error(task: asyncio.Task[None]) -> None:
|
||||
if task.done() and not task.cancelled() and (err := task.exception()):
|
||||
if isinstance(err, BackupManagerError):
|
||||
LOGGER.error("Error creating backup: %s", err)
|
||||
else:
|
||||
LOGGER.error("Unexpected error: %s", err, exc_info=err)
|
||||
|
||||
backup_finish_task.add_done_callback(log_finish_task_error)
|
||||
|
||||
return new_backup
|
||||
|
||||
async def _async_finish_backup(
|
||||
self, agent_ids: list[str], with_automatic_settings: bool
|
||||
) -> None:
|
||||
"""Finish a backup."""
|
||||
if TYPE_CHECKING:
|
||||
assert self._backup_task is not None
|
||||
backup_success = False
|
||||
try:
|
||||
written_backup = await self._backup_task
|
||||
except Exception as err: # noqa: BLE001
|
||||
LOGGER.debug("Generating backup failed", exc_info=err)
|
||||
self.async_on_backup_event(
|
||||
CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
|
||||
)
|
||||
except Exception as err:
|
||||
if with_automatic_settings:
|
||||
self._update_issue_backup_failed()
|
||||
|
||||
if isinstance(err, BackupReaderWriterError):
|
||||
raise BackupManagerError(str(err)) from err
|
||||
raise # unexpected error
|
||||
else:
|
||||
LOGGER.debug(
|
||||
"Generated new backup with backup_id %s, uploading to agents %s",
|
||||
@@ -777,28 +823,40 @@ class BackupManager:
|
||||
state=CreateBackupState.IN_PROGRESS,
|
||||
)
|
||||
)
|
||||
agent_errors = await self._async_upload_backup(
|
||||
backup=written_backup.backup,
|
||||
agent_ids=agent_ids,
|
||||
open_stream=written_backup.open_stream,
|
||||
)
|
||||
await written_backup.release_stream()
|
||||
if with_automatic_settings:
|
||||
# create backup was successful, update last_completed_automatic_backup
|
||||
self.config.data.last_completed_automatic_backup = dt_util.now()
|
||||
self.store.save()
|
||||
self._update_issue_after_agent_upload(agent_errors)
|
||||
self.known_backups.add(written_backup.backup, agent_errors)
|
||||
|
||||
try:
|
||||
agent_errors = await self._async_upload_backup(
|
||||
backup=written_backup.backup,
|
||||
agent_ids=agent_ids,
|
||||
open_stream=written_backup.open_stream,
|
||||
)
|
||||
finally:
|
||||
await written_backup.release_stream()
|
||||
self.known_backups.add(written_backup.backup, agent_errors)
|
||||
if not agent_errors:
|
||||
if with_automatic_settings:
|
||||
# create backup was successful, update last_completed_automatic_backup
|
||||
self.config.data.last_completed_automatic_backup = dt_util.now()
|
||||
self.store.save()
|
||||
backup_success = True
|
||||
|
||||
if with_automatic_settings:
|
||||
self._update_issue_after_agent_upload(agent_errors)
|
||||
# delete old backups more numerous than copies
|
||||
# try this regardless of agent errors above
|
||||
await delete_backups_exceeding_configured_count(self)
|
||||
|
||||
self.async_on_backup_event(
|
||||
CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED)
|
||||
)
|
||||
finally:
|
||||
self._backup_task = None
|
||||
self._backup_finish_task = None
|
||||
self.async_on_backup_event(
|
||||
CreateBackupEvent(
|
||||
stage=None,
|
||||
state=CreateBackupState.COMPLETED
|
||||
if backup_success
|
||||
else CreateBackupState.FAILED,
|
||||
)
|
||||
)
|
||||
self.async_on_backup_event(IdleEvent())
|
||||
|
||||
async def async_restore_backup(
|
||||
@@ -814,7 +872,7 @@ class BackupManager:
|
||||
) -> None:
|
||||
"""Initiate restoring a backup."""
|
||||
if self.state is not BackupManagerState.IDLE:
|
||||
raise HomeAssistantError(f"Backup manager busy: {self.state}")
|
||||
raise BackupManagerError(f"Backup manager busy: {self.state}")
|
||||
|
||||
self.async_on_backup_event(
|
||||
RestoreBackupEvent(stage=None, state=RestoreBackupState.IN_PROGRESS)
|
||||
@@ -829,6 +887,9 @@ class BackupManager:
|
||||
restore_folders=restore_folders,
|
||||
restore_homeassistant=restore_homeassistant,
|
||||
)
|
||||
self.async_on_backup_event(
|
||||
RestoreBackupEvent(stage=None, state=RestoreBackupState.COMPLETED)
|
||||
)
|
||||
except Exception:
|
||||
self.async_on_backup_event(
|
||||
RestoreBackupEvent(stage=None, state=RestoreBackupState.FAILED)
|
||||
@@ -851,7 +912,7 @@ class BackupManager:
|
||||
"""Initiate restoring a backup."""
|
||||
agent = self.backup_agents[agent_id]
|
||||
if not await agent.async_get_backup(backup_id):
|
||||
raise HomeAssistantError(
|
||||
raise BackupManagerError(
|
||||
f"Backup {backup_id} not found in agent {agent_id}"
|
||||
)
|
||||
|
||||
@@ -1024,11 +1085,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
backup_id = _generate_backup_id(date_str, backup_name)
|
||||
|
||||
if include_addons or include_all_addons or include_folders:
|
||||
raise HomeAssistantError(
|
||||
raise BackupReaderWriterError(
|
||||
"Addons and folders are not supported by core backup"
|
||||
)
|
||||
if not include_homeassistant:
|
||||
raise HomeAssistantError("Home Assistant must be included in backup")
|
||||
raise BackupReaderWriterError("Home Assistant must be included in backup")
|
||||
|
||||
backup_task = self._hass.async_create_task(
|
||||
self._async_create_backup(
|
||||
@@ -1099,6 +1160,13 @@ class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
password,
|
||||
local_agent_tar_file_path,
|
||||
)
|
||||
except (BackupManagerError, OSError, tarfile.TarError, ValueError) as err:
|
||||
# BackupManagerError from async_pre_backup_actions
|
||||
# OSError from file operations
|
||||
# TarError from tarfile
|
||||
# ValueError from json_bytes
|
||||
raise BackupReaderWriterError(str(err)) from err
|
||||
else:
|
||||
backup = AgentBackup(
|
||||
addons=[],
|
||||
backup_id=backup_id,
|
||||
@@ -1116,12 +1184,15 @@ class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
async_add_executor_job = self._hass.async_add_executor_job
|
||||
|
||||
async def send_backup() -> AsyncIterator[bytes]:
|
||||
f = await async_add_executor_job(tar_file_path.open, "rb")
|
||||
try:
|
||||
while chunk := await async_add_executor_job(f.read, 2**20):
|
||||
yield chunk
|
||||
finally:
|
||||
await async_add_executor_job(f.close)
|
||||
f = await async_add_executor_job(tar_file_path.open, "rb")
|
||||
try:
|
||||
while chunk := await async_add_executor_job(f.read, 2**20):
|
||||
yield chunk
|
||||
finally:
|
||||
await async_add_executor_job(f.close)
|
||||
except OSError as err:
|
||||
raise BackupReaderWriterError(str(err)) from err
|
||||
|
||||
async def open_backup() -> AsyncIterator[bytes]:
|
||||
return send_backup()
|
||||
@@ -1129,14 +1200,20 @@ class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
async def remove_backup() -> None:
|
||||
if local_agent_tar_file_path:
|
||||
return
|
||||
await async_add_executor_job(tar_file_path.unlink, True)
|
||||
try:
|
||||
await async_add_executor_job(tar_file_path.unlink, True)
|
||||
except OSError as err:
|
||||
raise BackupReaderWriterError(str(err)) from err
|
||||
|
||||
return WrittenBackup(
|
||||
backup=backup, open_stream=open_backup, release_stream=remove_backup
|
||||
)
|
||||
finally:
|
||||
# Inform integrations the backup is done
|
||||
await manager.async_post_backup_actions()
|
||||
try:
|
||||
await manager.async_post_backup_actions()
|
||||
except BackupManagerError as err:
|
||||
raise BackupReaderWriterError(str(err)) from err
|
||||
|
||||
def _mkdir_and_generate_backup_contents(
|
||||
self,
|
||||
@@ -1206,6 +1283,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
if self._local_agent_id in agent_ids:
|
||||
local_agent = manager.local_backup_agents[self._local_agent_id]
|
||||
tar_file_path = local_agent.get_backup_path(backup.backup_id)
|
||||
await async_add_executor_job(make_backup_dir, tar_file_path.parent)
|
||||
await async_add_executor_job(shutil.move, temp_file, tar_file_path)
|
||||
else:
|
||||
tar_file_path = temp_file
|
||||
@@ -1249,11 +1327,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
"""
|
||||
|
||||
if restore_addons or restore_folders:
|
||||
raise HomeAssistantError(
|
||||
raise BackupReaderWriterError(
|
||||
"Addons and folders are not supported in core restore"
|
||||
)
|
||||
if not restore_homeassistant and not restore_database:
|
||||
raise HomeAssistantError(
|
||||
raise BackupReaderWriterError(
|
||||
"Home Assistant or database must be included in restore"
|
||||
)
|
||||
|
||||
@@ -1298,7 +1376,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
)
|
||||
|
||||
await self._hass.async_add_executor_job(_write_restore_file)
|
||||
await self._hass.services.async_call("homeassistant", "restart", {})
|
||||
await self._hass.services.async_call("homeassistant", "restart", blocking=True)
|
||||
|
||||
|
||||
def _generate_backup_id(date: str, name: str) -> str:
|
||||
|
||||
@@ -6,6 +6,8 @@ from dataclasses import asdict, dataclass
|
||||
from enum import StrEnum
|
||||
from typing import Any, Self
|
||||
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AddonInfo:
|
||||
@@ -67,3 +69,7 @@ class AgentBackup:
|
||||
protected=data["protected"],
|
||||
size=data["size"],
|
||||
)
|
||||
|
||||
|
||||
class BackupManagerError(HomeAssistantError):
|
||||
"""Backup manager error."""
|
||||
|
||||
@@ -5,8 +5,8 @@
|
||||
"description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
|
||||
},
|
||||
"automatic_backup_failed_upload_agents": {
|
||||
"title": "Automatic backup could not be uploaded to agents",
|
||||
"description": "The automatic backup could not be uploaded to agents {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
|
||||
"title": "Automatic backup could not be uploaded to the configured locations",
|
||||
"description": "The automatic backup could not be uploaded to the configured locations {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
|
||||
@@ -34,7 +34,7 @@ class BangOlufsenData:
|
||||
|
||||
type BangOlufsenConfigEntry = ConfigEntry[BangOlufsenData]
|
||||
|
||||
PLATFORMS = [Platform.MEDIA_PLAYER]
|
||||
PLATFORMS = [Platform.EVENT, Platform.MEDIA_PLAYER]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry) -> bool:
|
||||
|
||||
@@ -79,6 +79,7 @@ class WebsocketNotification(StrEnum):
|
||||
"""Enum for WebSocket notification types."""
|
||||
|
||||
ACTIVE_LISTENING_MODE = "active_listening_mode"
|
||||
BUTTON = "button"
|
||||
PLAYBACK_ERROR = "playback_error"
|
||||
PLAYBACK_METADATA = "playback_metadata"
|
||||
PLAYBACK_PROGRESS = "playback_progress"
|
||||
@@ -203,14 +204,60 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
),
|
||||
]
|
||||
)
|
||||
# Map for storing compatibility of devices.
|
||||
|
||||
MODEL_SUPPORT_DEVICE_BUTTONS: Final[str] = "device_buttons"
|
||||
|
||||
MODEL_SUPPORT_MAP = {
|
||||
MODEL_SUPPORT_DEVICE_BUTTONS: (
|
||||
BangOlufsenModel.BEOLAB_8,
|
||||
BangOlufsenModel.BEOLAB_28,
|
||||
BangOlufsenModel.BEOSOUND_2,
|
||||
BangOlufsenModel.BEOSOUND_A5,
|
||||
BangOlufsenModel.BEOSOUND_A9,
|
||||
BangOlufsenModel.BEOSOUND_BALANCE,
|
||||
BangOlufsenModel.BEOSOUND_EMERGE,
|
||||
BangOlufsenModel.BEOSOUND_LEVEL,
|
||||
BangOlufsenModel.BEOSOUND_THEATRE,
|
||||
)
|
||||
}
|
||||
|
||||
# Device events
|
||||
BANG_OLUFSEN_WEBSOCKET_EVENT: Final[str] = f"{DOMAIN}_websocket_event"
|
||||
|
||||
# Dict used to translate native Bang & Olufsen event names to string.json compatible ones
|
||||
EVENT_TRANSLATION_MAP: dict[str, str] = {
|
||||
"shortPress (Release)": "short_press_release",
|
||||
"longPress (Timeout)": "long_press_timeout",
|
||||
"longPress (Release)": "long_press_release",
|
||||
"veryLongPress (Timeout)": "very_long_press_timeout",
|
||||
"veryLongPress (Release)": "very_long_press_release",
|
||||
}
|
||||
|
||||
CONNECTION_STATUS: Final[str] = "CONNECTION_STATUS"
|
||||
|
||||
DEVICE_BUTTONS: Final[list[str]] = [
|
||||
"Bluetooth",
|
||||
"Microphone",
|
||||
"Next",
|
||||
"PlayPause",
|
||||
"Preset1",
|
||||
"Preset2",
|
||||
"Preset3",
|
||||
"Preset4",
|
||||
"Previous",
|
||||
"Volume",
|
||||
]
|
||||
|
||||
|
||||
DEVICE_BUTTON_EVENTS: Final[list[str]] = [
|
||||
"short_press_release",
|
||||
"long_press_timeout",
|
||||
"long_press_release",
|
||||
"very_long_press_timeout",
|
||||
"very_long_press_release",
|
||||
]
|
||||
|
||||
# Beolink Converter NL/ML sources need to be transformed to upper case
|
||||
BEOLINK_JOIN_SOURCES_TO_UPPER = (
|
||||
"aux_a",
|
||||
|
||||
76
homeassistant/components/bang_olufsen/event.py
Normal file
76
homeassistant/components/bang_olufsen/event.py
Normal file
@@ -0,0 +1,76 @@
|
||||
"""Event entities for the Bang & Olufsen integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.components.event import EventDeviceClass, EventEntity
|
||||
from homeassistant.const import CONF_MODEL
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import BangOlufsenConfigEntry
|
||||
from .const import (
|
||||
CONNECTION_STATUS,
|
||||
DEVICE_BUTTON_EVENTS,
|
||||
DEVICE_BUTTONS,
|
||||
MODEL_SUPPORT_DEVICE_BUTTONS,
|
||||
MODEL_SUPPORT_MAP,
|
||||
WebsocketNotification,
|
||||
)
|
||||
from .entity import BangOlufsenEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: BangOlufsenConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Sensor entities from config entry."""
|
||||
|
||||
if config_entry.data[CONF_MODEL] in MODEL_SUPPORT_MAP[MODEL_SUPPORT_DEVICE_BUTTONS]:
|
||||
async_add_entities(
|
||||
BangOlufsenButtonEvent(config_entry, button_type)
|
||||
for button_type in DEVICE_BUTTONS
|
||||
)
|
||||
|
||||
|
||||
class BangOlufsenButtonEvent(BangOlufsenEntity, EventEntity):
|
||||
"""Event class for Button events."""
|
||||
|
||||
_attr_device_class = EventDeviceClass.BUTTON
|
||||
_attr_entity_registry_enabled_default = False
|
||||
_attr_event_types = DEVICE_BUTTON_EVENTS
|
||||
|
||||
def __init__(self, config_entry: BangOlufsenConfigEntry, button_type: str) -> None:
|
||||
"""Initialize Button."""
|
||||
super().__init__(config_entry, config_entry.runtime_data.client)
|
||||
|
||||
self._attr_unique_id = f"{self._unique_id}_{button_type}"
|
||||
|
||||
# Make the native button name Home Assistant compatible
|
||||
self._attr_translation_key = button_type.lower()
|
||||
|
||||
self._button_type = button_type
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Listen to WebSocket button events."""
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{CONNECTION_STATUS}",
|
||||
self._async_update_connection_state,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.BUTTON}_{self._button_type}",
|
||||
self._async_handle_event,
|
||||
)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_handle_event(self, event: str) -> None:
|
||||
"""Handle event."""
|
||||
self._trigger_event(event)
|
||||
self.async_write_ha_state()
|
||||
@@ -1,7 +1,12 @@
|
||||
{
|
||||
"common": {
|
||||
"jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity.",
|
||||
"jid_options_name": "JID options",
|
||||
"jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity."
|
||||
"long_press_release": "Release of long press",
|
||||
"long_press_timeout": "Long press",
|
||||
"short_press_release": "Release of short press",
|
||||
"very_long_press_release": "Release of very long press",
|
||||
"very_long_press_timeout": "Very long press"
|
||||
},
|
||||
"config": {
|
||||
"error": {
|
||||
@@ -29,6 +34,150 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"event": {
|
||||
"bluetooth": {
|
||||
"name": "Bluetooth",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"microphone": {
|
||||
"name": "Microphone",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"next": {
|
||||
"name": "Next",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"playpause": {
|
||||
"name": "Play / Pause",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"preset1": {
|
||||
"name": "Favourite 1",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"preset2": {
|
||||
"name": "Favourite 2",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"preset3": {
|
||||
"name": "Favourite 3",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"preset4": {
|
||||
"name": "Favourite 4",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"previous": {
|
||||
"name": "Previous",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"volume": {
|
||||
"name": "Volume",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"source_ids": {
|
||||
"options": {
|
||||
|
||||
@@ -3,8 +3,10 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from mozart_api.models import (
|
||||
ButtonEvent,
|
||||
ListeningModeProps,
|
||||
PlaybackContentMetadata,
|
||||
PlaybackError,
|
||||
@@ -26,6 +28,7 @@ from homeassistant.util.enum import try_parse_enum
|
||||
from .const import (
|
||||
BANG_OLUFSEN_WEBSOCKET_EVENT,
|
||||
CONNECTION_STATUS,
|
||||
EVENT_TRANSLATION_MAP,
|
||||
WebsocketNotification,
|
||||
)
|
||||
from .entity import BangOlufsenBase
|
||||
@@ -54,6 +57,8 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
self._client.get_active_listening_mode_notifications(
|
||||
self.on_active_listening_mode
|
||||
)
|
||||
self._client.get_button_notifications(self.on_button_notification)
|
||||
|
||||
self._client.get_playback_error_notifications(
|
||||
self.on_playback_error_notification
|
||||
)
|
||||
@@ -104,6 +109,19 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
notification,
|
||||
)
|
||||
|
||||
def on_button_notification(self, notification: ButtonEvent) -> None:
|
||||
"""Send button dispatch."""
|
||||
# State is expected to always be available.
|
||||
if TYPE_CHECKING:
|
||||
assert notification.state
|
||||
|
||||
# Send to event entity
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.BUTTON}_{notification.button}",
|
||||
EVENT_TRANSLATION_MAP[notification.state],
|
||||
)
|
||||
|
||||
def on_notification_notification(
|
||||
self, notification: WebsocketNotificationTag
|
||||
) -> None:
|
||||
|
||||
@@ -71,27 +71,6 @@ class BluesoundConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
),
|
||||
)
|
||||
|
||||
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Import bluesound config entry from configuration.yaml."""
|
||||
session = async_get_clientsession(self.hass)
|
||||
async with Player(
|
||||
import_data[CONF_HOST], import_data[CONF_PORT], session=session
|
||||
) as player:
|
||||
try:
|
||||
sync_status = await player.sync_status(timeout=1)
|
||||
except PlayerUnreachableError:
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
|
||||
await self.async_set_unique_id(
|
||||
format_unique_id(sync_status.mac, import_data[CONF_PORT])
|
||||
)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=sync_status.name,
|
||||
data=import_data,
|
||||
)
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: zeroconf.ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
|
||||
@@ -15,7 +15,6 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components import media_source
|
||||
from homeassistant.components.media_player import (
|
||||
PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA,
|
||||
BrowseMedia,
|
||||
MediaPlayerEntity,
|
||||
MediaPlayerEntityFeature,
|
||||
@@ -23,16 +22,10 @@ from homeassistant.components.media_player import (
|
||||
MediaType,
|
||||
async_process_play_media_url,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_IMPORT
|
||||
from homeassistant.const import CONF_HOST, CONF_HOSTS, CONF_NAME, CONF_PORT
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
entity_platform,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.device_registry import (
|
||||
CONNECTION_NETWORK_MAC,
|
||||
DeviceInfo,
|
||||
@@ -43,10 +36,9 @@ from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_send,
|
||||
)
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN, INTEGRATION_TITLE
|
||||
from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN
|
||||
from .utils import dispatcher_join_signal, dispatcher_unjoin_signal, format_unique_id
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -71,64 +63,6 @@ SYNC_STATUS_INTERVAL = timedelta(minutes=5)
|
||||
|
||||
POLL_TIMEOUT = 120
|
||||
|
||||
PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_HOSTS): vol.All(
|
||||
cv.ensure_list,
|
||||
[
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
}
|
||||
],
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def _async_import(hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Import config entry from configuration.yaml."""
|
||||
if not hass.config_entries.async_entries(DOMAIN):
|
||||
# Start import flow
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_IMPORT}, data=config
|
||||
)
|
||||
if (
|
||||
result["type"] == FlowResultType.ABORT
|
||||
and result["reason"] == "cannot_connect"
|
||||
):
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"deprecated_yaml_import_issue_{result['reason']}",
|
||||
breaks_in_ha_version="2025.2.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key=f"deprecated_yaml_import_issue_{result['reason']}",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": INTEGRATION_TITLE,
|
||||
},
|
||||
)
|
||||
return
|
||||
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_yaml_{DOMAIN}",
|
||||
breaks_in_ha_version="2025.2.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": INTEGRATION_TITLE,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -159,22 +93,6 @@ async def async_setup_entry(
|
||||
async_add_entities([bluesound_player], update_before_add=True)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None,
|
||||
) -> None:
|
||||
"""Trigger import flows."""
|
||||
hosts = config.get(CONF_HOSTS, [])
|
||||
for host in hosts:
|
||||
import_data = {
|
||||
CONF_HOST: host[CONF_HOST],
|
||||
CONF_PORT: host.get(CONF_PORT, 11000),
|
||||
}
|
||||
hass.async_create_task(_async_import(hass, import_data))
|
||||
|
||||
|
||||
class BluesoundPlayer(MediaPlayerEntity):
|
||||
"""Representation of a Bluesound Player."""
|
||||
|
||||
|
||||
@@ -20,6 +20,6 @@
|
||||
"bluetooth-auto-recovery==1.4.2",
|
||||
"bluetooth-data-tools==1.20.0",
|
||||
"dbus-fast==2.24.3",
|
||||
"habluetooth==3.6.0"
|
||||
"habluetooth==3.7.0"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -6,5 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/bring",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["bring_api"],
|
||||
"requirements": ["bring-api==0.9.1"]
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
}
|
||||
},
|
||||
"discovery_confirm": {
|
||||
"description": "Do you want to setup {name}?"
|
||||
"description": "Do you want to set up {name}?"
|
||||
},
|
||||
"reconfigure": {
|
||||
"description": "Reconfigure your Cambridge Audio Streamer.",
|
||||
@@ -28,7 +28,7 @@
|
||||
"cannot_connect": "Failed to connect to Cambridge Audio device. Please make sure the device is powered up and connected to the network. Try power-cycling the device if it does not connect."
|
||||
},
|
||||
"abort": {
|
||||
"wrong_device": "This Cambridge Audio device does not match the existing device id. Please make sure you entered the correct IP address.",
|
||||
"wrong_device": "This Cambridge Audio device does not match the existing device ID. Please make sure you entered the correct IP address.",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
|
||||
@@ -516,6 +516,19 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
"""Flag supported features."""
|
||||
return self._attr_supported_features
|
||||
|
||||
@property
|
||||
def supported_features_compat(self) -> CameraEntityFeature:
|
||||
"""Return the supported features as CameraEntityFeature.
|
||||
|
||||
Remove this compatibility shim in 2025.1 or later.
|
||||
"""
|
||||
features = self.supported_features
|
||||
if type(features) is int: # noqa: E721
|
||||
new_features = CameraEntityFeature(features)
|
||||
self._report_deprecated_supported_features_values(new_features)
|
||||
return new_features
|
||||
return features
|
||||
|
||||
@cached_property
|
||||
def is_recording(self) -> bool:
|
||||
"""Return true if the device is recording."""
|
||||
@@ -569,7 +582,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
|
||||
self._deprecate_attr_frontend_stream_type_logged = True
|
||||
return self._attr_frontend_stream_type
|
||||
if CameraEntityFeature.STREAM not in self.supported_features:
|
||||
if CameraEntityFeature.STREAM not in self.supported_features_compat:
|
||||
return None
|
||||
if (
|
||||
self._webrtc_provider
|
||||
@@ -798,7 +811,9 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
async def async_internal_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_internal_added_to_hass()
|
||||
self.__supports_stream = self.supported_features & CameraEntityFeature.STREAM
|
||||
self.__supports_stream = (
|
||||
self.supported_features_compat & CameraEntityFeature.STREAM
|
||||
)
|
||||
await self.async_refresh_providers(write_state=False)
|
||||
|
||||
async def async_refresh_providers(self, *, write_state: bool = True) -> None:
|
||||
@@ -838,7 +853,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
self, fn: Callable[[HomeAssistant, Camera], Coroutine[None, None, _T | None]]
|
||||
) -> _T | None:
|
||||
"""Get first provider that supports this camera."""
|
||||
if CameraEntityFeature.STREAM not in self.supported_features:
|
||||
if CameraEntityFeature.STREAM not in self.supported_features_compat:
|
||||
return None
|
||||
|
||||
return await fn(self.hass, self)
|
||||
@@ -896,7 +911,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
def camera_capabilities(self) -> CameraCapabilities:
|
||||
"""Return the camera capabilities."""
|
||||
frontend_stream_types = set()
|
||||
if CameraEntityFeature.STREAM in self.supported_features:
|
||||
if CameraEntityFeature.STREAM in self.supported_features_compat:
|
||||
if self._supports_native_sync_webrtc or self._supports_native_async_webrtc:
|
||||
# The camera has a native WebRTC implementation
|
||||
frontend_stream_types.add(StreamType.WEB_RTC)
|
||||
@@ -916,7 +931,8 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
"""
|
||||
super().async_write_ha_state()
|
||||
if self.__supports_stream != (
|
||||
supports_stream := self.supported_features & CameraEntityFeature.STREAM
|
||||
supports_stream := self.supported_features_compat
|
||||
& CameraEntityFeature.STREAM
|
||||
):
|
||||
self.__supports_stream = supports_stream
|
||||
self._invalidate_camera_capabilities_cache()
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import re
|
||||
|
||||
from pexpect import pxssh
|
||||
import voluptuous as vol
|
||||
@@ -101,11 +100,11 @@ class CiscoDeviceScanner(DeviceScanner):
|
||||
|
||||
return False
|
||||
|
||||
def _get_arp_data(self):
|
||||
def _get_arp_data(self) -> str | None:
|
||||
"""Open connection to the router and get arp entries."""
|
||||
|
||||
try:
|
||||
cisco_ssh = pxssh.pxssh()
|
||||
cisco_ssh: pxssh.pxssh[str] = pxssh.pxssh(encoding="uft-8")
|
||||
cisco_ssh.login(
|
||||
self.host,
|
||||
self.username,
|
||||
@@ -115,12 +114,11 @@ class CiscoDeviceScanner(DeviceScanner):
|
||||
)
|
||||
|
||||
# Find the hostname
|
||||
initial_line = cisco_ssh.before.decode("utf-8").splitlines()
|
||||
initial_line = (cisco_ssh.before or "").splitlines()
|
||||
router_hostname = initial_line[len(initial_line) - 1]
|
||||
router_hostname += "#"
|
||||
# Set the discovered hostname as prompt
|
||||
regex_expression = f"(?i)^{router_hostname}".encode()
|
||||
cisco_ssh.PROMPT = re.compile(regex_expression, re.MULTILINE)
|
||||
cisco_ssh.PROMPT = f"(?i)^{router_hostname}"
|
||||
# Allow full arp table to print at once
|
||||
cisco_ssh.sendline("terminal length 0")
|
||||
cisco_ssh.prompt(1)
|
||||
@@ -128,13 +126,11 @@ class CiscoDeviceScanner(DeviceScanner):
|
||||
cisco_ssh.sendline("show ip arp")
|
||||
cisco_ssh.prompt(1)
|
||||
|
||||
devices_result = cisco_ssh.before
|
||||
|
||||
return devices_result.decode("utf-8")
|
||||
except pxssh.ExceptionPxssh as px_e:
|
||||
_LOGGER.error("Failed to login via pxssh: %s", px_e)
|
||||
return None
|
||||
|
||||
return None
|
||||
return cisco_ssh.before
|
||||
|
||||
|
||||
def _parse_cisco_mac_address(cisco_hardware_addr):
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pexpect", "ptyprocess"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["pexpect==4.6.0"]
|
||||
"requirements": ["pexpect==4.9.0"]
|
||||
}
|
||||
|
||||
@@ -5,9 +5,10 @@ from __future__ import annotations
|
||||
import base64
|
||||
from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
|
||||
import hashlib
|
||||
from typing import Any, Self
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientError, ClientTimeout, StreamReader
|
||||
from aiohttp import ClientError, ClientTimeout
|
||||
from hass_nabucasa import Cloud, CloudError
|
||||
from hass_nabucasa.cloud_api import (
|
||||
async_files_delete_file,
|
||||
@@ -18,11 +19,13 @@ from hass_nabucasa.cloud_api import (
|
||||
|
||||
from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.aiohttp_client import ChunkAsyncStreamIterator
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
|
||||
from .client import CloudClient
|
||||
from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
_STORAGE_BACKUP = "backup"
|
||||
|
||||
|
||||
@@ -71,31 +74,6 @@ def async_register_backup_agents_listener(
|
||||
return unsub
|
||||
|
||||
|
||||
class ChunkAsyncStreamIterator:
|
||||
"""Async iterator for chunked streams.
|
||||
|
||||
Based on aiohttp.streams.ChunkTupleAsyncStreamIterator, but yields
|
||||
bytes instead of tuple[bytes, bool].
|
||||
"""
|
||||
|
||||
__slots__ = ("_stream",)
|
||||
|
||||
def __init__(self, stream: StreamReader) -> None:
|
||||
"""Initialize."""
|
||||
self._stream = stream
|
||||
|
||||
def __aiter__(self) -> Self:
|
||||
"""Iterate."""
|
||||
return self
|
||||
|
||||
async def __anext__(self) -> bytes:
|
||||
"""Yield next chunk."""
|
||||
rv = await self._stream.readchunk()
|
||||
if rv == (b"", False):
|
||||
raise StopAsyncIteration
|
||||
return rv[0]
|
||||
|
||||
|
||||
class CloudBackupAgent(BackupAgent):
|
||||
"""Cloud backup agent."""
|
||||
|
||||
@@ -179,6 +157,11 @@ class CloudBackupAgent(BackupAgent):
|
||||
headers=details["headers"] | {"content-length": str(backup.size)},
|
||||
timeout=ClientTimeout(connect=10.0, total=43200.0), # 43200s == 12h
|
||||
)
|
||||
_LOGGER.log(
|
||||
logging.DEBUG if upload_status.status < 400 else logging.WARNING,
|
||||
"Backup upload status: %s",
|
||||
upload_status.status,
|
||||
)
|
||||
upload_status.raise_for_status()
|
||||
except (TimeoutError, ClientError) as err:
|
||||
raise BackupAgentError("Failed to upload backup") from err
|
||||
@@ -208,6 +191,7 @@ class CloudBackupAgent(BackupAgent):
|
||||
"""List backups."""
|
||||
try:
|
||||
backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP)
|
||||
_LOGGER.debug("Cloud backups: %s", backups)
|
||||
except (ClientError, CloudError) as err:
|
||||
raise BackupAgentError("Failed to list backups") from err
|
||||
|
||||
|
||||
@@ -46,6 +46,13 @@ def async_setup(hass: HomeAssistant) -> bool:
|
||||
hass.http.register_view(OptionManagerFlowIndexView(hass.config_entries.options))
|
||||
hass.http.register_view(OptionManagerFlowResourceView(hass.config_entries.options))
|
||||
|
||||
hass.http.register_view(
|
||||
SubentryManagerFlowIndexView(hass.config_entries.subentries)
|
||||
)
|
||||
hass.http.register_view(
|
||||
SubentryManagerFlowResourceView(hass.config_entries.subentries)
|
||||
)
|
||||
|
||||
websocket_api.async_register_command(hass, config_entries_get)
|
||||
websocket_api.async_register_command(hass, config_entry_disable)
|
||||
websocket_api.async_register_command(hass, config_entry_get_single)
|
||||
@@ -54,6 +61,9 @@ def async_setup(hass: HomeAssistant) -> bool:
|
||||
websocket_api.async_register_command(hass, config_entries_progress)
|
||||
websocket_api.async_register_command(hass, ignore_config_flow)
|
||||
|
||||
websocket_api.async_register_command(hass, config_subentry_delete)
|
||||
websocket_api.async_register_command(hass, config_subentry_list)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -285,6 +295,66 @@ class OptionManagerFlowResourceView(
|
||||
return await super().post(request, flow_id)
|
||||
|
||||
|
||||
class SubentryManagerFlowIndexView(
|
||||
FlowManagerIndexView[config_entries.ConfigSubentryFlowManager]
|
||||
):
|
||||
"""View to create subentry flows."""
|
||||
|
||||
url = "/api/config/config_entries/subentries/flow"
|
||||
name = "api:config:config_entries:subentries:flow"
|
||||
|
||||
@require_admin(
|
||||
error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT)
|
||||
)
|
||||
@RequestDataValidator(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required("handler"): vol.All(vol.Coerce(tuple), (str, str)),
|
||||
vol.Optional("show_advanced_options", default=False): cv.boolean,
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
)
|
||||
async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response:
|
||||
"""Handle a POST request.
|
||||
|
||||
handler in request is [entry_id, subentry_type].
|
||||
"""
|
||||
return await super()._post_impl(request, data)
|
||||
|
||||
def get_context(self, data: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Return context."""
|
||||
context = super().get_context(data)
|
||||
context["source"] = config_entries.SOURCE_USER
|
||||
if subentry_id := data.get("subentry_id"):
|
||||
context["source"] = config_entries.SOURCE_RECONFIGURE
|
||||
context["subentry_id"] = subentry_id
|
||||
return context
|
||||
|
||||
|
||||
class SubentryManagerFlowResourceView(
|
||||
FlowManagerResourceView[config_entries.ConfigSubentryFlowManager]
|
||||
):
|
||||
"""View to interact with the subentry flow manager."""
|
||||
|
||||
url = "/api/config/config_entries/subentries/flow/{flow_id}"
|
||||
name = "api:config:config_entries:subentries:flow:resource"
|
||||
|
||||
@require_admin(
|
||||
error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT)
|
||||
)
|
||||
async def get(self, request: web.Request, /, flow_id: str) -> web.Response:
|
||||
"""Get the current state of a data_entry_flow."""
|
||||
return await super().get(request, flow_id)
|
||||
|
||||
@require_admin(
|
||||
error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT)
|
||||
)
|
||||
async def post(self, request: web.Request, flow_id: str) -> web.Response:
|
||||
"""Handle a POST request."""
|
||||
return await super().post(request, flow_id)
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command({"type": "config_entries/flow/progress"})
|
||||
def config_entries_progress(
|
||||
@@ -588,3 +658,63 @@ async def _async_matching_config_entries_json_fragments(
|
||||
)
|
||||
or (filter_is_not_helper and entry.domain not in integrations)
|
||||
]
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
"type": "config_entries/subentries/list",
|
||||
"entry_id": str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def config_subentry_list(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""List subentries of a config entry."""
|
||||
entry = get_entry(hass, connection, msg["entry_id"], msg["id"])
|
||||
if entry is None:
|
||||
return
|
||||
|
||||
result = [
|
||||
{
|
||||
"subentry_id": subentry.subentry_id,
|
||||
"subentry_type": subentry.subentry_type,
|
||||
"title": subentry.title,
|
||||
"unique_id": subentry.unique_id,
|
||||
}
|
||||
for subentry in entry.subentries.values()
|
||||
]
|
||||
connection.send_result(msg["id"], result)
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
"type": "config_entries/subentries/delete",
|
||||
"entry_id": str,
|
||||
"subentry_id": str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def config_subentry_delete(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Delete a subentry of a config entry."""
|
||||
entry = get_entry(hass, connection, msg["entry_id"], msg["id"])
|
||||
if entry is None:
|
||||
return
|
||||
|
||||
try:
|
||||
hass.config_entries.async_remove_subentry(entry, msg["subentry_id"])
|
||||
except config_entries.UnknownSubEntry:
|
||||
connection.send_error(
|
||||
msg["id"], websocket_api.const.ERR_NOT_FOUND, "Config subentry not found"
|
||||
)
|
||||
return
|
||||
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
@@ -75,6 +75,7 @@ async def async_converse(
|
||||
language: str | None = None,
|
||||
agent_id: str | None = None,
|
||||
device_id: str | None = None,
|
||||
extra_system_prompt: str | None = None,
|
||||
) -> ConversationResult:
|
||||
"""Process text and get intent."""
|
||||
agent = async_get_agent(hass, agent_id)
|
||||
@@ -99,6 +100,7 @@ async def async_converse(
|
||||
device_id=device_id,
|
||||
language=language,
|
||||
agent_id=agent_id,
|
||||
extra_system_prompt=extra_system_prompt,
|
||||
)
|
||||
with async_conversation_trace() as trace:
|
||||
trace.add_event(
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/conversation",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.20"]
|
||||
"requirements": ["hassil==2.1.0", "home-assistant-intents==2025.1.1"]
|
||||
}
|
||||
|
||||
@@ -40,6 +40,9 @@ class ConversationInput:
|
||||
agent_id: str | None = None
|
||||
"""Agent to use for processing."""
|
||||
|
||||
extra_system_prompt: str | None = None
|
||||
"""Extra prompt to provide extra info to LLMs how to understand the command."""
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return input as a dict."""
|
||||
return {
|
||||
@@ -49,6 +52,7 @@ class ConversationInput:
|
||||
"device_id": self.device_id,
|
||||
"language": self.language,
|
||||
"agent_id": self.agent_id,
|
||||
"extra_system_prompt": self.extra_system_prompt,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from cookidoo_api import Cookidoo, CookidooConfig, CookidooLocalizationConfig
|
||||
from cookidoo_api import Cookidoo, CookidooConfig, get_localization_options
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_COUNTRY,
|
||||
@@ -22,15 +22,17 @@ PLATFORMS: list[Platform] = [Platform.BUTTON, Platform.TODO]
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool:
|
||||
"""Set up Cookidoo from a config entry."""
|
||||
|
||||
localizations = await get_localization_options(
|
||||
country=entry.data[CONF_COUNTRY].lower(),
|
||||
language=entry.data[CONF_LANGUAGE],
|
||||
)
|
||||
|
||||
cookidoo = Cookidoo(
|
||||
async_get_clientsession(hass),
|
||||
CookidooConfig(
|
||||
email=entry.data[CONF_EMAIL],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
localization=CookidooLocalizationConfig(
|
||||
country_code=entry.data[CONF_COUNTRY].lower(),
|
||||
language=entry.data[CONF_LANGUAGE],
|
||||
),
|
||||
localization=localizations[0],
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -10,7 +10,6 @@ from cookidoo_api import (
|
||||
Cookidoo,
|
||||
CookidooAuthException,
|
||||
CookidooConfig,
|
||||
CookidooLocalizationConfig,
|
||||
CookidooRequestException,
|
||||
get_country_options,
|
||||
get_localization_options,
|
||||
@@ -219,18 +218,19 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
else:
|
||||
data_input[CONF_LANGUAGE] = (
|
||||
await get_localization_options(country=data_input[CONF_COUNTRY].lower())
|
||||
)[0] # Pick any language to test login
|
||||
)[0].language # Pick any language to test login
|
||||
|
||||
localizations = await get_localization_options(
|
||||
country=data_input[CONF_COUNTRY].lower(),
|
||||
language=data_input[CONF_LANGUAGE],
|
||||
)
|
||||
|
||||
session = async_get_clientsession(self.hass)
|
||||
cookidoo = Cookidoo(
|
||||
session,
|
||||
async_get_clientsession(self.hass),
|
||||
CookidooConfig(
|
||||
email=data_input[CONF_EMAIL],
|
||||
password=data_input[CONF_PASSWORD],
|
||||
localization=CookidooLocalizationConfig(
|
||||
country_code=data_input[CONF_COUNTRY].lower(),
|
||||
language=data_input[CONF_LANGUAGE],
|
||||
),
|
||||
localization=localizations[0],
|
||||
),
|
||||
)
|
||||
try:
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/cookidoo",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["cookidoo_api"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["cookidoo-api==0.10.0"]
|
||||
"requirements": ["cookidoo-api==0.11.2"]
|
||||
}
|
||||
|
||||
@@ -300,6 +300,10 @@ class CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
def supported_features(self) -> CoverEntityFeature:
|
||||
"""Flag supported features."""
|
||||
if (features := self._attr_supported_features) is not None:
|
||||
if type(features) is int: # noqa: E721
|
||||
new_features = CoverEntityFeature(features)
|
||||
self._report_deprecated_supported_features_values(new_features)
|
||||
return new_features
|
||||
return features
|
||||
|
||||
supported_features = (
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
import os
|
||||
|
||||
from serial.tools.list_ports_common import ListPortInfo
|
||||
@@ -12,7 +13,7 @@ from .const import DONT_USE_USB, MANUAL_PATH, REFRESH_LIST
|
||||
|
||||
|
||||
def list_ports_as_str(
|
||||
serial_ports: list[ListPortInfo], no_usb_option: bool = True
|
||||
serial_ports: Sequence[ListPortInfo], no_usb_option: bool = True
|
||||
) -> list[str]:
|
||||
"""Represent currently available serial ports as string.
|
||||
|
||||
|
||||
@@ -266,7 +266,7 @@ class DeconzBaseLight[_LightDeviceT: Group | Light](
|
||||
@property
|
||||
def color_temp_kelvin(self) -> int | None:
|
||||
"""Return the CT color value."""
|
||||
if self._device.color_temp is None:
|
||||
if self._device.color_temp is None or self._device.color_temp == 0:
|
||||
return None
|
||||
return color_temperature_mired_to_kelvin(self._device.color_temp)
|
||||
|
||||
|
||||
1
homeassistant/components/decorquip/__init__.py
Normal file
1
homeassistant/components/decorquip/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Virtual integration: Decorquip."""
|
||||
6
homeassistant/components/decorquip/manifest.json
Normal file
6
homeassistant/components/decorquip/manifest.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"domain": "decorquip",
|
||||
"name": "Decorquip Dream",
|
||||
"integration_type": "virtual",
|
||||
"supported_by": "motion_blinds"
|
||||
}
|
||||
@@ -6,5 +6,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pydoods"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["pydoods==1.0.2", "Pillow==11.0.0"]
|
||||
"requirements": ["pydoods==1.0.2", "Pillow==11.1.0"]
|
||||
}
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
|
||||
"requirements": ["py-sucks==0.9.10", "deebot-client==10.0.1"]
|
||||
"requirements": ["py-sucks==0.9.10", "deebot-client==10.1.0"]
|
||||
}
|
||||
|
||||
@@ -163,11 +163,6 @@ class EcovacsLegacyVacuum(EcovacsLegacyEntity, StateVacuumEntity):
|
||||
data: dict[str, Any] = {}
|
||||
data[ATTR_ERROR] = self.error
|
||||
|
||||
# these attributes are deprecated and can be removed in 2025.2
|
||||
for key, val in self.device.components.items():
|
||||
attr_name = ATTR_COMPONENT_PREFIX + key
|
||||
data[attr_name] = int(val * 100)
|
||||
|
||||
return data
|
||||
|
||||
def return_to_base(self, **kwargs: Any) -> None:
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["openwebif"],
|
||||
"requirements": ["openwebifpy==4.3.0"]
|
||||
"requirements": ["openwebifpy==4.3.1"]
|
||||
}
|
||||
|
||||
@@ -22,5 +22,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["eq3btsmart"],
|
||||
"requirements": ["eq3btsmart==1.4.1", "bleak-esphome==1.1.0"]
|
||||
"requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.0.0"]
|
||||
}
|
||||
|
||||
@@ -7,7 +7,6 @@ from typing import TYPE_CHECKING
|
||||
|
||||
from aioesphomeapi import APIClient, DeviceInfo
|
||||
from bleak_esphome import connect_scanner
|
||||
from bleak_esphome.backend.cache import ESPHomeBluetoothCache
|
||||
|
||||
from homeassistant.components.bluetooth import async_register_scanner
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback as hass_callback
|
||||
@@ -28,10 +27,9 @@ def async_connect_scanner(
|
||||
entry_data: RuntimeEntryData,
|
||||
cli: APIClient,
|
||||
device_info: DeviceInfo,
|
||||
cache: ESPHomeBluetoothCache,
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Connect scanner."""
|
||||
client_data = connect_scanner(cli, device_info, cache, entry_data.available)
|
||||
client_data = connect_scanner(cli, device_info, entry_data.available)
|
||||
entry_data.bluetooth_device = client_data.bluetooth_device
|
||||
client_data.disconnect_callbacks = entry_data.disconnect_callbacks
|
||||
scanner = client_data.scanner
|
||||
|
||||
@@ -6,8 +6,6 @@ from dataclasses import dataclass, field
|
||||
from functools import cache
|
||||
from typing import Self
|
||||
|
||||
from bleak_esphome.backend.cache import ESPHomeBluetoothCache
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.json import JSONEncoder
|
||||
|
||||
@@ -22,9 +20,6 @@ class DomainData:
|
||||
"""Define a class that stores global esphome data in hass.data[DOMAIN]."""
|
||||
|
||||
_stores: dict[str, ESPHomeStorage] = field(default_factory=dict)
|
||||
bluetooth_cache: ESPHomeBluetoothCache = field(
|
||||
default_factory=ESPHomeBluetoothCache
|
||||
)
|
||||
|
||||
def get_entry_data(self, entry: ESPHomeConfigEntry) -> RuntimeEntryData:
|
||||
"""Return the runtime entry data associated with this config entry.
|
||||
|
||||
@@ -423,9 +423,7 @@ class ESPHomeManager:
|
||||
|
||||
if device_info.bluetooth_proxy_feature_flags_compat(api_version):
|
||||
entry_data.disconnect_callbacks.add(
|
||||
async_connect_scanner(
|
||||
hass, entry_data, cli, device_info, self.domain_data.bluetooth_cache
|
||||
)
|
||||
async_connect_scanner(hass, entry_data, cli, device_info)
|
||||
)
|
||||
|
||||
if device_info.voice_assistant_feature_flags_compat(api_version) and (
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
"requirements": [
|
||||
"aioesphomeapi==28.0.0",
|
||||
"esphome-dashboard-api==1.2.3",
|
||||
"bleak-esphome==1.1.0"
|
||||
"bleak-esphome==2.0.0"
|
||||
],
|
||||
"zeroconf": ["_esphomelib._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -23,10 +23,10 @@ from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_send,
|
||||
)
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.system_info import is_official_image
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import bind_hass
|
||||
from homeassistant.util.signal_type import SignalType
|
||||
from homeassistant.util.system_info import is_official_image
|
||||
|
||||
DOMAIN = "ffmpeg"
|
||||
|
||||
|
||||
@@ -2,10 +2,11 @@
|
||||
|
||||
from datetime import datetime as dt
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import jwt
|
||||
from pyflick import FlickAPI
|
||||
from pyflick.authentication import AbstractFlickAuth
|
||||
from pyflick.authentication import SimpleFlickAuth
|
||||
from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -20,7 +21,8 @@ from homeassistant.const import (
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
|
||||
from .const import CONF_TOKEN_EXPIRY, DOMAIN
|
||||
from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, CONF_TOKEN_EXPIRY
|
||||
from .coordinator import FlickConfigEntry, FlickElectricDataCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -29,36 +31,85 @@ CONF_ID_TOKEN = "id_token"
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool:
|
||||
"""Set up Flick Electric from a config entry."""
|
||||
auth = HassFlickAuth(hass, entry)
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
hass.data[DOMAIN][entry.entry_id] = FlickAPI(auth)
|
||||
coordinator = FlickElectricDataCoordinator(
|
||||
hass, FlickAPI(auth), entry.data[CONF_SUPPLY_NODE_REF]
|
||||
)
|
||||
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
if unload_ok:
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
return unload_ok
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
class HassFlickAuth(AbstractFlickAuth):
|
||||
async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
"""Migrate old entry."""
|
||||
_LOGGER.debug(
|
||||
"Migrating configuration from version %s.%s",
|
||||
config_entry.version,
|
||||
config_entry.minor_version,
|
||||
)
|
||||
|
||||
if config_entry.version > 2:
|
||||
return False
|
||||
|
||||
if config_entry.version == 1:
|
||||
api = FlickAPI(HassFlickAuth(hass, config_entry))
|
||||
|
||||
accounts = await api.getCustomerAccounts()
|
||||
active_accounts = [
|
||||
account for account in accounts if account["status"] == "active"
|
||||
]
|
||||
|
||||
# A single active account can be auto-migrated
|
||||
if (len(active_accounts)) == 1:
|
||||
account = active_accounts[0]
|
||||
|
||||
new_data = {**config_entry.data}
|
||||
new_data[CONF_ACCOUNT_ID] = account["id"]
|
||||
new_data[CONF_SUPPLY_NODE_REF] = account["main_consumer"]["supply_node_ref"]
|
||||
hass.config_entries.async_update_entry(
|
||||
config_entry,
|
||||
title=account["address"],
|
||||
unique_id=account["id"],
|
||||
data=new_data,
|
||||
version=2,
|
||||
)
|
||||
return True
|
||||
|
||||
config_entry.async_start_reauth(hass, data={**config_entry.data})
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class HassFlickAuth(SimpleFlickAuth):
|
||||
"""Implementation of AbstractFlickAuth based on a Home Assistant entity config."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
def __init__(self, hass: HomeAssistant, entry: FlickConfigEntry) -> None:
|
||||
"""Flick authentication based on a Home Assistant entity config."""
|
||||
super().__init__(aiohttp_client.async_get_clientsession(hass))
|
||||
super().__init__(
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
client_id=entry.data.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
|
||||
client_secret=entry.data.get(CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET),
|
||||
websession=aiohttp_client.async_get_clientsession(hass),
|
||||
)
|
||||
self._entry = entry
|
||||
self._hass = hass
|
||||
|
||||
async def _get_entry_token(self):
|
||||
async def _get_entry_token(self) -> dict[str, Any]:
|
||||
# No token saved, generate one
|
||||
if (
|
||||
CONF_TOKEN_EXPIRY not in self._entry.data
|
||||
@@ -75,13 +126,8 @@ class HassFlickAuth(AbstractFlickAuth):
|
||||
async def _update_token(self):
|
||||
_LOGGER.debug("Fetching new access token")
|
||||
|
||||
token = await self.get_new_token(
|
||||
username=self._entry.data[CONF_USERNAME],
|
||||
password=self._entry.data[CONF_PASSWORD],
|
||||
client_id=self._entry.data.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
|
||||
client_secret=self._entry.data.get(
|
||||
CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET
|
||||
),
|
||||
token = await super().get_new_token(
|
||||
self._username, self._password, self._client_id, self._client_secret
|
||||
)
|
||||
|
||||
_LOGGER.debug("New token: %s", token)
|
||||
|
||||
@@ -1,14 +1,18 @@
|
||||
"""Config Flow for Flick Electric integration."""
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyflick.authentication import AuthException, SimpleFlickAuth
|
||||
from aiohttp import ClientResponseError
|
||||
from pyflick import FlickAPI
|
||||
from pyflick.authentication import AbstractFlickAuth, SimpleFlickAuth
|
||||
from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET
|
||||
from pyflick.types import APIException, AuthException, CustomerAccount
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import (
|
||||
CONF_CLIENT_ID,
|
||||
CONF_CLIENT_SECRET,
|
||||
@@ -17,12 +21,18 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
from homeassistant.helpers.selector import (
|
||||
SelectOptionDict,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
)
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
LOGIN_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_USERNAME): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
@@ -35,10 +45,13 @@ DATA_SCHEMA = vol.Schema(
|
||||
class FlickConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Flick config flow."""
|
||||
|
||||
VERSION = 1
|
||||
VERSION = 2
|
||||
auth: AbstractFlickAuth
|
||||
accounts: list[CustomerAccount]
|
||||
data: dict[str, Any]
|
||||
|
||||
async def _validate_input(self, user_input):
|
||||
auth = SimpleFlickAuth(
|
||||
async def _validate_auth(self, user_input: Mapping[str, Any]) -> bool:
|
||||
self.auth = SimpleFlickAuth(
|
||||
username=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
websession=aiohttp_client.async_get_clientsession(self.hass),
|
||||
@@ -48,22 +61,83 @@ class FlickConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
try:
|
||||
async with asyncio.timeout(60):
|
||||
token = await auth.async_get_access_token()
|
||||
except TimeoutError as err:
|
||||
token = await self.auth.async_get_access_token()
|
||||
except (TimeoutError, ClientResponseError) as err:
|
||||
raise CannotConnect from err
|
||||
except AuthException as err:
|
||||
raise InvalidAuth from err
|
||||
|
||||
return token is not None
|
||||
|
||||
async def async_step_select_account(
|
||||
self, user_input: Mapping[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Ask user to select account."""
|
||||
|
||||
errors = {}
|
||||
if user_input is not None and CONF_ACCOUNT_ID in user_input:
|
||||
self.data[CONF_ACCOUNT_ID] = user_input[CONF_ACCOUNT_ID]
|
||||
self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref(
|
||||
user_input[CONF_ACCOUNT_ID]
|
||||
)
|
||||
try:
|
||||
# Ensure supply node is active
|
||||
await FlickAPI(self.auth).getPricing(self.data[CONF_SUPPLY_NODE_REF])
|
||||
except (APIException, ClientResponseError):
|
||||
errors["base"] = "cannot_connect"
|
||||
except AuthException:
|
||||
# We should never get here as we have a valid token
|
||||
return self.async_abort(reason="no_permissions")
|
||||
else:
|
||||
# Supply node is active
|
||||
return await self._async_create_entry()
|
||||
|
||||
try:
|
||||
self.accounts = await FlickAPI(self.auth).getCustomerAccounts()
|
||||
except (APIException, ClientResponseError):
|
||||
errors["base"] = "cannot_connect"
|
||||
|
||||
active_accounts = [a for a in self.accounts if a["status"] == "active"]
|
||||
|
||||
if len(active_accounts) == 0:
|
||||
return self.async_abort(reason="no_accounts")
|
||||
|
||||
if len(active_accounts) == 1:
|
||||
self.data[CONF_ACCOUNT_ID] = active_accounts[0]["id"]
|
||||
self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref(
|
||||
active_accounts[0]["id"]
|
||||
)
|
||||
|
||||
return await self._async_create_entry()
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="select_account",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ACCOUNT_ID): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[
|
||||
SelectOptionDict(
|
||||
value=account["id"], label=account["address"]
|
||||
)
|
||||
for account in active_accounts
|
||||
],
|
||||
mode=SelectSelectorMode.LIST,
|
||||
)
|
||||
)
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
self, user_input: Mapping[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle gathering login info."""
|
||||
errors = {}
|
||||
if user_input is not None:
|
||||
try:
|
||||
await self._validate_input(user_input)
|
||||
await self._validate_auth(user_input)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
@@ -72,20 +146,61 @@ class FlickConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
await self.async_set_unique_id(
|
||||
f"flick_electric_{user_input[CONF_USERNAME]}"
|
||||
)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=f"Flick Electric: {user_input[CONF_USERNAME]}",
|
||||
data=user_input,
|
||||
)
|
||||
self.data = dict(user_input)
|
||||
return await self.async_step_select_account(user_input)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=DATA_SCHEMA, errors=errors
|
||||
step_id="user", data_schema=LOGIN_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, user_input: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle re-authentication."""
|
||||
|
||||
self.data = {**user_input}
|
||||
|
||||
return await self.async_step_user(user_input)
|
||||
|
||||
async def _async_create_entry(self) -> ConfigFlowResult:
|
||||
"""Create an entry for the flow."""
|
||||
|
||||
await self.async_set_unique_id(self.data[CONF_ACCOUNT_ID])
|
||||
|
||||
account = self._get_account(self.data[CONF_ACCOUNT_ID])
|
||||
|
||||
if self.source == SOURCE_REAUTH:
|
||||
# Migration completed
|
||||
if self._get_reauth_entry().version == 1:
|
||||
self.hass.config_entries.async_update_entry(
|
||||
self._get_reauth_entry(),
|
||||
unique_id=self.unique_id,
|
||||
data=self.data,
|
||||
version=self.VERSION,
|
||||
)
|
||||
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(),
|
||||
unique_id=self.unique_id,
|
||||
title=account["address"],
|
||||
data=self.data,
|
||||
)
|
||||
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=account["address"],
|
||||
data=self.data,
|
||||
)
|
||||
|
||||
def _get_account(self, account_id: str) -> CustomerAccount:
|
||||
"""Get the account for the account ID."""
|
||||
return next(a for a in self.accounts if a["id"] == account_id)
|
||||
|
||||
def _get_supply_node_ref(self, account_id: str) -> str:
|
||||
"""Get the supply node ref for the account."""
|
||||
return self._get_account(account_id)["main_consumer"][CONF_SUPPLY_NODE_REF]
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
|
||||
@@ -3,6 +3,8 @@
|
||||
DOMAIN = "flick_electric"
|
||||
|
||||
CONF_TOKEN_EXPIRY = "expires"
|
||||
CONF_ACCOUNT_ID = "account_id"
|
||||
CONF_SUPPLY_NODE_REF = "supply_node_ref"
|
||||
|
||||
ATTR_START_AT = "start_at"
|
||||
ATTR_END_AT = "end_at"
|
||||
|
||||
47
homeassistant/components/flick_electric/coordinator.py
Normal file
47
homeassistant/components/flick_electric/coordinator.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""Data Coordinator for Flick Electric."""
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
import aiohttp
|
||||
from pyflick import FlickAPI, FlickPrice
|
||||
from pyflick.types import APIException, AuthException
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=5)
|
||||
|
||||
type FlickConfigEntry = ConfigEntry[FlickElectricDataCoordinator]
|
||||
|
||||
|
||||
class FlickElectricDataCoordinator(DataUpdateCoordinator[FlickPrice]):
|
||||
"""Coordinator for flick power price."""
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, api: FlickAPI, supply_node_ref: str
|
||||
) -> None:
|
||||
"""Initialize FlickElectricDataCoordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name="Flick Electric",
|
||||
update_interval=SCAN_INTERVAL,
|
||||
)
|
||||
self.supply_node_ref = supply_node_ref
|
||||
self._api = api
|
||||
|
||||
async def _async_update_data(self) -> FlickPrice:
|
||||
"""Fetch pricing data from Flick Electric."""
|
||||
try:
|
||||
async with asyncio.timeout(60):
|
||||
return await self._api.getPricing(self.supply_node_ref)
|
||||
except AuthException as err:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
except (APIException, aiohttp.ClientResponseError) as err:
|
||||
raise UpdateFailed from err
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pyflick"],
|
||||
"requirements": ["PyFlick==0.0.2"]
|
||||
"requirements": ["PyFlick==1.1.2"]
|
||||
}
|
||||
|
||||
@@ -1,74 +1,72 @@
|
||||
"""Support for Flick Electric Pricing data."""
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
from decimal import Decimal
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyflick import FlickAPI, FlickPrice
|
||||
|
||||
from homeassistant.components.sensor import SensorEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CURRENCY_CENT, UnitOfEnergy
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.util.dt import utcnow
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import ATTR_COMPONENTS, ATTR_END_AT, ATTR_START_AT, DOMAIN
|
||||
from .const import ATTR_COMPONENTS, ATTR_END_AT, ATTR_START_AT
|
||||
from .coordinator import FlickConfigEntry, FlickElectricDataCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=5)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
|
||||
hass: HomeAssistant,
|
||||
entry: FlickConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Flick Sensor Setup."""
|
||||
api: FlickAPI = hass.data[DOMAIN][entry.entry_id]
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities([FlickPricingSensor(api)], True)
|
||||
async_add_entities([FlickPricingSensor(coordinator)])
|
||||
|
||||
|
||||
class FlickPricingSensor(SensorEntity):
|
||||
class FlickPricingSensor(CoordinatorEntity[FlickElectricDataCoordinator], SensorEntity):
|
||||
"""Entity object for Flick Electric sensor."""
|
||||
|
||||
_attr_attribution = "Data provided by Flick Electric"
|
||||
_attr_native_unit_of_measurement = f"{CURRENCY_CENT}/{UnitOfEnergy.KILO_WATT_HOUR}"
|
||||
_attr_has_entity_name = True
|
||||
_attr_translation_key = "power_price"
|
||||
_attributes: dict[str, Any] = {}
|
||||
|
||||
def __init__(self, api: FlickAPI) -> None:
|
||||
def __init__(self, coordinator: FlickElectricDataCoordinator) -> None:
|
||||
"""Entity object for Flick Electric sensor."""
|
||||
self._api: FlickAPI = api
|
||||
self._price: FlickPrice = None
|
||||
super().__init__(coordinator)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.supply_node_ref}_pricing"
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
def native_value(self) -> Decimal:
|
||||
"""Return the state of the sensor."""
|
||||
return self._price.price
|
||||
# The API should return a unit price with quantity of 1.0 when no start/end time is provided
|
||||
if self.coordinator.data.quantity != 1:
|
||||
_LOGGER.warning(
|
||||
"Unexpected quantity for unit price: %s", self.coordinator.data
|
||||
)
|
||||
return self.coordinator.data.cost
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
def extra_state_attributes(self) -> dict[str, Any] | None:
|
||||
"""Return the state attributes."""
|
||||
return self._attributes
|
||||
components: dict[str, Decimal] = {}
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Get the Flick Pricing data from the web service."""
|
||||
if self._price and self._price.end_at >= utcnow():
|
||||
return # Power price data is still valid
|
||||
|
||||
async with asyncio.timeout(60):
|
||||
self._price = await self._api.getPricing()
|
||||
|
||||
_LOGGER.debug("Pricing data: %s", self._price)
|
||||
|
||||
self._attributes[ATTR_START_AT] = self._price.start_at
|
||||
self._attributes[ATTR_END_AT] = self._price.end_at
|
||||
for component in self._price.components:
|
||||
for component in self.coordinator.data.components:
|
||||
if component.charge_setter not in ATTR_COMPONENTS:
|
||||
_LOGGER.warning("Found unknown component: %s", component.charge_setter)
|
||||
continue
|
||||
|
||||
self._attributes[component.charge_setter] = float(component.value)
|
||||
components[component.charge_setter] = component.value
|
||||
|
||||
return {
|
||||
ATTR_START_AT: self.coordinator.data.start_at,
|
||||
ATTR_END_AT: self.coordinator.data.end_at,
|
||||
**components,
|
||||
}
|
||||
|
||||
@@ -9,6 +9,12 @@
|
||||
"client_id": "Client ID (optional)",
|
||||
"client_secret": "Client Secret (optional)"
|
||||
}
|
||||
},
|
||||
"select_account": {
|
||||
"title": "Select account",
|
||||
"data": {
|
||||
"account_id": "Account"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
@@ -17,7 +23,10 @@
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"no_permissions": "Cannot get pricing for this account. Please check user permissions.",
|
||||
"no_accounts": "No services are active on this Flick account"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
|
||||
@@ -11,7 +11,7 @@ from homeassistant.const import (
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_registry import async_migrate_entries
|
||||
from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries
|
||||
|
||||
from .config_flow import DEFAULT_RTSP_PORT
|
||||
from .const import CONF_RTSP_PORT, DOMAIN, LOGGER, SERVICE_PTZ, SERVICE_PTZ_PRESET
|
||||
@@ -36,6 +36,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
|
||||
|
||||
# Migrate to correct unique IDs for switches
|
||||
await async_migrate_entities(hass, entry)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
@@ -92,3 +95,24 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
LOGGER.debug("Migration to version %s successful", entry.version)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_migrate_entities(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Migrate old entry."""
|
||||
|
||||
@callback
|
||||
def _update_unique_id(
|
||||
entity_entry: RegistryEntry,
|
||||
) -> dict[str, str] | None:
|
||||
"""Update unique ID of entity entry."""
|
||||
if (
|
||||
entity_entry.domain == Platform.SWITCH
|
||||
and entity_entry.unique_id == "sleep_switch"
|
||||
):
|
||||
entity_new_unique_id = f"{entity_entry.config_entry_id}_sleep_switch"
|
||||
return {"new_unique_id": entity_new_unique_id}
|
||||
|
||||
return None
|
||||
|
||||
# Migrate entities
|
||||
await async_migrate_entries(hass, entry.entry_id, _update_unique_id)
|
||||
|
||||
@@ -41,7 +41,7 @@ class FoscamSleepSwitch(FoscamEntity, SwitchEntity):
|
||||
"""Initialize a Foscam Sleep Switch."""
|
||||
super().__init__(coordinator, config_entry.entry_id)
|
||||
|
||||
self._attr_unique_id = "sleep_switch"
|
||||
self._attr_unique_id = f"{config_entry.entry_id}_sleep_switch"
|
||||
self._attr_translation_key = "sleep_switch"
|
||||
self._attr_has_entity_name = True
|
||||
|
||||
|
||||
@@ -214,6 +214,18 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
|
||||
self._options = options
|
||||
await self.hass.async_add_executor_job(self.setup)
|
||||
|
||||
device_registry = dr.async_get(self.hass)
|
||||
device_registry.async_get_or_create(
|
||||
config_entry_id=self.config_entry.entry_id,
|
||||
configuration_url=f"http://{self.host}",
|
||||
connections={(dr.CONNECTION_NETWORK_MAC, self.mac)},
|
||||
identifiers={(DOMAIN, self.unique_id)},
|
||||
manufacturer="AVM",
|
||||
model=self.model,
|
||||
name=self.config_entry.title,
|
||||
sw_version=self.current_firmware,
|
||||
)
|
||||
|
||||
def setup(self) -> None:
|
||||
"""Set up FritzboxTools class."""
|
||||
|
||||
|
||||
@@ -68,23 +68,14 @@ class FritzBoxBaseEntity:
|
||||
"""Init device info class."""
|
||||
self._avm_wrapper = avm_wrapper
|
||||
self._device_name = device_name
|
||||
|
||||
@property
|
||||
def mac_address(self) -> str:
|
||||
"""Return the mac address of the main device."""
|
||||
return self._avm_wrapper.mac
|
||||
self.mac_address = self._avm_wrapper.mac
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Return the device information."""
|
||||
return DeviceInfo(
|
||||
configuration_url=f"http://{self._avm_wrapper.host}",
|
||||
connections={(dr.CONNECTION_NETWORK_MAC, self.mac_address)},
|
||||
identifiers={(DOMAIN, self._avm_wrapper.unique_id)},
|
||||
manufacturer="AVM",
|
||||
model=self._avm_wrapper.model,
|
||||
name=self._device_name,
|
||||
sw_version=self._avm_wrapper.current_firmware,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
{
|
||||
"domain": "frontend",
|
||||
"name": "Home Assistant Frontend",
|
||||
"after_dependencies": ["backup"],
|
||||
"codeowners": ["@home-assistant/frontend"],
|
||||
"dependencies": [
|
||||
"api",
|
||||
@@ -20,5 +21,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20241230.0"]
|
||||
"requirements": ["home-assistant-frontend==20250106.0"]
|
||||
}
|
||||
|
||||
@@ -171,6 +171,8 @@ async def async_test_still(
|
||||
"""Verify that the still image is valid before we create an entity."""
|
||||
fmt = None
|
||||
if not (url := info.get(CONF_STILL_IMAGE_URL)):
|
||||
# If user didn't specify a still image URL,the automatically generated
|
||||
# still image that stream generates is always jpeg.
|
||||
return {}, info.get(CONF_CONTENT_TYPE, "image/jpeg")
|
||||
try:
|
||||
if not isinstance(url, template_helper.Template):
|
||||
@@ -309,8 +311,8 @@ async def async_test_and_preview_stream(
|
||||
return stream
|
||||
|
||||
|
||||
def register_preview(hass: HomeAssistant) -> None:
|
||||
"""Set up previews for camera feeds during config flow."""
|
||||
def register_still_preview(hass: HomeAssistant) -> None:
|
||||
"""Set up still image preview for camera feeds during config flow."""
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
|
||||
if not hass.data[DOMAIN].get(IMAGE_PREVIEWS_ACTIVE):
|
||||
@@ -326,7 +328,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize Generic ConfigFlow."""
|
||||
self.preview_cam: dict[str, Any] = {}
|
||||
self.preview_image_settings: dict[str, Any] = {}
|
||||
self.preview_stream: Stream | None = None
|
||||
self.user_input: dict[str, Any] = {}
|
||||
self.title = ""
|
||||
@@ -343,7 +345,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the start of the config flow."""
|
||||
errors = {}
|
||||
description_placeholders = {}
|
||||
hass = self.hass
|
||||
if user_input:
|
||||
# Secondary validation because serialised vol can't seem to handle this complexity:
|
||||
@@ -359,8 +360,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
except InvalidStreamException as err:
|
||||
errors[CONF_STREAM_SOURCE] = str(err)
|
||||
if err.details:
|
||||
errors["error_details"] = err.details
|
||||
self.preview_stream = None
|
||||
if not errors:
|
||||
user_input[CONF_CONTENT_TYPE] = still_format
|
||||
@@ -369,18 +368,11 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
name = (
|
||||
slug(hass, still_url) or slug(hass, stream_url) or DEFAULT_NAME
|
||||
)
|
||||
if still_url is None:
|
||||
# If user didn't specify a still image URL,
|
||||
# The automatically generated still image that stream generates
|
||||
# is always jpeg
|
||||
user_input[CONF_CONTENT_TYPE] = "image/jpeg"
|
||||
self.user_input = user_input
|
||||
self.title = name
|
||||
# temporary preview for user to check the image
|
||||
self.preview_cam = user_input
|
||||
self.preview_image_settings = user_input
|
||||
return await self.async_step_user_confirm()
|
||||
if "error_details" in errors:
|
||||
description_placeholders["error"] = errors.pop("error_details")
|
||||
elif self.user_input:
|
||||
user_input = self.user_input
|
||||
else:
|
||||
@@ -388,7 +380,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=build_schema(user_input),
|
||||
description_placeholders=description_placeholders,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@@ -405,8 +396,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_create_entry(
|
||||
title=self.title, data={}, options=self.user_input
|
||||
)
|
||||
register_preview(self.hass)
|
||||
preview_url = f"/api/generic/preview_flow_image/{self.flow_id}?t={datetime.now().isoformat()}"
|
||||
register_still_preview(self.hass)
|
||||
return self.async_show_form(
|
||||
step_id="user_confirm",
|
||||
data_schema=vol.Schema(
|
||||
@@ -414,7 +404,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
vol.Required(CONF_CONFIRMED_OK, default=False): bool,
|
||||
}
|
||||
),
|
||||
description_placeholders={"preview_url": preview_url},
|
||||
errors=None,
|
||||
preview="generic_camera",
|
||||
)
|
||||
@@ -430,7 +419,8 @@ class GenericOptionsFlowHandler(OptionsFlow):
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize Generic IP Camera options flow."""
|
||||
self.preview_cam: dict[str, Any] = {}
|
||||
self.preview_image_settings: dict[str, Any] = {}
|
||||
self.preview_stream: Stream | None = None
|
||||
self.user_input: dict[str, Any] = {}
|
||||
|
||||
async def async_step_init(
|
||||
@@ -438,42 +428,38 @@ class GenericOptionsFlowHandler(OptionsFlow):
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage Generic IP Camera options."""
|
||||
errors: dict[str, str] = {}
|
||||
description_placeholders = {}
|
||||
hass = self.hass
|
||||
|
||||
if user_input is not None:
|
||||
errors, still_format = await async_test_still(
|
||||
hass, self.config_entry.options | user_input
|
||||
)
|
||||
try:
|
||||
await async_test_and_preview_stream(hass, user_input)
|
||||
except InvalidStreamException as err:
|
||||
errors[CONF_STREAM_SOURCE] = str(err)
|
||||
if err.details:
|
||||
errors["error_details"] = err.details
|
||||
# Stream preview during options flow not yet implemented
|
||||
|
||||
still_url = user_input.get(CONF_STILL_IMAGE_URL)
|
||||
if not errors:
|
||||
if still_url is None:
|
||||
# If user didn't specify a still image URL,
|
||||
# The automatically generated still image that stream generates
|
||||
# is always jpeg
|
||||
still_format = "image/jpeg"
|
||||
data = {
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS: self.config_entry.options.get(
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False
|
||||
),
|
||||
**user_input,
|
||||
CONF_CONTENT_TYPE: still_format
|
||||
or self.config_entry.options.get(CONF_CONTENT_TYPE),
|
||||
}
|
||||
self.user_input = data
|
||||
# temporary preview for user to check the image
|
||||
self.preview_cam = data
|
||||
return await self.async_step_confirm_still()
|
||||
if "error_details" in errors:
|
||||
description_placeholders["error"] = errors.pop("error_details")
|
||||
if user_input:
|
||||
# Secondary validation because serialised vol can't seem to handle this complexity:
|
||||
if not user_input.get(CONF_STILL_IMAGE_URL) and not user_input.get(
|
||||
CONF_STREAM_SOURCE
|
||||
):
|
||||
errors["base"] = "no_still_image_or_stream_url"
|
||||
else:
|
||||
errors, still_format = await async_test_still(hass, user_input)
|
||||
try:
|
||||
self.preview_stream = await async_test_and_preview_stream(
|
||||
hass, user_input
|
||||
)
|
||||
except InvalidStreamException as err:
|
||||
errors[CONF_STREAM_SOURCE] = str(err)
|
||||
self.preview_stream = None
|
||||
if not errors:
|
||||
data = {
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS: self.config_entry.options.get(
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False
|
||||
),
|
||||
**user_input,
|
||||
CONF_CONTENT_TYPE: still_format
|
||||
or self.config_entry.options.get(CONF_CONTENT_TYPE),
|
||||
}
|
||||
self.user_input = data
|
||||
# temporary preview for user to check the image
|
||||
self.preview_image_settings = data
|
||||
return await self.async_step_user_confirm()
|
||||
elif self.user_input:
|
||||
user_input = self.user_input
|
||||
return self.async_show_form(
|
||||
step_id="init",
|
||||
data_schema=build_schema(
|
||||
@@ -481,34 +467,40 @@ class GenericOptionsFlowHandler(OptionsFlow):
|
||||
True,
|
||||
self.show_advanced_options,
|
||||
),
|
||||
description_placeholders=description_placeholders,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_confirm_still(
|
||||
async def async_step_user_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle user clicking confirm after still preview."""
|
||||
if user_input:
|
||||
if ha_stream := self.preview_stream:
|
||||
# Kill off the temp stream we created.
|
||||
await ha_stream.stop()
|
||||
if not user_input.get(CONF_CONFIRMED_OK):
|
||||
return await self.async_step_init()
|
||||
return self.async_create_entry(
|
||||
title=self.config_entry.title,
|
||||
data=self.user_input,
|
||||
)
|
||||
register_preview(self.hass)
|
||||
preview_url = f"/api/generic/preview_flow_image/{self.flow_id}?t={datetime.now().isoformat()}"
|
||||
register_still_preview(self.hass)
|
||||
return self.async_show_form(
|
||||
step_id="confirm_still",
|
||||
step_id="user_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_CONFIRMED_OK, default=False): bool,
|
||||
}
|
||||
),
|
||||
description_placeholders={"preview_url": preview_url},
|
||||
errors=None,
|
||||
preview="generic_camera",
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
async def async_setup_preview(hass: HomeAssistant) -> None:
|
||||
"""Set up preview WS API."""
|
||||
websocket_api.async_register_command(hass, ws_start_preview)
|
||||
|
||||
|
||||
class CameraImagePreview(HomeAssistantView):
|
||||
"""Camera view to temporarily serve an image."""
|
||||
@@ -534,7 +526,7 @@ class CameraImagePreview(HomeAssistantView):
|
||||
if not flow:
|
||||
_LOGGER.warning("Unknown flow while getting image preview")
|
||||
raise web.HTTPNotFound
|
||||
user_input = flow.preview_cam
|
||||
user_input = flow.preview_image_settings
|
||||
camera = GenericCamera(self.hass, user_input, flow_id, "preview")
|
||||
if not camera.is_on:
|
||||
_LOGGER.debug("Camera is off")
|
||||
@@ -550,7 +542,7 @@ class CameraImagePreview(HomeAssistantView):
|
||||
{
|
||||
vol.Required("type"): "generic_camera/start_preview",
|
||||
vol.Required("flow_id"): str,
|
||||
vol.Optional("flow_type"): vol.Any("config_flow"),
|
||||
vol.Optional("flow_type"): vol.Any("config_flow", "options_flow"),
|
||||
vol.Optional("user_input"): dict,
|
||||
}
|
||||
)
|
||||
@@ -564,11 +556,18 @@ async def ws_start_preview(
|
||||
_LOGGER.debug("Generating websocket handler for generic camera preview")
|
||||
|
||||
flow_id = msg["flow_id"]
|
||||
flow = cast(
|
||||
GenericIPCamConfigFlow,
|
||||
hass.config_entries.flow._progress.get(flow_id), # noqa: SLF001
|
||||
)
|
||||
user_input = flow.preview_cam
|
||||
flow: GenericIPCamConfigFlow | GenericOptionsFlowHandler
|
||||
if msg.get("flow_type", "config_flow") == "config_flow":
|
||||
flow = cast(
|
||||
GenericIPCamConfigFlow,
|
||||
hass.config_entries.flow._progress.get(flow_id), # noqa: SLF001
|
||||
)
|
||||
else: # (flow type == "options flow")
|
||||
flow = cast(
|
||||
GenericOptionsFlowHandler,
|
||||
hass.config_entries.options._progress.get(flow_id), # noqa: SLF001
|
||||
)
|
||||
user_input = flow.preview_image_settings
|
||||
|
||||
# Create an EntityPlatform, needed for name translations
|
||||
platform = await async_prepare_setup_platform(hass, {}, CAMERA_DOMAIN, DOMAIN)
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/generic",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["av==13.1.0", "Pillow==11.0.0"]
|
||||
"requirements": ["av==13.1.0", "Pillow==11.1.0"]
|
||||
}
|
||||
|
||||
@@ -67,11 +67,11 @@
|
||||
"use_wallclock_as_timestamps": "This option may correct segmenting or crashing issues arising from buggy timestamp implementations on some cameras"
|
||||
}
|
||||
},
|
||||
"confirm_still": {
|
||||
"title": "Preview",
|
||||
"description": "",
|
||||
"user_confirm": {
|
||||
"title": "Confirmation",
|
||||
"description": "Please wait for previews to load...",
|
||||
"data": {
|
||||
"confirmed_ok": "This image looks good."
|
||||
"confirmed_ok": "Everything looks good."
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -34,6 +34,18 @@
|
||||
"moderate": "Moderate",
|
||||
"good": "Good",
|
||||
"very_good": "Very good"
|
||||
},
|
||||
"state_attributes": {
|
||||
"options": {
|
||||
"state": {
|
||||
"very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
|
||||
"bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
|
||||
"sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
|
||||
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
|
||||
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
|
||||
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"c6h6": {
|
||||
@@ -51,6 +63,18 @@
|
||||
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
|
||||
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
|
||||
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
|
||||
},
|
||||
"state_attributes": {
|
||||
"options": {
|
||||
"state": {
|
||||
"very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
|
||||
"bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
|
||||
"sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
|
||||
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
|
||||
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
|
||||
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"o3_index": {
|
||||
@@ -62,6 +86,18 @@
|
||||
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
|
||||
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
|
||||
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
|
||||
},
|
||||
"state_attributes": {
|
||||
"options": {
|
||||
"state": {
|
||||
"very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
|
||||
"bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
|
||||
"sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
|
||||
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
|
||||
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
|
||||
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"pm10_index": {
|
||||
@@ -73,6 +109,18 @@
|
||||
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
|
||||
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
|
||||
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
|
||||
},
|
||||
"state_attributes": {
|
||||
"options": {
|
||||
"state": {
|
||||
"very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
|
||||
"bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
|
||||
"sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
|
||||
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
|
||||
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
|
||||
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"pm25_index": {
|
||||
@@ -84,6 +132,18 @@
|
||||
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
|
||||
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
|
||||
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
|
||||
},
|
||||
"state_attributes": {
|
||||
"options": {
|
||||
"state": {
|
||||
"very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
|
||||
"bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
|
||||
"sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
|
||||
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
|
||||
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
|
||||
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"so2_index": {
|
||||
@@ -95,6 +155,18 @@
|
||||
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
|
||||
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
|
||||
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
|
||||
},
|
||||
"state_attributes": {
|
||||
"options": {
|
||||
"state": {
|
||||
"very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
|
||||
"bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
|
||||
"sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
|
||||
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
|
||||
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
|
||||
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/google",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["googleapiclient"],
|
||||
"requirements": ["gcal-sync==6.2.0", "oauth2client==4.1.3", "ical==8.2.0"]
|
||||
"requirements": ["gcal-sync==7.0.0", "oauth2client==4.1.3", "ical==8.3.0"]
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"fields": {
|
||||
"agent_user_id": {
|
||||
"name": "Agent user ID",
|
||||
"description": "Only needed for automations. Specific Home Assistant user id (not username, ID in configuration > users > under username) to sync with Google Assistant. Do not need when you use this action through Home Assistant front end or API. Used in automation script or other place where context.user_id is missing."
|
||||
"description": "Only needed for automations. Specific Home Assistant user ID (not username, ID in Settings > People > Users > under username) to sync with Google Assistant. Not needed when you use this action through Home Assistant frontend or API. Used in automation, script or other place where context.user_id is missing."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -204,9 +204,7 @@ class GoogleGenerativeAIConversationEntity(
|
||||
"""Process a sentence."""
|
||||
result = conversation.ConversationResult(
|
||||
response=intent.IntentResponse(language=user_input.language),
|
||||
conversation_id=user_input.conversation_id
|
||||
if user_input.conversation_id in self.history
|
||||
else ulid.ulid_now(),
|
||||
conversation_id=user_input.conversation_id or ulid.ulid_now(),
|
||||
)
|
||||
assert result.conversation_id
|
||||
|
||||
|
||||
@@ -66,11 +66,11 @@
|
||||
"services": {
|
||||
"upload": {
|
||||
"name": "Upload media",
|
||||
"description": "Upload images or videos to Google Photos.",
|
||||
"description": "Uploads images or videos to Google Photos.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"name": "Integration Id",
|
||||
"description": "The Google Photos integration id."
|
||||
"name": "Integration ID",
|
||||
"description": "The Google Photos integration ID."
|
||||
},
|
||||
"filename": {
|
||||
"name": "Filename",
|
||||
|
||||
@@ -238,7 +238,7 @@
|
||||
},
|
||||
"set": {
|
||||
"name": "Set",
|
||||
"description": "Creates/Updates a user group.",
|
||||
"description": "Creates/Updates a group.",
|
||||
"fields": {
|
||||
"object_id": {
|
||||
"name": "Object ID",
|
||||
|
||||
@@ -21,6 +21,7 @@ PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.BUTTON,
|
||||
Platform.CALENDAR,
|
||||
Platform.IMAGE,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
Platform.TODO,
|
||||
|
||||
@@ -45,7 +45,7 @@ class HabiticaButtonEntityDescription(ButtonEntityDescription):
|
||||
entity_picture: str | None = None
|
||||
|
||||
|
||||
class HabitipyButtonEntity(StrEnum):
|
||||
class HabiticaButtonEntity(StrEnum):
|
||||
"""Habitica button entities."""
|
||||
|
||||
RUN_CRON = "run_cron"
|
||||
@@ -68,14 +68,14 @@ class HabitipyButtonEntity(StrEnum):
|
||||
|
||||
BUTTON_DESCRIPTIONS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.RUN_CRON,
|
||||
translation_key=HabitipyButtonEntity.RUN_CRON,
|
||||
key=HabiticaButtonEntity.RUN_CRON,
|
||||
translation_key=HabiticaButtonEntity.RUN_CRON,
|
||||
press_fn=lambda coordinator: coordinator.habitica.run_cron(),
|
||||
available_fn=lambda data: data.user.needsCron is True,
|
||||
),
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.BUY_HEALTH_POTION,
|
||||
translation_key=HabitipyButtonEntity.BUY_HEALTH_POTION,
|
||||
key=HabiticaButtonEntity.BUY_HEALTH_POTION,
|
||||
translation_key=HabiticaButtonEntity.BUY_HEALTH_POTION,
|
||||
press_fn=lambda coordinator: coordinator.habitica.buy_health_potion(),
|
||||
available_fn=(
|
||||
lambda data: (data.user.stats.gp or 0) >= 25
|
||||
@@ -84,8 +84,8 @@ BUTTON_DESCRIPTIONS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
entity_picture="shop_potion.png",
|
||||
),
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.ALLOCATE_ALL_STAT_POINTS,
|
||||
translation_key=HabitipyButtonEntity.ALLOCATE_ALL_STAT_POINTS,
|
||||
key=HabiticaButtonEntity.ALLOCATE_ALL_STAT_POINTS,
|
||||
translation_key=HabiticaButtonEntity.ALLOCATE_ALL_STAT_POINTS,
|
||||
press_fn=lambda coordinator: coordinator.habitica.allocate_stat_points(),
|
||||
available_fn=(
|
||||
lambda data: data.user.preferences.automaticAllocation is True
|
||||
@@ -93,8 +93,8 @@ BUTTON_DESCRIPTIONS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
),
|
||||
),
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.REVIVE,
|
||||
translation_key=HabitipyButtonEntity.REVIVE,
|
||||
key=HabiticaButtonEntity.REVIVE,
|
||||
translation_key=HabiticaButtonEntity.REVIVE,
|
||||
press_fn=lambda coordinator: coordinator.habitica.revive(),
|
||||
available_fn=lambda data: data.user.stats.hp == 0,
|
||||
),
|
||||
@@ -103,8 +103,8 @@ BUTTON_DESCRIPTIONS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
|
||||
CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.MPHEAL,
|
||||
translation_key=HabitipyButtonEntity.MPHEAL,
|
||||
key=HabiticaButtonEntity.MPHEAL,
|
||||
translation_key=HabiticaButtonEntity.MPHEAL,
|
||||
press_fn=(
|
||||
lambda coordinator: coordinator.habitica.cast_skill(Skill.ETHEREAL_SURGE)
|
||||
),
|
||||
@@ -116,8 +116,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
entity_picture="shop_mpheal.png",
|
||||
),
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.EARTH,
|
||||
translation_key=HabitipyButtonEntity.EARTH,
|
||||
key=HabiticaButtonEntity.EARTH,
|
||||
translation_key=HabiticaButtonEntity.EARTH,
|
||||
press_fn=lambda coordinator: coordinator.habitica.cast_skill(Skill.EARTHQUAKE),
|
||||
available_fn=(
|
||||
lambda data: (data.user.stats.lvl or 0) >= 13
|
||||
@@ -127,8 +127,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
entity_picture="shop_earth.png",
|
||||
),
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.FROST,
|
||||
translation_key=HabitipyButtonEntity.FROST,
|
||||
key=HabiticaButtonEntity.FROST,
|
||||
translation_key=HabiticaButtonEntity.FROST,
|
||||
press_fn=(
|
||||
lambda coordinator: coordinator.habitica.cast_skill(Skill.CHILLING_FROST)
|
||||
),
|
||||
@@ -142,8 +142,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
entity_picture="shop_frost.png",
|
||||
),
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.DEFENSIVE_STANCE,
|
||||
translation_key=HabitipyButtonEntity.DEFENSIVE_STANCE,
|
||||
key=HabiticaButtonEntity.DEFENSIVE_STANCE,
|
||||
translation_key=HabiticaButtonEntity.DEFENSIVE_STANCE,
|
||||
press_fn=(
|
||||
lambda coordinator: coordinator.habitica.cast_skill(Skill.DEFENSIVE_STANCE)
|
||||
),
|
||||
@@ -155,8 +155,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
entity_picture="shop_defensiveStance.png",
|
||||
),
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.VALOROUS_PRESENCE,
|
||||
translation_key=HabitipyButtonEntity.VALOROUS_PRESENCE,
|
||||
key=HabiticaButtonEntity.VALOROUS_PRESENCE,
|
||||
translation_key=HabiticaButtonEntity.VALOROUS_PRESENCE,
|
||||
press_fn=(
|
||||
lambda coordinator: coordinator.habitica.cast_skill(Skill.VALOROUS_PRESENCE)
|
||||
),
|
||||
@@ -168,8 +168,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
entity_picture="shop_valorousPresence.png",
|
||||
),
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.INTIMIDATE,
|
||||
translation_key=HabitipyButtonEntity.INTIMIDATE,
|
||||
key=HabiticaButtonEntity.INTIMIDATE,
|
||||
translation_key=HabiticaButtonEntity.INTIMIDATE,
|
||||
press_fn=(
|
||||
lambda coordinator: coordinator.habitica.cast_skill(Skill.INTIMIDATING_GAZE)
|
||||
),
|
||||
@@ -181,8 +181,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
entity_picture="shop_intimidate.png",
|
||||
),
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.TOOLS_OF_TRADE,
|
||||
translation_key=HabitipyButtonEntity.TOOLS_OF_TRADE,
|
||||
key=HabiticaButtonEntity.TOOLS_OF_TRADE,
|
||||
translation_key=HabiticaButtonEntity.TOOLS_OF_TRADE,
|
||||
press_fn=(
|
||||
lambda coordinator: coordinator.habitica.cast_skill(
|
||||
Skill.TOOLS_OF_THE_TRADE
|
||||
@@ -196,8 +196,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
entity_picture="shop_toolsOfTrade.png",
|
||||
),
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.STEALTH,
|
||||
translation_key=HabitipyButtonEntity.STEALTH,
|
||||
key=HabiticaButtonEntity.STEALTH,
|
||||
translation_key=HabiticaButtonEntity.STEALTH,
|
||||
press_fn=lambda coordinator: coordinator.habitica.cast_skill(Skill.STEALTH),
|
||||
# Stealth buffs stack and it can only be cast if the amount of
|
||||
# buffs is smaller than the amount of unfinished dailies
|
||||
@@ -219,8 +219,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
entity_picture="shop_stealth.png",
|
||||
),
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.HEAL,
|
||||
translation_key=HabitipyButtonEntity.HEAL,
|
||||
key=HabiticaButtonEntity.HEAL,
|
||||
translation_key=HabiticaButtonEntity.HEAL,
|
||||
press_fn=(
|
||||
lambda coordinator: coordinator.habitica.cast_skill(Skill.HEALING_LIGHT)
|
||||
),
|
||||
@@ -233,8 +233,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
entity_picture="shop_heal.png",
|
||||
),
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.BRIGHTNESS,
|
||||
translation_key=HabitipyButtonEntity.BRIGHTNESS,
|
||||
key=HabiticaButtonEntity.BRIGHTNESS,
|
||||
translation_key=HabiticaButtonEntity.BRIGHTNESS,
|
||||
press_fn=(
|
||||
lambda coordinator: coordinator.habitica.cast_skill(
|
||||
Skill.SEARING_BRIGHTNESS
|
||||
@@ -248,8 +248,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
entity_picture="shop_brightness.png",
|
||||
),
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.PROTECT_AURA,
|
||||
translation_key=HabitipyButtonEntity.PROTECT_AURA,
|
||||
key=HabiticaButtonEntity.PROTECT_AURA,
|
||||
translation_key=HabiticaButtonEntity.PROTECT_AURA,
|
||||
press_fn=(
|
||||
lambda coordinator: coordinator.habitica.cast_skill(Skill.PROTECTIVE_AURA)
|
||||
),
|
||||
@@ -261,8 +261,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
|
||||
entity_picture="shop_protectAura.png",
|
||||
),
|
||||
HabiticaButtonEntityDescription(
|
||||
key=HabitipyButtonEntity.HEAL_ALL,
|
||||
translation_key=HabitipyButtonEntity.HEAL_ALL,
|
||||
key=HabiticaButtonEntity.HEAL_ALL,
|
||||
translation_key=HabiticaButtonEntity.HEAL_ALL,
|
||||
press_fn=lambda coordinator: coordinator.habitica.cast_skill(Skill.BLESSING),
|
||||
available_fn=(
|
||||
lambda data: (data.user.stats.lvl or 0) >= 14
|
||||
|
||||
@@ -31,6 +31,11 @@ ATTR_TASK = "task"
|
||||
ATTR_DIRECTION = "direction"
|
||||
ATTR_TARGET = "target"
|
||||
ATTR_ITEM = "item"
|
||||
ATTR_TYPE = "type"
|
||||
ATTR_PRIORITY = "priority"
|
||||
ATTR_TAG = "tag"
|
||||
ATTR_KEYWORD = "keyword"
|
||||
|
||||
SERVICE_CAST_SKILL = "cast_skill"
|
||||
SERVICE_START_QUEST = "start_quest"
|
||||
SERVICE_ACCEPT_QUEST = "accept_quest"
|
||||
@@ -38,6 +43,8 @@ SERVICE_CANCEL_QUEST = "cancel_quest"
|
||||
SERVICE_ABORT_QUEST = "abort_quest"
|
||||
SERVICE_REJECT_QUEST = "reject_quest"
|
||||
SERVICE_LEAVE_QUEST = "leave_quest"
|
||||
SERVICE_GET_TASKS = "get_tasks"
|
||||
|
||||
SERVICE_SCORE_HABIT = "score_habit"
|
||||
SERVICE_SCORE_REWARD = "score_reward"
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ from __future__ import annotations
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
from io import BytesIO
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -18,6 +19,7 @@ from habiticalib import (
|
||||
TaskFilter,
|
||||
TooManyRequestsError,
|
||||
UserData,
|
||||
UserStyles,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -130,3 +132,13 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
|
||||
) from e
|
||||
else:
|
||||
await self.async_request_refresh()
|
||||
|
||||
async def generate_avatar(self, user_styles: UserStyles) -> bytes:
|
||||
"""Generate Avatar."""
|
||||
|
||||
avatar = BytesIO()
|
||||
await self.habitica.generate_avatar(
|
||||
fp=avatar, user_styles=user_styles, fmt="PNG"
|
||||
)
|
||||
|
||||
return avatar.getvalue()
|
||||
|
||||
@@ -121,12 +121,6 @@
|
||||
"rogue": "mdi:ninja"
|
||||
}
|
||||
},
|
||||
"todos": {
|
||||
"default": "mdi:checkbox-outline"
|
||||
},
|
||||
"dailys": {
|
||||
"default": "mdi:calendar-month"
|
||||
},
|
||||
"habits": {
|
||||
"default": "mdi:contrast-box"
|
||||
},
|
||||
@@ -196,6 +190,12 @@
|
||||
},
|
||||
"transformation": {
|
||||
"service": "mdi:flask-round-bottom"
|
||||
},
|
||||
"get_tasks": {
|
||||
"service": "mdi:calendar-export",
|
||||
"sections": {
|
||||
"filter": "mdi:calendar-filter"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
76
homeassistant/components/habitica/image.py
Normal file
76
homeassistant/components/habitica/image.py
Normal file
@@ -0,0 +1,76 @@
|
||||
"""Image platform for Habitica integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import asdict
|
||||
from enum import StrEnum
|
||||
|
||||
from habiticalib import UserStyles
|
||||
|
||||
from homeassistant.components.image import ImageEntity, ImageEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import HabiticaConfigEntry
|
||||
from .coordinator import HabiticaDataUpdateCoordinator
|
||||
from .entity import HabiticaBase
|
||||
|
||||
|
||||
class HabiticaImageEntity(StrEnum):
|
||||
"""Image entities."""
|
||||
|
||||
AVATAR = "avatar"
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: HabiticaConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the habitica image platform."""
|
||||
|
||||
coordinator = config_entry.runtime_data
|
||||
|
||||
async_add_entities([HabiticaImage(hass, coordinator)])
|
||||
|
||||
|
||||
class HabiticaImage(HabiticaBase, ImageEntity):
|
||||
"""A Habitica image entity."""
|
||||
|
||||
entity_description = ImageEntityDescription(
|
||||
key=HabiticaImageEntity.AVATAR,
|
||||
translation_key=HabiticaImageEntity.AVATAR,
|
||||
)
|
||||
_attr_content_type = "image/png"
|
||||
_current_appearance: UserStyles | None = None
|
||||
_cache: bytes | None = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
coordinator: HabiticaDataUpdateCoordinator,
|
||||
) -> None:
|
||||
"""Initialize the image entity."""
|
||||
super().__init__(coordinator, self.entity_description)
|
||||
ImageEntity.__init__(self, hass)
|
||||
self._attr_image_last_updated = dt_util.utcnow()
|
||||
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Check if equipped gear and other things have changed since last avatar image generation."""
|
||||
new_appearance = UserStyles.from_dict(asdict(self.coordinator.data.user))
|
||||
|
||||
if self._current_appearance != new_appearance:
|
||||
self._current_appearance = new_appearance
|
||||
self._attr_image_last_updated = dt_util.utcnow()
|
||||
self._cache = None
|
||||
|
||||
return super()._handle_coordinator_update()
|
||||
|
||||
async def async_image(self) -> bytes | None:
|
||||
"""Return cached bytes, otherwise generate new avatar."""
|
||||
if not self._cache and self._current_appearance:
|
||||
self._cache = await self.coordinator.generate_avatar(
|
||||
self._current_appearance
|
||||
)
|
||||
return self._cache
|
||||
@@ -6,7 +6,7 @@ from collections.abc import Callable, Mapping
|
||||
from dataclasses import asdict, dataclass
|
||||
from enum import StrEnum
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from typing import Any
|
||||
|
||||
from habiticalib import (
|
||||
ContentData,
|
||||
@@ -18,32 +18,25 @@ from habiticalib import (
|
||||
)
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
DOMAIN as SENSOR_DOMAIN,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.issue_registry import (
|
||||
IssueSeverity,
|
||||
async_create_issue,
|
||||
async_delete_issue,
|
||||
)
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from .const import ASSETS_URL, DOMAIN
|
||||
from .const import ASSETS_URL
|
||||
from .entity import HabiticaBase
|
||||
from .types import HabiticaConfigEntry
|
||||
from .util import entity_used_in, get_attribute_points, get_attributes_total
|
||||
from .util import get_attribute_points, get_attributes_total
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
class HabitipySensorEntityDescription(SensorEntityDescription):
|
||||
"""Habitipy Sensor Description."""
|
||||
class HabiticaSensorEntityDescription(SensorEntityDescription):
|
||||
"""Habitica Sensor Description."""
|
||||
|
||||
value_fn: Callable[[UserData, ContentData], StateType]
|
||||
attributes_fn: Callable[[UserData, ContentData], dict[str, Any] | None] | None = (
|
||||
@@ -53,14 +46,14 @@ class HabitipySensorEntityDescription(SensorEntityDescription):
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
class HabitipyTaskSensorEntityDescription(SensorEntityDescription):
|
||||
"""Habitipy Task Sensor Description."""
|
||||
class HabiticaTaskSensorEntityDescription(SensorEntityDescription):
|
||||
"""Habitica Task Sensor Description."""
|
||||
|
||||
value_fn: Callable[[list[TaskData]], list[TaskData]]
|
||||
|
||||
|
||||
class HabitipySensorEntity(StrEnum):
|
||||
"""Habitipy Entities."""
|
||||
class HabiticaSensorEntity(StrEnum):
|
||||
"""Habitica Entities."""
|
||||
|
||||
DISPLAY_NAME = "display_name"
|
||||
HEALTH = "health"
|
||||
@@ -73,8 +66,6 @@ class HabitipySensorEntity(StrEnum):
|
||||
GOLD = "gold"
|
||||
CLASS = "class"
|
||||
HABITS = "habits"
|
||||
DAILIES = "dailys"
|
||||
TODOS = "todos"
|
||||
REWARDS = "rewards"
|
||||
GEMS = "gems"
|
||||
TRINKETS = "trinkets"
|
||||
@@ -84,105 +75,105 @@ class HabitipySensorEntity(StrEnum):
|
||||
PERCEPTION = "perception"
|
||||
|
||||
|
||||
SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = (
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.DISPLAY_NAME,
|
||||
translation_key=HabitipySensorEntity.DISPLAY_NAME,
|
||||
SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.DISPLAY_NAME,
|
||||
translation_key=HabiticaSensorEntity.DISPLAY_NAME,
|
||||
value_fn=lambda user, _: user.profile.name,
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.HEALTH,
|
||||
translation_key=HabitipySensorEntity.HEALTH,
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.HEALTH,
|
||||
translation_key=HabiticaSensorEntity.HEALTH,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda user, _: user.stats.hp,
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.HEALTH_MAX,
|
||||
translation_key=HabitipySensorEntity.HEALTH_MAX,
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.HEALTH_MAX,
|
||||
translation_key=HabiticaSensorEntity.HEALTH_MAX,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda user, _: 50,
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.MANA,
|
||||
translation_key=HabitipySensorEntity.MANA,
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.MANA,
|
||||
translation_key=HabiticaSensorEntity.MANA,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda user, _: user.stats.mp,
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.MANA_MAX,
|
||||
translation_key=HabitipySensorEntity.MANA_MAX,
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.MANA_MAX,
|
||||
translation_key=HabiticaSensorEntity.MANA_MAX,
|
||||
value_fn=lambda user, _: user.stats.maxMP,
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.EXPERIENCE,
|
||||
translation_key=HabitipySensorEntity.EXPERIENCE,
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.EXPERIENCE,
|
||||
translation_key=HabiticaSensorEntity.EXPERIENCE,
|
||||
value_fn=lambda user, _: user.stats.exp,
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.EXPERIENCE_MAX,
|
||||
translation_key=HabitipySensorEntity.EXPERIENCE_MAX,
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.EXPERIENCE_MAX,
|
||||
translation_key=HabiticaSensorEntity.EXPERIENCE_MAX,
|
||||
value_fn=lambda user, _: user.stats.toNextLevel,
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.LEVEL,
|
||||
translation_key=HabitipySensorEntity.LEVEL,
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.LEVEL,
|
||||
translation_key=HabiticaSensorEntity.LEVEL,
|
||||
value_fn=lambda user, _: user.stats.lvl,
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.GOLD,
|
||||
translation_key=HabitipySensorEntity.GOLD,
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.GOLD,
|
||||
translation_key=HabiticaSensorEntity.GOLD,
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda user, _: user.stats.gp,
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.CLASS,
|
||||
translation_key=HabitipySensorEntity.CLASS,
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.CLASS,
|
||||
translation_key=HabiticaSensorEntity.CLASS,
|
||||
value_fn=lambda user, _: user.stats.Class.value if user.stats.Class else None,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=[item.value for item in HabiticaClass],
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.GEMS,
|
||||
translation_key=HabitipySensorEntity.GEMS,
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.GEMS,
|
||||
translation_key=HabiticaSensorEntity.GEMS,
|
||||
value_fn=lambda user, _: round(user.balance * 4) if user.balance else None,
|
||||
suggested_display_precision=0,
|
||||
entity_picture="shop_gem.png",
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.TRINKETS,
|
||||
translation_key=HabitipySensorEntity.TRINKETS,
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.TRINKETS,
|
||||
translation_key=HabiticaSensorEntity.TRINKETS,
|
||||
value_fn=lambda user, _: user.purchased.plan.consecutive.trinkets or 0,
|
||||
suggested_display_precision=0,
|
||||
native_unit_of_measurement="⧖",
|
||||
entity_picture="notif_subscriber_reward.png",
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.STRENGTH,
|
||||
translation_key=HabitipySensorEntity.STRENGTH,
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.STRENGTH,
|
||||
translation_key=HabiticaSensorEntity.STRENGTH,
|
||||
value_fn=lambda user, content: get_attributes_total(user, content, "Str"),
|
||||
attributes_fn=lambda user, content: get_attribute_points(user, content, "Str"),
|
||||
suggested_display_precision=0,
|
||||
native_unit_of_measurement="STR",
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.INTELLIGENCE,
|
||||
translation_key=HabitipySensorEntity.INTELLIGENCE,
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.INTELLIGENCE,
|
||||
translation_key=HabiticaSensorEntity.INTELLIGENCE,
|
||||
value_fn=lambda user, content: get_attributes_total(user, content, "Int"),
|
||||
attributes_fn=lambda user, content: get_attribute_points(user, content, "Int"),
|
||||
suggested_display_precision=0,
|
||||
native_unit_of_measurement="INT",
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.PERCEPTION,
|
||||
translation_key=HabitipySensorEntity.PERCEPTION,
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.PERCEPTION,
|
||||
translation_key=HabiticaSensorEntity.PERCEPTION,
|
||||
value_fn=lambda user, content: get_attributes_total(user, content, "per"),
|
||||
attributes_fn=lambda user, content: get_attribute_points(user, content, "per"),
|
||||
suggested_display_precision=0,
|
||||
native_unit_of_measurement="PER",
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.CONSTITUTION,
|
||||
translation_key=HabitipySensorEntity.CONSTITUTION,
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.CONSTITUTION,
|
||||
translation_key=HabiticaSensorEntity.CONSTITUTION,
|
||||
value_fn=lambda user, content: get_attributes_total(user, content, "con"),
|
||||
attributes_fn=lambda user, content: get_attribute_points(user, content, "con"),
|
||||
suggested_display_precision=0,
|
||||
@@ -221,31 +212,15 @@ TASKS_MAP = {
|
||||
}
|
||||
|
||||
|
||||
TASK_SENSOR_DESCRIPTION: tuple[HabitipyTaskSensorEntityDescription, ...] = (
|
||||
HabitipyTaskSensorEntityDescription(
|
||||
key=HabitipySensorEntity.HABITS,
|
||||
translation_key=HabitipySensorEntity.HABITS,
|
||||
TASK_SENSOR_DESCRIPTION: tuple[HabiticaTaskSensorEntityDescription, ...] = (
|
||||
HabiticaTaskSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.HABITS,
|
||||
translation_key=HabiticaSensorEntity.HABITS,
|
||||
value_fn=lambda tasks: [r for r in tasks if r.Type is TaskType.HABIT],
|
||||
),
|
||||
HabitipyTaskSensorEntityDescription(
|
||||
key=HabitipySensorEntity.DAILIES,
|
||||
translation_key=HabitipySensorEntity.DAILIES,
|
||||
value_fn=lambda tasks: [r for r in tasks if r.Type is TaskType.DAILY],
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
HabitipyTaskSensorEntityDescription(
|
||||
key=HabitipySensorEntity.TODOS,
|
||||
translation_key=HabitipySensorEntity.TODOS,
|
||||
value_fn=(
|
||||
lambda tasks: [
|
||||
r for r in tasks if r.Type is TaskType.TODO and not r.completed
|
||||
]
|
||||
),
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
HabitipyTaskSensorEntityDescription(
|
||||
key=HabitipySensorEntity.REWARDS,
|
||||
translation_key=HabitipySensorEntity.REWARDS,
|
||||
HabiticaTaskSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.REWARDS,
|
||||
translation_key=HabiticaSensorEntity.REWARDS,
|
||||
value_fn=lambda tasks: [r for r in tasks if r.Type is TaskType.REWARD],
|
||||
),
|
||||
)
|
||||
@@ -261,19 +236,19 @@ async def async_setup_entry(
|
||||
coordinator = config_entry.runtime_data
|
||||
|
||||
entities: list[SensorEntity] = [
|
||||
HabitipySensor(coordinator, description) for description in SENSOR_DESCRIPTIONS
|
||||
HabiticaSensor(coordinator, description) for description in SENSOR_DESCRIPTIONS
|
||||
]
|
||||
entities.extend(
|
||||
HabitipyTaskSensor(coordinator, description)
|
||||
HabiticaTaskSensor(coordinator, description)
|
||||
for description in TASK_SENSOR_DESCRIPTION
|
||||
)
|
||||
async_add_entities(entities, True)
|
||||
|
||||
|
||||
class HabitipySensor(HabiticaBase, SensorEntity):
|
||||
class HabiticaSensor(HabiticaBase, SensorEntity):
|
||||
"""A generic Habitica sensor."""
|
||||
|
||||
entity_description: HabitipySensorEntityDescription
|
||||
entity_description: HabiticaSensorEntityDescription
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
@@ -298,10 +273,10 @@ class HabitipySensor(HabiticaBase, SensorEntity):
|
||||
return None
|
||||
|
||||
|
||||
class HabitipyTaskSensor(HabiticaBase, SensorEntity):
|
||||
class HabiticaTaskSensor(HabiticaBase, SensorEntity):
|
||||
"""A Habitica task sensor."""
|
||||
|
||||
entity_description: HabitipyTaskSensorEntityDescription
|
||||
entity_description: HabiticaTaskSensorEntityDescription
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
@@ -324,37 +299,3 @@ class HabitipyTaskSensor(HabiticaBase, SensorEntity):
|
||||
task[map_key] = value
|
||||
attrs[str(task_id)] = task
|
||||
return attrs
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Raise issue when entity is registered and was not disabled."""
|
||||
if TYPE_CHECKING:
|
||||
assert self.unique_id
|
||||
if entity_id := er.async_get(self.hass).async_get_entity_id(
|
||||
SENSOR_DOMAIN, DOMAIN, self.unique_id
|
||||
):
|
||||
if (
|
||||
self.enabled
|
||||
and self.entity_description.key
|
||||
in (HabitipySensorEntity.TODOS, HabitipySensorEntity.DAILIES)
|
||||
and entity_used_in(self.hass, entity_id)
|
||||
):
|
||||
async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"deprecated_task_entity_{self.entity_description.key}",
|
||||
breaks_in_ha_version="2025.2.0",
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_task_entity",
|
||||
translation_placeholders={
|
||||
"task_name": str(self.name),
|
||||
"entity": entity_id,
|
||||
},
|
||||
)
|
||||
else:
|
||||
async_delete_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"deprecated_task_entity_{self.entity_description.key}",
|
||||
)
|
||||
await super().async_added_to_hass()
|
||||
|
||||
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from dataclasses import asdict
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from aiohttp import ClientError
|
||||
from habiticalib import (
|
||||
@@ -13,6 +13,9 @@ from habiticalib import (
|
||||
NotAuthorizedError,
|
||||
NotFoundError,
|
||||
Skill,
|
||||
TaskData,
|
||||
TaskPriority,
|
||||
TaskType,
|
||||
TooManyRequestsError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
@@ -36,10 +39,14 @@ from .const import (
|
||||
ATTR_DATA,
|
||||
ATTR_DIRECTION,
|
||||
ATTR_ITEM,
|
||||
ATTR_KEYWORD,
|
||||
ATTR_PATH,
|
||||
ATTR_PRIORITY,
|
||||
ATTR_SKILL,
|
||||
ATTR_TAG,
|
||||
ATTR_TARGET,
|
||||
ATTR_TASK,
|
||||
ATTR_TYPE,
|
||||
DOMAIN,
|
||||
EVENT_API_CALL_SUCCESS,
|
||||
SERVICE_ABORT_QUEST,
|
||||
@@ -47,6 +54,7 @@ from .const import (
|
||||
SERVICE_API_CALL,
|
||||
SERVICE_CANCEL_QUEST,
|
||||
SERVICE_CAST_SKILL,
|
||||
SERVICE_GET_TASKS,
|
||||
SERVICE_LEAVE_QUEST,
|
||||
SERVICE_REJECT_QUEST,
|
||||
SERVICE_SCORE_HABIT,
|
||||
@@ -96,6 +104,21 @@ SERVICE_TRANSFORMATION_SCHEMA = vol.Schema(
|
||||
}
|
||||
)
|
||||
|
||||
SERVICE_GET_TASKS_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
|
||||
vol.Optional(ATTR_TYPE): vol.All(
|
||||
cv.ensure_list, [vol.All(vol.Upper, vol.In({x.name for x in TaskType}))]
|
||||
),
|
||||
vol.Optional(ATTR_PRIORITY): vol.All(
|
||||
cv.ensure_list, [vol.All(vol.Upper, vol.In({x.name for x in TaskPriority}))]
|
||||
),
|
||||
vol.Optional(ATTR_TASK): vol.All(cv.ensure_list, [str]),
|
||||
vol.Optional(ATTR_TAG): vol.All(cv.ensure_list, [str]),
|
||||
vol.Optional(ATTR_KEYWORD): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
SKILL_MAP = {
|
||||
"pickpocket": Skill.PICKPOCKET,
|
||||
"backstab": Skill.BACKSTAB,
|
||||
@@ -403,6 +426,52 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
|
||||
else:
|
||||
return asdict(response.data)
|
||||
|
||||
async def get_tasks(call: ServiceCall) -> ServiceResponse:
|
||||
"""Get tasks action."""
|
||||
|
||||
entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY])
|
||||
coordinator = entry.runtime_data
|
||||
response: list[TaskData] = coordinator.data.tasks
|
||||
|
||||
if types := {TaskType[x] for x in call.data.get(ATTR_TYPE, [])}:
|
||||
response = [task for task in response if task.Type in types]
|
||||
|
||||
if priority := {TaskPriority[x] for x in call.data.get(ATTR_PRIORITY, [])}:
|
||||
response = [task for task in response if task.priority in priority]
|
||||
|
||||
if tasks := call.data.get(ATTR_TASK):
|
||||
response = [
|
||||
task
|
||||
for task in response
|
||||
if str(task.id) in tasks or task.alias in tasks or task.text in tasks
|
||||
]
|
||||
|
||||
if tags := call.data.get(ATTR_TAG):
|
||||
tag_ids = {
|
||||
tag.id
|
||||
for tag in coordinator.data.user.tags
|
||||
if (tag.name and tag.name.lower())
|
||||
in (tag.lower() for tag in tags) # Case-insensitive matching
|
||||
and tag.id
|
||||
}
|
||||
|
||||
response = [
|
||||
task
|
||||
for task in response
|
||||
if any(tag_id in task.tags for tag_id in tag_ids if task.tags)
|
||||
]
|
||||
if keyword := call.data.get(ATTR_KEYWORD):
|
||||
keyword = keyword.lower()
|
||||
response = [
|
||||
task
|
||||
for task in response
|
||||
if (task.text and keyword in task.text.lower())
|
||||
or (task.notes and keyword in task.notes.lower())
|
||||
or any(keyword in item.text.lower() for item in task.checklist)
|
||||
]
|
||||
result: dict[str, Any] = {"tasks": response}
|
||||
return result
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_API_CALL,
|
||||
@@ -440,3 +509,10 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
|
||||
schema=SERVICE_TRANSFORMATION_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_GET_TASKS,
|
||||
get_tasks,
|
||||
schema=SERVICE_GET_TASKS_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
||||
@@ -94,3 +94,49 @@ transformation:
|
||||
required: true
|
||||
selector:
|
||||
text:
|
||||
get_tasks:
|
||||
fields:
|
||||
config_entry: *config_entry
|
||||
filter:
|
||||
collapsed: true
|
||||
fields:
|
||||
type:
|
||||
required: false
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- "habit"
|
||||
- "daily"
|
||||
- "todo"
|
||||
- "reward"
|
||||
mode: dropdown
|
||||
translation_key: "type"
|
||||
multiple: true
|
||||
sort: true
|
||||
priority:
|
||||
required: false
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- "trivial"
|
||||
- "easy"
|
||||
- "medium"
|
||||
- "hard"
|
||||
mode: dropdown
|
||||
translation_key: "priority"
|
||||
multiple: true
|
||||
sort: false
|
||||
task:
|
||||
required: false
|
||||
selector:
|
||||
text:
|
||||
multiple: true
|
||||
tag:
|
||||
required: false
|
||||
selector:
|
||||
text:
|
||||
multiple: true
|
||||
keyword:
|
||||
required: false
|
||||
selector:
|
||||
text:
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
"todos": "To-Do's",
|
||||
"dailies": "Dailies",
|
||||
"config_entry_name": "Select character",
|
||||
"task_name": "Task name",
|
||||
"unit_tasks": "tasks",
|
||||
"unit_health_points": "HP",
|
||||
"unit_mana_points": "MP",
|
||||
@@ -166,6 +167,11 @@
|
||||
"name": "Daily reminders"
|
||||
}
|
||||
},
|
||||
"image": {
|
||||
"avatar": {
|
||||
"name": "Avatar"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"display_name": {
|
||||
"name": "Display name"
|
||||
@@ -217,14 +223,6 @@
|
||||
"rogue": "Rogue"
|
||||
}
|
||||
},
|
||||
"todos": {
|
||||
"name": "[%key:component::habitica::common::todos%]",
|
||||
"unit_of_measurement": "[%key:component::habitica::common::unit_tasks%]"
|
||||
},
|
||||
"dailys": {
|
||||
"name": "[%key:component::habitica::common::dailies%]",
|
||||
"unit_of_measurement": "[%key:component::habitica::common::unit_tasks%]"
|
||||
},
|
||||
"habits": {
|
||||
"name": "Habits",
|
||||
"unit_of_measurement": "[%key:component::habitica::common::unit_tasks%]"
|
||||
@@ -403,10 +401,6 @@
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_task_entity": {
|
||||
"title": "The Habitica {task_name} sensor is deprecated",
|
||||
"description": "The Habitica entity `{entity}` is deprecated and will be removed in a future release.\nPlease update your automations and scripts to replace the sensor entity with the newly added todo entity.\nWhen you are done migrating you can disable `{entity}`."
|
||||
},
|
||||
"deprecated_api_call": {
|
||||
"title": "The Habitica action habitica.api_call is deprecated",
|
||||
"description": "The Habitica action `habitica.api_call` is deprecated and will be removed in Home Assistant 2025.5.0.\n\nPlease update your automations and scripts to use other Habitica actions and entities."
|
||||
@@ -433,7 +427,7 @@
|
||||
},
|
||||
"cast_skill": {
|
||||
"name": "Cast a skill",
|
||||
"description": "Use a skill or spell from your Habitica character on a specific task to affect its progress or status.",
|
||||
"description": "Uses a skill or spell from your Habitica character on a specific task to affect its progress or status.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
@@ -444,14 +438,14 @@
|
||||
"description": "Select the skill or spell you want to cast on the task. Only skills corresponding to your character's class can be used."
|
||||
},
|
||||
"task": {
|
||||
"name": "Task name",
|
||||
"name": "[%key:component::habitica::common::task_name%]",
|
||||
"description": "The name (or task ID) of the task you want to target with the skill or spell."
|
||||
}
|
||||
}
|
||||
},
|
||||
"accept_quest": {
|
||||
"name": "Accept a quest invitation",
|
||||
"description": "Accept a pending invitation to a quest.",
|
||||
"description": "Accepts a pending invitation to a quest.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
@@ -461,7 +455,7 @@
|
||||
},
|
||||
"reject_quest": {
|
||||
"name": "Reject a quest invitation",
|
||||
"description": "Reject a pending invitation to a quest.",
|
||||
"description": "Rejects a pending invitation to a quest.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
@@ -471,7 +465,7 @@
|
||||
},
|
||||
"leave_quest": {
|
||||
"name": "Leave a quest",
|
||||
"description": "Leave the current quest you are participating in.",
|
||||
"description": "Leaves the current quest you are participating in.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
@@ -481,7 +475,7 @@
|
||||
},
|
||||
"abort_quest": {
|
||||
"name": "Abort an active quest",
|
||||
"description": "Terminate your party's ongoing quest. All progress will be lost and the quest roll returned to the owner's inventory. Only quest leader or group leader can perform this action.",
|
||||
"description": "Terminates your party's ongoing quest. All progress will be lost and the quest roll returned to the owner's inventory. Only quest leader or group leader can perform this action.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
@@ -491,7 +485,7 @@
|
||||
},
|
||||
"cancel_quest": {
|
||||
"name": "Cancel a pending quest",
|
||||
"description": "Cancel a quest that has not yet startet. All accepted and pending invitations will be canceled and the quest roll returned to the owner's inventory. Only quest leader or group leader can perform this action.",
|
||||
"description": "Cancels a quest that has not yet startet. All accepted and pending invitations will be canceled and the quest roll returned to the owner's inventory. Only quest leader or group leader can perform this action.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
@@ -501,7 +495,7 @@
|
||||
},
|
||||
"start_quest": {
|
||||
"name": "Force-start a pending quest",
|
||||
"description": "Begin the quest immediately, bypassing any pending invitations that haven't been accepted or rejected. Only quest leader or group leader can perform this action.",
|
||||
"description": "Begins the quest immediately, bypassing any pending invitations that haven't been accepted or rejected. Only quest leader or group leader can perform this action.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
@@ -511,7 +505,7 @@
|
||||
},
|
||||
"score_habit": {
|
||||
"name": "Track a habit",
|
||||
"description": "Increase the positive or negative streak of a habit to track its progress.",
|
||||
"description": "Increases the positive or negative streak of a habit to track its progress.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
@@ -529,7 +523,7 @@
|
||||
},
|
||||
"score_reward": {
|
||||
"name": "Buy a reward",
|
||||
"description": "Reward yourself and buy one of your custom rewards with gold earned by fulfilling tasks.",
|
||||
"description": "Buys one of your custom rewards with gold earned by fulfilling tasks.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
@@ -543,7 +537,7 @@
|
||||
},
|
||||
"transformation": {
|
||||
"name": "Use a transformation item",
|
||||
"description": "Use a transformation item from your Habitica character's inventory on a member of your party or yourself.",
|
||||
"description": "Uses a transformation item from your Habitica character's inventory on a member of your party or yourself.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "Select character",
|
||||
@@ -558,6 +552,42 @@
|
||||
"description": "The name of the character you want to use the transformation item on. You can also specify the players username or user ID."
|
||||
}
|
||||
}
|
||||
},
|
||||
"get_tasks": {
|
||||
"name": "Get tasks",
|
||||
"description": "Retrieves tasks from your Habitica character.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
"description": "Choose the Habitica character to retrieve tasks from."
|
||||
},
|
||||
"type": {
|
||||
"name": "Task type",
|
||||
"description": "Filter tasks by type."
|
||||
},
|
||||
"priority": {
|
||||
"name": "Difficulty",
|
||||
"description": "Filter tasks by difficulty."
|
||||
},
|
||||
"task": {
|
||||
"name": "[%key:component::habitica::common::task_name%]",
|
||||
"description": "Select tasks by matching their name (or task ID)."
|
||||
},
|
||||
"tag": {
|
||||
"name": "Tag",
|
||||
"description": "Filter tasks that have one or more of the selected tags."
|
||||
},
|
||||
"keyword": {
|
||||
"name": "Keyword",
|
||||
"description": "Filter tasks by keyword, searching across titles, notes, and checklists."
|
||||
}
|
||||
},
|
||||
"sections": {
|
||||
"filter": {
|
||||
"name": "Filter options",
|
||||
"description": "Use the optional filters to narrow the returned tasks."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
@@ -576,6 +606,22 @@
|
||||
"seafoam": "Seafoam",
|
||||
"shiny_seed": "Shiny seed"
|
||||
}
|
||||
},
|
||||
"type": {
|
||||
"options": {
|
||||
"daily": "Daily",
|
||||
"habit": "Habit",
|
||||
"todo": "To-do",
|
||||
"reward": "Reward"
|
||||
}
|
||||
},
|
||||
"priority": {
|
||||
"options": {
|
||||
"trivial": "Trivial",
|
||||
"easy": "Easy",
|
||||
"medium": "Medium",
|
||||
"hard": "Hard"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ from typing import Any, cast
|
||||
|
||||
from aiohasupervisor.exceptions import (
|
||||
SupervisorBadRequestError,
|
||||
SupervisorError,
|
||||
SupervisorNotFoundError,
|
||||
)
|
||||
from aiohasupervisor.models import (
|
||||
@@ -23,8 +24,10 @@ from homeassistant.components.backup import (
|
||||
AgentBackup,
|
||||
BackupAgent,
|
||||
BackupReaderWriter,
|
||||
BackupReaderWriterError,
|
||||
CreateBackupEvent,
|
||||
Folder,
|
||||
IncorrectPasswordError,
|
||||
NewBackup,
|
||||
WrittenBackup,
|
||||
)
|
||||
@@ -213,6 +216,10 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
|
||||
password: str | None,
|
||||
) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]:
|
||||
"""Create a backup."""
|
||||
if not include_homeassistant and include_database:
|
||||
raise HomeAssistantError(
|
||||
"Cannot create a backup with database but without Home Assistant"
|
||||
)
|
||||
manager = self._hass.data[DATA_MANAGER]
|
||||
|
||||
include_addons_set: supervisor_backups.AddonSet | set[str] | None = None
|
||||
@@ -233,20 +240,23 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
|
||||
]
|
||||
locations = [agent.location for agent in hassio_agents]
|
||||
|
||||
backup = await self._client.backups.partial_backup(
|
||||
supervisor_backups.PartialBackupOptions(
|
||||
addons=include_addons_set,
|
||||
folders=include_folders_set,
|
||||
homeassistant=include_homeassistant,
|
||||
name=backup_name,
|
||||
password=password,
|
||||
compressed=True,
|
||||
location=locations or LOCATION_CLOUD_BACKUP,
|
||||
homeassistant_exclude_database=not include_database,
|
||||
background=True,
|
||||
extra=extra_metadata,
|
||||
try:
|
||||
backup = await self._client.backups.partial_backup(
|
||||
supervisor_backups.PartialBackupOptions(
|
||||
addons=include_addons_set,
|
||||
folders=include_folders_set,
|
||||
homeassistant=include_homeassistant,
|
||||
name=backup_name,
|
||||
password=password,
|
||||
compressed=True,
|
||||
location=locations or LOCATION_CLOUD_BACKUP,
|
||||
homeassistant_exclude_database=not include_database,
|
||||
background=True,
|
||||
extra=extra_metadata,
|
||||
)
|
||||
)
|
||||
)
|
||||
except SupervisorError as err:
|
||||
raise BackupReaderWriterError(f"Error creating backup: {err}") from err
|
||||
backup_task = self._hass.async_create_task(
|
||||
self._async_wait_for_backup(
|
||||
backup, remove_after_upload=not bool(locations)
|
||||
@@ -278,22 +288,35 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
|
||||
finally:
|
||||
unsub()
|
||||
if not backup_id:
|
||||
raise HomeAssistantError("Backup failed")
|
||||
raise BackupReaderWriterError("Backup failed")
|
||||
|
||||
async def open_backup() -> AsyncIterator[bytes]:
|
||||
return await self._client.backups.download_backup(backup_id)
|
||||
try:
|
||||
return await self._client.backups.download_backup(backup_id)
|
||||
except SupervisorError as err:
|
||||
raise BackupReaderWriterError(
|
||||
f"Error downloading backup: {err}"
|
||||
) from err
|
||||
|
||||
async def remove_backup() -> None:
|
||||
if not remove_after_upload:
|
||||
return
|
||||
await self._client.backups.remove_backup(
|
||||
backup_id,
|
||||
options=supervisor_backups.RemoveBackupOptions(
|
||||
location={LOCATION_CLOUD_BACKUP}
|
||||
),
|
||||
)
|
||||
try:
|
||||
await self._client.backups.remove_backup(
|
||||
backup_id,
|
||||
options=supervisor_backups.RemoveBackupOptions(
|
||||
location={LOCATION_CLOUD_BACKUP}
|
||||
),
|
||||
)
|
||||
except SupervisorError as err:
|
||||
raise BackupReaderWriterError(f"Error removing backup: {err}") from err
|
||||
|
||||
details = await self._client.backups.backup_info(backup_id)
|
||||
try:
|
||||
details = await self._client.backups.backup_info(backup_id)
|
||||
except SupervisorError as err:
|
||||
raise BackupReaderWriterError(
|
||||
f"Error getting backup details: {err}"
|
||||
) from err
|
||||
|
||||
return WrittenBackup(
|
||||
backup=_backup_details_to_agent_backup(details),
|
||||
@@ -359,8 +382,16 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
|
||||
restore_homeassistant: bool,
|
||||
) -> None:
|
||||
"""Restore a backup."""
|
||||
if restore_homeassistant and not restore_database:
|
||||
raise HomeAssistantError("Cannot restore Home Assistant without database")
|
||||
manager = self._hass.data[DATA_MANAGER]
|
||||
# The backup manager has already checked that the backup exists so we don't need to
|
||||
# check that here.
|
||||
backup = await manager.backup_agents[agent_id].async_get_backup(backup_id)
|
||||
if (
|
||||
backup
|
||||
and restore_homeassistant
|
||||
and restore_database != backup.database_included
|
||||
):
|
||||
raise HomeAssistantError("Restore database must match backup")
|
||||
if not restore_homeassistant and restore_database:
|
||||
raise HomeAssistantError("Cannot restore database without Home Assistant")
|
||||
restore_addons_set = set(restore_addons) if restore_addons else None
|
||||
@@ -370,7 +401,6 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
|
||||
else None
|
||||
)
|
||||
|
||||
manager = self._hass.data[DATA_MANAGER]
|
||||
restore_location: str | None
|
||||
if manager.backup_agents[agent_id].domain != DOMAIN:
|
||||
# Download the backup to the supervisor. Supervisor will clean up the backup
|
||||
@@ -385,17 +415,24 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
|
||||
agent = cast(SupervisorBackupAgent, manager.backup_agents[agent_id])
|
||||
restore_location = agent.location
|
||||
|
||||
job = await self._client.backups.partial_restore(
|
||||
backup_id,
|
||||
supervisor_backups.PartialRestoreOptions(
|
||||
addons=restore_addons_set,
|
||||
folders=restore_folders_set,
|
||||
homeassistant=restore_homeassistant,
|
||||
password=password,
|
||||
background=True,
|
||||
location=restore_location,
|
||||
),
|
||||
)
|
||||
try:
|
||||
job = await self._client.backups.partial_restore(
|
||||
backup_id,
|
||||
supervisor_backups.PartialRestoreOptions(
|
||||
addons=restore_addons_set,
|
||||
folders=restore_folders_set,
|
||||
homeassistant=restore_homeassistant,
|
||||
password=password,
|
||||
background=True,
|
||||
location=restore_location,
|
||||
),
|
||||
)
|
||||
except SupervisorBadRequestError as err:
|
||||
# Supervisor currently does not transmit machine parsable error types
|
||||
message = err.args[0]
|
||||
if message.startswith("Invalid password for backup"):
|
||||
raise IncorrectPasswordError(message) from err
|
||||
raise HomeAssistantError(message) from err
|
||||
|
||||
restore_complete = asyncio.Event()
|
||||
|
||||
|
||||
@@ -362,7 +362,7 @@
|
||||
},
|
||||
"addons": {
|
||||
"name": "Add-ons",
|
||||
"description": "List of add-ons to include in the backup. Use the name slug of the add-on."
|
||||
"description": "List of add-ons to include in the backup. Use the name slug of each add-on."
|
||||
},
|
||||
"folders": {
|
||||
"name": "Folders",
|
||||
@@ -418,11 +418,11 @@
|
||||
},
|
||||
"folders": {
|
||||
"name": "[%key:component::hassio::services::backup_partial::fields::folders::name%]",
|
||||
"description": "[%key:component::hassio::services::backup_partial::fields::folders::description%]"
|
||||
"description": "List of directories to restore from the backup."
|
||||
},
|
||||
"addons": {
|
||||
"name": "[%key:component::hassio::services::backup_partial::fields::addons::name%]",
|
||||
"description": "[%key:component::hassio::services::backup_partial::fields::addons::description%]"
|
||||
"description": "List of add-ons to restore from the backup. Use the name slug of each add-on."
|
||||
},
|
||||
"password": {
|
||||
"name": "[%key:common::config_flow::data::password%]",
|
||||
|
||||
@@ -7,17 +7,32 @@ from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from pyheos import Heos, HeosError, HeosOptions, HeosPlayer, const as heos_const
|
||||
from pyheos import (
|
||||
Credentials,
|
||||
Heos,
|
||||
HeosError,
|
||||
HeosOptions,
|
||||
HeosPlayer,
|
||||
const as heos_const,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP, Platform
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_USERNAME,
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_connect,
|
||||
async_dispatcher_send,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
from . import services
|
||||
@@ -33,6 +48,8 @@ PLATFORMS = [Platform.MEDIA_PLAYER]
|
||||
|
||||
MIN_UPDATE_SOURCES = timedelta(seconds=1)
|
||||
|
||||
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -49,6 +66,12 @@ class HeosRuntimeData:
|
||||
type HeosConfigEntry = ConfigEntry[HeosRuntimeData]
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the HEOS component."""
|
||||
services.register(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool:
|
||||
"""Initialize config entry which represents the HEOS controller."""
|
||||
# For backwards compat
|
||||
@@ -56,12 +79,37 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool
|
||||
hass.config_entries.async_update_entry(entry, unique_id=DOMAIN)
|
||||
|
||||
host = entry.data[CONF_HOST]
|
||||
credentials: Credentials | None = None
|
||||
if entry.options:
|
||||
credentials = Credentials(
|
||||
entry.options[CONF_USERNAME], entry.options[CONF_PASSWORD]
|
||||
)
|
||||
|
||||
# Setting all_progress_events=False ensures that we only receive a
|
||||
# media position update upon start of playback or when media changes
|
||||
controller = Heos(HeosOptions(host, all_progress_events=False, auto_reconnect=True))
|
||||
controller = Heos(
|
||||
HeosOptions(
|
||||
host,
|
||||
all_progress_events=False,
|
||||
auto_reconnect=True,
|
||||
credentials=credentials,
|
||||
)
|
||||
)
|
||||
|
||||
# Auth failure handler must be added before connecting to the host, otherwise
|
||||
# the event will be missed when login fails during connection.
|
||||
async def auth_failure(event: str) -> None:
|
||||
"""Handle authentication failure."""
|
||||
if event == heos_const.EVENT_USER_CREDENTIALS_INVALID:
|
||||
entry.async_start_reauth(hass)
|
||||
|
||||
entry.async_on_unload(
|
||||
controller.dispatcher.connect(heos_const.SIGNAL_HEOS_EVENT, auth_failure)
|
||||
)
|
||||
|
||||
try:
|
||||
# Auto reconnect only operates if initial connection was successful.
|
||||
await controller.connect()
|
||||
# Auto reconnect only operates if initial connection was successful.
|
||||
except HeosError as error:
|
||||
await controller.disconnect()
|
||||
_LOGGER.debug("Unable to connect to controller %s: %s", host, error)
|
||||
@@ -83,12 +131,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool
|
||||
favorites = await controller.get_favorites()
|
||||
else:
|
||||
_LOGGER.warning(
|
||||
(
|
||||
"%s is not logged in to a HEOS account and will be unable to"
|
||||
" retrieve HEOS favorites: Use the 'heos.sign_in' service to"
|
||||
" sign-in to a HEOS account"
|
||||
),
|
||||
host,
|
||||
"The HEOS System is not logged in: Enter credentials in the integration options to access favorites and streaming services"
|
||||
)
|
||||
inputs = await controller.get_input_sources()
|
||||
except HeosError as error:
|
||||
@@ -108,7 +151,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool
|
||||
controller_manager, group_manager, source_manager, players
|
||||
)
|
||||
|
||||
services.register(hass, controller)
|
||||
group_manager.connect_update()
|
||||
entry.async_on_unload(group_manager.disconnect_update)
|
||||
|
||||
@@ -120,9 +162,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
await entry.runtime_data.controller_manager.disconnect()
|
||||
|
||||
services.remove(hass)
|
||||
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
@@ -135,7 +174,6 @@ class ControllerManager:
|
||||
self._device_registry = None
|
||||
self._entity_registry = None
|
||||
self.controller = controller
|
||||
self._signals = []
|
||||
|
||||
async def connect_listeners(self):
|
||||
"""Subscribe to events of interest."""
|
||||
@@ -143,23 +181,17 @@ class ControllerManager:
|
||||
self._entity_registry = er.async_get(self._hass)
|
||||
|
||||
# Handle controller events
|
||||
self._signals.append(
|
||||
self.controller.dispatcher.connect(
|
||||
heos_const.SIGNAL_CONTROLLER_EVENT, self._controller_event
|
||||
)
|
||||
self.controller.dispatcher.connect(
|
||||
heos_const.SIGNAL_CONTROLLER_EVENT, self._controller_event
|
||||
)
|
||||
|
||||
# Handle connection-related events
|
||||
self._signals.append(
|
||||
self.controller.dispatcher.connect(
|
||||
heos_const.SIGNAL_HEOS_EVENT, self._heos_event
|
||||
)
|
||||
self.controller.dispatcher.connect(
|
||||
heos_const.SIGNAL_HEOS_EVENT, self._heos_event
|
||||
)
|
||||
|
||||
async def disconnect(self):
|
||||
"""Disconnect subscriptions."""
|
||||
for signal_remove in self._signals:
|
||||
signal_remove()
|
||||
self._signals.clear()
|
||||
self.controller.dispatcher.disconnect_all()
|
||||
await self.controller.disconnect()
|
||||
|
||||
|
||||
@@ -1,17 +1,37 @@
|
||||
"""Config flow to configure Heos."""
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from pyheos import Heos, HeosError, HeosOptions
|
||||
from pyheos import CommandFailedError, Heos, HeosError, HeosOptions
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import ssdp
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers import selector
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Selector that masks the account password while it is being typed.
_PASSWORD_SELECTOR = selector.TextSelector(
    selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD)
)

# Credentials form shared by the options and reauth flows. Both fields are
# optional so an empty submission can be used to sign the account out.
AUTH_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_USERNAME): selector.TextSelector(),
        vol.Optional(CONF_PASSWORD): _PASSWORD_SELECTOR,
    }
)
|
||||
|
||||
|
||||
def format_title(host: str) -> str:
|
||||
"""Format the title for config entries."""
|
||||
@@ -31,11 +51,65 @@ async def _validate_host(host: str, errors: dict[str, str]) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
async def _validate_auth(
    user_input: dict[str, str], heos: Heos, errors: dict[str, str]
) -> bool:
    """Sign the HEOS account in or out based on the submitted credentials.

    An empty ``user_input`` signs the account out. Otherwise both username and
    password must be present and are used to sign in.

    Returns True on success; on failure, populates ``errors`` with form error
    keys and returns False.
    """
    if not user_input:
        # Neither username nor password provided: sign out of the account.
        try:
            await heos.sign_out()
        except HeosError:
            errors["base"] = "unknown"
            _LOGGER.exception("Unexpected error occurred during sign-out")
            return False
        _LOGGER.debug("Successfully signed-out of HEOS Account")
        return True

    # Credentials were supplied; both fields are required to sign in.
    # Checking each key independently (instead of only when the other is
    # present) also guards against a KeyError below if user_input is
    # non-empty but contains neither credential key.
    if CONF_USERNAME not in user_input:
        errors[CONF_USERNAME] = "username_missing"
        return False
    if CONF_PASSWORD not in user_input:
        errors[CONF_PASSWORD] = "password_missing"
        return False

    # HEOS CLI error ids that indicate bad credentials rather than a
    # transient failure. NOTE(review): ids taken from the HEOS CLI spec
    # (6/8/10 ≈ invalid credentials / not logged in / user not found) —
    # confirm against pyheos constants if they become available.
    auth_error_ids = (6, 8, 10)

    # Attempt to login (both username and password provided).
    try:
        await heos.sign_in(user_input[CONF_USERNAME], user_input[CONF_PASSWORD])
    except CommandFailedError as err:
        if err.error_id in auth_error_ids:
            errors["base"] = "invalid_auth"
            _LOGGER.warning("Failed to sign-in to HEOS Account: %s", err)
        else:
            errors["base"] = "unknown"
            _LOGGER.exception("Unexpected error occurred during sign-in")
        return False
    except HeosError:
        errors["base"] = "unknown"
        _LOGGER.exception("Unexpected error occurred during sign-in")
        return False
    _LOGGER.debug(
        "Successfully signed-in to HEOS Account: %s",
        heos.signed_in_username,
    )
    return True
|
||||
|
||||
|
||||
class HeosFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Define a flow for HEOS."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
@staticmethod
@callback
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
    """Return the options flow handler for a HEOS config entry."""
    return HeosOptionsFlowHandler()
|
||||
|
||||
async def async_step_ssdp(
|
||||
self, discovery_info: ssdp.SsdpServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
@@ -100,3 +174,52 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
data_schema=vol.Schema({vol.Required(CONF_HOST, default=host): str}),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reauth(
    self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
    """Begin reauthentication after an auth failure event.

    The stored entry data is not inspected here; the confirm step
    collects fresh credentials from the user.
    """
    return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
    self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
    """Collect and validate credentials, then update the entry options."""
    errors: dict[str, str] = {}
    entry = self._get_reauth_entry()
    if user_input is not None:
        controller = cast(Heos, entry.runtime_data.controller_manager.controller)
        if await _validate_auth(user_input, controller, errors):
            # Credentials validated; persist them and reload the entry.
            return self.async_update_reload_and_abort(entry, options=user_input)

    # Pre-fill the form with the last submission, or the saved options.
    suggested = user_input or entry.options
    return self.async_show_form(
        step_id="reauth_confirm",
        errors=errors,
        data_schema=self.add_suggested_values_to_schema(AUTH_SCHEMA, suggested),
    )
|
||||
|
||||
|
||||
class HeosOptionsFlowHandler(OptionsFlow):
    """Options flow that manages the HEOS account credentials."""

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Show the credentials form and apply a valid submission."""
        errors: dict[str, str] = {}
        if user_input is not None:
            controller = cast(
                Heos, self.config_entry.runtime_data.controller_manager.controller
            )
            if await _validate_auth(user_input, controller, errors):
                return self.async_create_entry(data=user_input)

        # Pre-fill with the last submission, falling back to saved options.
        suggested = user_input or self.config_entry.options
        return self.async_show_form(
            step_id="init",
            errors=errors,
            data_schema=self.add_suggested_values_to_schema(AUTH_SCHEMA, suggested),
        )
|
||||
|
||||
@@ -123,7 +123,6 @@ class HeosMediaPlayer(MediaPlayerEntity):
|
||||
"""Initialize."""
|
||||
self._media_position_updated_at = None
|
||||
self._player = player
|
||||
self._signals: list = []
|
||||
self._source_manager = source_manager
|
||||
self._group_manager = group_manager
|
||||
self._attr_unique_id = str(player.player_id)
|
||||
@@ -150,13 +149,13 @@ class HeosMediaPlayer(MediaPlayerEntity):
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Device added to hass."""
|
||||
# Update state when attributes of the player change
|
||||
self._signals.append(
|
||||
self.async_on_remove(
|
||||
self._player.heos.dispatcher.connect(
|
||||
heos_const.SIGNAL_PLAYER_EVENT, self._player_update
|
||||
)
|
||||
)
|
||||
# Update state when heos changes
|
||||
self._signals.append(
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(self.hass, SIGNAL_HEOS_UPDATED, self._heos_updated)
|
||||
)
|
||||
# Register this player's entity_id so it can be resolved by the group manager
|
||||
@@ -304,12 +303,6 @@ class HeosMediaPlayer(MediaPlayerEntity):
|
||||
self._player.player_id, self.entity_id
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
    """Disconnect the device when removed."""
    # Invoke every registered disconnect callback, then drop our references.
    for disconnect in self._signals:
        disconnect()
    self._signals.clear()
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if the device is available."""
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user