forked from home-assistant/core
Compare commits
432 Commits
energy_sen...2025.3.0b3
8382663be4
7e1309d874
1d0cba1a43
7d9a6ceb6b
6abdb28a03
3690e03951
4fe4d14f16
74e8ffa555
c257b228f1
6ff0f67d03
8fdff9ca37
9055dff9bd
e766d681b5
511e57d0b3
74be49d00d
684c3aac6b
a718b6ebff
f17274d417
1530139a61
f56d65b2ec
21277a81d3
e1ce5b8c69
0323a9c4e6
c7d89398a0
8cc587d3a7
5ad156767a
f54b3f4de2
6f0c62dc9d
dce8bca103
22af8af132
8a62b882bf
708f22fe6f
a4e71e2055
61a3cc37e0
a0668e5a5b
b4b7142b55
108b71d33c
2636a47333
17116fcd6c
17c16144d1
178d509d56
09c129de40
07128ba063
a786ff53ff
d2e19c829d
94b342f26a
9e3e6b3f43
4300900322
342e04974d
fdb4c0a81f
6de878ffe4
c63aaec09e
d8bf47c101
736ff8828d
b501999a4c
3985f1c6c8
46ec3987a8
df4e5a54e3
d8a259044f
0891669aee
83c0351338
c5e5fe555d
345ba73777
e4200a79a2
381fa65ba0
16314711b8
553abe4a4a
6a1bbdb3a7
59d92c75bd
7732e6878e
2cde317d59
0c08430507
fa6d7d5e3c
585b950a46
3effc2e182
0e1602ff71
693584ce29
2e972422c2
3a21c36173
25ee2e58a5
561b3ae21b
5be7f49146
2694828451
3eea932b24
468208502f
92268f894a
5e5fd6a2f2
cadee73da8
51099ae7d6
b777c29bab
fc1190dafd
775a81829b
998757f09e
b964bc58be
bd80a78848
37c8764426
9262dec444
3c3c4d2641
c1898ece80
fdf69fcd7d
e403bee95b
9be8fd4eac
e09b40c2bd
2826198d5d
5324f3e542
7e97ef588b
bb120020a8
bb9aba2a7d
b676c2f61b
0c092f80c7
2bf592d8aa
e591157e37
ee01aa73b8
0f827fbf22
4dca4a64b5
b82886a3e1
fe396cdf4b
5895245a31
861ba0ee5e
d15f9edc57
cab6ec0363
eb26a2124b
4530fe4bf7
b1865de58f
3ff04d6d04
bd306abace
412ceca6f7
8644fb1887
622be70fee
7bc0c1b912
3230e741e9
81db3dea41
fe348e17a3
03f6508bd8
fd47d6578e
df6a5d7459
b8a0cdea12
570e11ba5b
19704cff04
51c09c2aa4
ef46552146
75533463f7
2cd496fdaf
cd4c79450b
a1d1f6ec97
a910fb879c
4e904bf5a3
38cc26485a
2bba185e4c
743cc42829
f3021b40ab
9ec9110e1e
433c2cb43e
fcffe5151d
ca1677cc46
27f7085b61
f607b95c00
72502c1a15
47e78e9008
1fb51ef189
f96e31fad8
e99bf21a36
3059d06960
2b55f3af36
776501f5e6
1f93d2cefb
1633700a58
923ec71bf6
7566046995
b9dbf07a5e
b8b153b87f
d4dd8fd902
a3bc55f49b
7ba94a680d
664e09790c
d45fce86a9
507c0739df
d7301c62e2
befed910da
2509353221
694a77fe3c
bc7f5f3981
cea5cda881
9e063fd77c
01fb6841da
48d3dd88a1
051cc41d4f
661b55d6eb
d197acc069
bf190a8a73
c386abd49d
6342d8334b
24bb13e0d1
212c42ca77
54843bb422
c115a7f455
597c0ab985
b86bb75e5e
b662d32e44
72f690d681
33c9f3cc7d
a1076300c8
dc92e912c2
2451e5578a
1c83dab0a1
b42973040c
6507955a14
79dbc70470
2bab7436d3
60479369b6
ec3f5561dc
2e5f56b70d
461039f06a
351e594fe4
377da5f954
51a881f3b5
5025e31129
f98720e525
37240e811b
0b7a023d2e
beec67a247
571349e3a2
d9eb248e91
fc8affd243
4d6fd1b10f
257242e6e3
7f494c235c
8c42db7501
183bbcd1e1
8c4b8028cf
ea1045d826
db5bf41790
580c6f2684
d62c18c225
8f9f9bc8e7
6ad6e82a23
3d507c7b44
4f5c7353f8
0b961d98f5
1cd82ab8ee
c1e5673cbd
800fe1b01e
15ca2fe489
bd919159e5
6ebda9322d
4ca39636e2
f7a6d163bb
746d1800f9
91668e99e3
0797c3228b
8ce2727447
5b0eca7f85
b1b65e4d56
17c1c0e155
5a0a3d27d9
d821aa9162
93b01a3bc3
98c6a578b7
92788a04ff
a0c2781355
6c0c4bfd74
f3dd772b43
648c750a0f
f369ded93d
4b342b7dd4
f7e8bc458f
ee206a5a17
883e14b409
f5bdd4594d
c806638448
539adaf128
7e5617fd54
4a0b1b74e3
f5263203f5
9a1f2b52cd
037bdb6996
3160b7baa0
baa3b15dbc
bf83f5a671
463d9617ac
cc792403ab
3d2ab3b59e
6e71893b50
ba1650bd05
df5f6fc1e6
0dbdb42947
325022ec77
3ea1d2823e
83d9c000d3
266612e4d9
dc7cba60bd
d752a3a24c
8c3ee80203
94555f533b
6da33a8883
d42e31b5e7
441917706b
12e530dc75
59651c6f10
ac21d2855c
6070feea73
167881e434
35bcf82627
66bb501621
179ba8309d
2b7543aca2
1e49e04491
e60b6482ab
7b82781f4c
8078e41cad
b40daf0152
417ac56bd6
c9a0814142
2bd9918ee8
98ab16cf99
58274160a0
fb5af9acd0
672df7355c
7495ea2cc8
42ab3228a0
a92c52e65b
800f680bd5
26c60880e4
059a6dddbe
0f7cb6b757
8068f82888
d522571308
debee25086
508b6c8db0
97a124b28a
800749728b
b73c6ed768
1d43cb3f29
56e36cb1ff
4f43c971cd
113e703d5c
e59ec8f867
b35d252549
71bdd0e237
9105542bab
9cbed483fb
c687f37539
2d8a619b54
759cc3303a
5328429b08
21b98a76cc
95f632a13a
33d4d1f8e5
72878c18d0
ccd220ad0f
f191f6ae22
28a18e538d
c2f6255d16
e5fd08ae76
4b5633d9d8
a9c6a06704
0faa8efd5a
5a257b090e
41fb6a537f
b166c32eb8
288acfb511
2cb9682303
7e52170789
979b3d4269
9772014bce
f8763c49ef
b4ef00659c
df49c53bb6
8dfe483b38
b45d7cbbc3
239ba9b1cc
2d5a75d4f2
e1ad3f05e6
b9280edbfa
010993fc5f
713931661e
af06521f66
c32f57f85a
171061a778
476ea35bdb
00e6866664
201bf95ab8
ff22bbd0e4
fd8d4e937c
7903348d79
090dbba06e
af77e69eb0
23e7638687
36b722960a
3dd241a398
b5a9c3d1f6
eca714a45a
8049699efb
7c6afd50dc
42d8889778
a4c0304e1f
c63e688ba8
16298b4195
da23eb22db
4bd1d0199b
efe7050030
79ff85f517
73ad4caf94
e3d649d349
657e3488ba
7508c14a53
ac84970da8
30073f3493
3abd7b8ba3
62bc6e4bf6
5faa189fef
e09ae1c83d
7b20299de7
81e501aba1
568ac22ce8
c71ab054f1
bea201f9f6
dda90bc04c
a033e4c88d
42b6f83e7c
cb937bc115
bec569caf9
3390fb32a8
3ebb58f780
30b131d3b9
cd40232beb
f27fe365c5
1c769418fb
db7c2dab52
627377872b
8504162539
67c6a1d436
.github/workflows/builder.yml (vendored, 28 changed lines)

@@ -69,7 +69,7 @@ jobs:
       run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

     - name: Upload translations
-      uses: actions/upload-artifact@v4.6.0
+      uses: actions/upload-artifact@v4.6.1
       with:
         name: translations
         path: translations.tar.gz

@@ -175,7 +175,7 @@ jobs:
       sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

     - name: Download translations
-      uses: actions/download-artifact@v4.1.8
+      uses: actions/download-artifact@v4.1.9
       with:
         name: translations

@@ -197,7 +197,7 @@ jobs:
       password: ${{ secrets.GITHUB_TOKEN }}

     - name: Build base image
-      uses: home-assistant/builder@2024.08.2
+      uses: home-assistant/builder@2025.02.0
       with:
         args: |
           $BUILD_ARGS \

@@ -263,7 +263,7 @@ jobs:
       password: ${{ secrets.GITHUB_TOKEN }}

     - name: Build base image
-      uses: home-assistant/builder@2024.08.2
+      uses: home-assistant/builder@2025.02.0
       with:
         args: |
           $BUILD_ARGS \

@@ -324,7 +324,7 @@ jobs:
       uses: actions/checkout@v4.2.2

     - name: Install Cosign
-      uses: sigstore/cosign-installer@v3.8.0
+      uses: sigstore/cosign-installer@v3.8.1
       with:
         cosign-release: "v2.2.3"

@@ -448,6 +448,9 @@ jobs:
     environment: ${{ needs.init.outputs.channel }}
     needs: ["init", "build_base"]
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      id-token: write
     if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
     steps:
       - name: Checkout the repository

@@ -459,7 +462,7 @@ jobs:
         python-version: ${{ env.DEFAULT_PYTHON }}

     - name: Download translations
-      uses: actions/download-artifact@v4.1.8
+      uses: actions/download-artifact@v4.1.9
       with:
         name: translations

@@ -473,16 +476,13 @@ jobs:
       run: |
         # Remove dist, build, and homeassistant.egg-info
         # when build locally for testing!
-        pip install twine build
+        pip install build
        python -m build

-    - name: Upload package
-      shell: bash
-      run: |
-        export TWINE_USERNAME="__token__"
-        export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}"
-
-        twine upload dist/* --skip-existing
+    - name: Upload package to PyPI
+      uses: pypa/gh-action-pypi-publish@v1.12.4
+      with:
+        skip-existing: true

   hassfest-image:
     name: Build and test hassfest image
.github/workflows/ci.yaml (vendored, 28 changed lines)

@@ -537,7 +537,7 @@ jobs:
           python --version
           uv pip freeze >> pip_freeze.txt
       - name: Upload pip_freeze artifact
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pip-freeze-${{ matrix.python-version }}
           path: pip_freeze.txt

@@ -661,7 +661,7 @@ jobs:
           . venv/bin/activate
           python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
       - name: Upload licenses
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
           path: licenses-${{ matrix.python-version }}.json

@@ -877,7 +877,7 @@ jobs:
           . venv/bin/activate
           python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
       - name: Upload pytest_buckets
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pytest_buckets
           path: pytest_buckets.txt

@@ -942,7 +942,7 @@ jobs:
         run: |
           echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
       - name: Download pytest_buckets
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: pytest_buckets
       - name: Compile English translations

@@ -980,14 +980,14 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml

@@ -1108,7 +1108,7 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}

@@ -1116,7 +1116,7 @@ jobs:
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}

@@ -1239,7 +1239,7 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}

@@ -1247,7 +1247,7 @@ jobs:
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}

@@ -1271,7 +1271,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov

@@ -1382,14 +1382,14 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml

@@ -1410,7 +1410,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
.github/workflows/codeql.yml (vendored, 4 changed lines)

@@ -24,11 +24,11 @@ jobs:
       uses: actions/checkout@v4.2.2

     - name: Initialize CodeQL
-      uses: github/codeql-action/init@v3.28.9
+      uses: github/codeql-action/init@v3.28.10
       with:
         languages: python

     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v3.28.9
+      uses: github/codeql-action/analyze@v3.28.10
       with:
         category: "/language:python"
.github/workflows/wheels.yml (vendored, 62 changed lines)

@@ -91,7 +91,7 @@ jobs:
          ) > build_constraints.txt

      - name: Upload env_file
-       uses: actions/upload-artifact@v4.6.0
+       uses: actions/upload-artifact@v4.6.1
        with:
          name: env_file
          path: ./.env_file

@@ -99,14 +99,14 @@ jobs:
          overwrite: true

      - name: Upload build_constraints
-       uses: actions/upload-artifact@v4.6.0
+       uses: actions/upload-artifact@v4.6.1
        with:
          name: build_constraints
          path: ./build_constraints.txt
          overwrite: true

      - name: Upload requirements_diff
-       uses: actions/upload-artifact@v4.6.0
+       uses: actions/upload-artifact@v4.6.1
        with:
          name: requirements_diff
          path: ./requirements_diff.txt

@@ -118,7 +118,7 @@ jobs:
          python -m script.gen_requirements_all ci

      - name: Upload requirements_all_wheels
-       uses: actions/upload-artifact@v4.6.0
+       uses: actions/upload-artifact@v4.6.1
        with:
          name: requirements_all_wheels
          path: ./requirements_all_wheels_*.txt

@@ -138,17 +138,17 @@ jobs:
        uses: actions/checkout@v4.2.2

      - name: Download env_file
-       uses: actions/download-artifact@v4.1.8
+       uses: actions/download-artifact@v4.1.9
        with:
          name: env_file

      - name: Download build_constraints
-       uses: actions/download-artifact@v4.1.8
+       uses: actions/download-artifact@v4.1.9
        with:
          name: build_constraints

      - name: Download requirements_diff
-       uses: actions/download-artifact@v4.1.8
+       uses: actions/download-artifact@v4.1.9
        with:
          name: requirements_diff

@@ -187,22 +187,22 @@ jobs:
        uses: actions/checkout@v4.2.2

      - name: Download env_file
-       uses: actions/download-artifact@v4.1.8
+       uses: actions/download-artifact@v4.1.9
        with:
          name: env_file

      - name: Download build_constraints
-       uses: actions/download-artifact@v4.1.8
+       uses: actions/download-artifact@v4.1.9
        with:
          name: build_constraints

      - name: Download requirements_diff
-       uses: actions/download-artifact@v4.1.8
+       uses: actions/download-artifact@v4.1.9
        with:
          name: requirements_diff

      - name: Download requirements_all_wheels
-       uses: actions/download-artifact@v4.1.8
+       uses: actions/download-artifact@v4.1.9
        with:
          name: requirements_all_wheels

@@ -218,15 +218,7 @@ jobs:
       sed -i "/uv/d" requirements.txt
       sed -i "/uv/d" requirements_diff.txt

-    - name: Split requirements all
-      run: |
-        # We split requirements all into multiple files.
-        # This is to prevent the build from running out of memory when
-        # resolving packages on 32-bits systems (like armhf, armv7).
-
-        split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
-
-    - name: Build wheels (part 1)
+    - name: Build wheels
       uses: home-assistant/wheels@2024.11.0
       with:
         abi: ${{ matrix.abi }}

@@ -238,32 +230,4 @@ jobs:
         skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
         constraints: "homeassistant/package_constraints.txt"
         requirements-diff: "requirements_diff.txt"
-        requirements: "requirements_all.txtaa"
-
-    - name: Build wheels (part 2)
-      uses: home-assistant/wheels@2024.11.0
-      with:
-        abi: ${{ matrix.abi }}
-        tag: musllinux_1_2
-        arch: ${{ matrix.arch }}
-        wheels-key: ${{ secrets.WHEELS_KEY }}
-        env-file: true
-        apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-        skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
-        constraints: "homeassistant/package_constraints.txt"
-        requirements-diff: "requirements_diff.txt"
-        requirements: "requirements_all.txtab"
-
-    - name: Build wheels (part 3)
-      uses: home-assistant/wheels@2024.11.0
-      with:
-        abi: ${{ matrix.abi }}
-        tag: musllinux_1_2
-        arch: ${{ matrix.arch }}
-        wheels-key: ${{ secrets.WHEELS_KEY }}
-        env-file: true
-        apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-        skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
-        constraints: "homeassistant/package_constraints.txt"
-        requirements-diff: "requirements_diff.txt"
-        requirements: "requirements_all.txtac"
+        requirements: "requirements_all.txt"
@@ -103,6 +103,7 @@ homeassistant.components.auth.*
 homeassistant.components.automation.*
 homeassistant.components.awair.*
 homeassistant.components.axis.*
+homeassistant.components.azure_storage.*
 homeassistant.components.backup.*
 homeassistant.components.baf.*
 homeassistant.components.bang_olufsen.*

@@ -407,6 +408,7 @@ homeassistant.components.raspberry_pi.*
 homeassistant.components.rdw.*
 homeassistant.components.recollect_waste.*
 homeassistant.components.recorder.*
+homeassistant.components.remember_the_milk.*
 homeassistant.components.remote.*
 homeassistant.components.renault.*
 homeassistant.components.reolink.*
.vscode/launch.json (vendored, 11 changed lines)

@@ -38,10 +38,17 @@
       "module": "pytest",
       "justMyCode": false,
       "args": [
         "--timeout=10",
         "--picked"
       ],
     },
+    {
+      "name": "Home Assistant: Debug Current Test File",
+      "type": "debugpy",
+      "request": "launch",
+      "module": "pytest",
+      "console": "integratedTerminal",
+      "args": ["-vv", "${file}"]
+    },
     {
       // Debug by attaching to local Home Assistant server using Remote Python Debugger.
       // See https://www.home-assistant.io/integrations/debugpy/

@@ -77,4 +84,4 @@
       ]
     }
   ]
 }
CODEOWNERS (generated, 20 changed lines)

@@ -180,6 +180,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/azure_event_hub/ @eavanvalkenburg
 /tests/components/azure_event_hub/ @eavanvalkenburg
 /homeassistant/components/azure_service_bus/ @hfurubotten
+/homeassistant/components/azure_storage/ @zweckj
+/tests/components/azure_storage/ @zweckj
 /homeassistant/components/backup/ @home-assistant/core
 /tests/components/backup/ @home-assistant/core
 /homeassistant/components/baf/ @bdraco @jfroy

@@ -967,8 +969,8 @@ build.json @home-assistant/supervisor
 /tests/components/motionblinds_ble/ @LennP @jerrybboy
 /homeassistant/components/motioneye/ @dermotduffy
 /tests/components/motioneye/ @dermotduffy
-/homeassistant/components/motionmount/ @RJPoelstra
-/tests/components/motionmount/ @RJPoelstra
+/homeassistant/components/motionmount/ @laiho-vogels
+/tests/components/motionmount/ @laiho-vogels
 /homeassistant/components/mqtt/ @emontnemery @jbouwh @bdraco
 /tests/components/mqtt/ @emontnemery @jbouwh @bdraco
 /homeassistant/components/msteams/ @peroyvind

@@ -1051,8 +1053,8 @@ build.json @home-assistant/supervisor
 /tests/components/numato/ @clssn
 /homeassistant/components/number/ @home-assistant/core @Shulyaka
 /tests/components/number/ @home-assistant/core @Shulyaka
-/homeassistant/components/nut/ @bdraco @ollo69 @pestevez
-/tests/components/nut/ @bdraco @ollo69 @pestevez
+/homeassistant/components/nut/ @bdraco @ollo69 @pestevez @tdfountain
+/tests/components/nut/ @bdraco @ollo69 @pestevez @tdfountain
 /homeassistant/components/nws/ @MatthewFlamm @kamiyo
 /tests/components/nws/ @MatthewFlamm @kamiyo
 /homeassistant/components/nyt_games/ @joostlek

@@ -1144,8 +1146,8 @@ build.json @home-assistant/supervisor
 /tests/components/philips_js/ @elupus
 /homeassistant/components/pi_hole/ @shenxn
 /tests/components/pi_hole/ @shenxn
-/homeassistant/components/picnic/ @corneyl
-/tests/components/picnic/ @corneyl
+/homeassistant/components/picnic/ @corneyl @codesalatdev
+/tests/components/picnic/ @corneyl @codesalatdev
 /homeassistant/components/ping/ @jpbede
 /tests/components/ping/ @jpbede
 /homeassistant/components/plaato/ @JohNan

@@ -1399,6 +1401,8 @@ build.json @home-assistant/supervisor
 /tests/components/smappee/ @bsmappee
 /homeassistant/components/smart_meter_texas/ @grahamwetzler
 /tests/components/smart_meter_texas/ @grahamwetzler
+/homeassistant/components/smartthings/ @joostlek
+/tests/components/smartthings/ @joostlek
 /homeassistant/components/smarttub/ @mdz
 /tests/components/smarttub/ @mdz
 /homeassistant/components/smarty/ @z0mbieprocess

@@ -1413,6 +1417,8 @@ build.json @home-assistant/supervisor
 /tests/components/snapcast/ @luar123
 /homeassistant/components/snmp/ @nmaggioni
 /tests/components/snmp/ @nmaggioni
+/homeassistant/components/snoo/ @Lash-L
+/tests/components/snoo/ @Lash-L
 /homeassistant/components/snooz/ @AustinBrunkhorst
 /tests/components/snooz/ @AustinBrunkhorst
 /homeassistant/components/solaredge/ @frenck @bdraco

@@ -1693,6 +1699,8 @@ build.json @home-assistant/supervisor
 /tests/components/weatherflow_cloud/ @jeeftor
 /homeassistant/components/weatherkit/ @tjhorner
 /tests/components/weatherkit/ @tjhorner
+/homeassistant/components/webdav/ @jpbede
+/tests/components/webdav/ @jpbede
 /homeassistant/components/webhook/ @home-assistant/core
 /tests/components/webhook/ @home-assistant/core
 /homeassistant/components/webmin/ @autinerd
@@ -74,6 +74,7 @@ from .core_config import async_process_ha_core_config
 from .exceptions import HomeAssistantError
 from .helpers import (
     area_registry,
+    backup,
     category_registry,
     config_validation as cv,
     device_registry,

@@ -163,16 +164,6 @@ FRONTEND_INTEGRATIONS = {
     # integrations can be removed and database migration status is
     # visible in frontend
     "frontend",
-    # Hassio is an after dependency of backup, after dependencies
-    # are not promoted from stage 2 to earlier stages, so we need to
-    # add it here. Hassio needs to be setup before backup, otherwise
-    # the backup integration will think we are a container/core install
-    # when using HAOS or Supervised install.
-    "hassio",
-    # Backup is an after dependency of frontend, after dependencies
-    # are not promoted from stage 2 to earlier stages, so we need to
-    # add it here.
-    "backup",
 }
 # Stage 0 is divided into substages. Each substage has a name, a set of integrations and a timeout.
 # The substage containing recorder should have no timeout, as it could cancel a database migration.

@@ -206,6 +197,8 @@ STAGE_1_INTEGRATIONS = {
     "mqtt_eventstream",
     # To provide account link implementations
     "cloud",
+    # Ensure supervisor is available
+    "hassio",
 }

 DEFAULT_INTEGRATIONS = {

@@ -328,10 +321,10 @@ async def async_setup_hass(
     block_async_io.enable()

-    config_dict = None
-    basic_setup_success = False
-
     if not (recovery_mode := runtime_config.recovery_mode):
+        config_dict = None
+        basic_setup_success = False
+
         await hass.async_add_executor_job(conf_util.process_ha_config_upgrade, hass)

         try:

@@ -349,39 +342,43 @@
             await async_from_config_dict(config_dict, hass) is not None
         )

-    if config_dict is None:
-        recovery_mode = True
-        await stop_hass(hass)
-        hass = await create_hass()
+        if config_dict is None:
+            recovery_mode = True
+            await stop_hass(hass)
+            hass = await create_hass()

-    elif not basic_setup_success:
-        _LOGGER.warning("Unable to set up core integrations. Activating recovery mode")
-        recovery_mode = True
-        await stop_hass(hass)
-        hass = await create_hass()
+        elif not basic_setup_success:
+            _LOGGER.warning(
+                "Unable to set up core integrations. Activating recovery mode"
+            )
+            recovery_mode = True
+            await stop_hass(hass)
+            hass = await create_hass()

-    elif any(domain not in hass.config.components for domain in CRITICAL_INTEGRATIONS):
-        _LOGGER.warning(
-            "Detected that %s did not load. Activating recovery mode",
-            ",".join(CRITICAL_INTEGRATIONS),
-        )
+        elif any(
+            domain not in hass.config.components for domain in CRITICAL_INTEGRATIONS
+        ):
+            _LOGGER.warning(
+                "Detected that %s did not load. Activating recovery mode",
+                ",".join(CRITICAL_INTEGRATIONS),
+            )

-        old_config = hass.config
-        old_logging = hass.data.get(DATA_LOGGING)
+            old_config = hass.config
+            old_logging = hass.data.get(DATA_LOGGING)

-        recovery_mode = True
-        await stop_hass(hass)
-        hass = await create_hass()
+            recovery_mode = True
+            await stop_hass(hass)
+            hass = await create_hass()

-        if old_logging:
-            hass.data[DATA_LOGGING] = old_logging
-        hass.config.debug = old_config.debug
-        hass.config.skip_pip = old_config.skip_pip
-        hass.config.skip_pip_packages = old_config.skip_pip_packages
-        hass.config.internal_url = old_config.internal_url
-        hass.config.external_url = old_config.external_url
-        # Setup loader cache after the config dir has been set
-        loader.async_setup(hass)
+            if old_logging:
+                hass.data[DATA_LOGGING] = old_logging
+            hass.config.debug = old_config.debug
+            hass.config.skip_pip = old_config.skip_pip
+            hass.config.skip_pip_packages = old_config.skip_pip_packages
+            hass.config.internal_url = old_config.internal_url
+            hass.config.external_url = old_config.external_url
+            # Setup loader cache after the config dir has been set
+            loader.async_setup(hass)

     if recovery_mode:
         _LOGGER.info("Starting in recovery mode")

@@ -901,6 +898,10 @@
     if "recorder" in domains_to_setup:
         recorder.async_initialize_recorder(hass)

+    # Initialize backup
+    if "backup" in domains_to_setup:
+        backup.async_initialize_backup(hass)
+
     stage_0_and_1_domains: list[tuple[str, set[str], int | None]] = [
         *(
             (name, domain_group & domains_to_setup, timeout)
@@ -6,6 +6,7 @@
     "azure_devops",
     "azure_event_hub",
     "azure_service_bus",
+    "azure_storage",
     "microsoft_face_detect",
     "microsoft_face_identify",
     "microsoft_face",
@@ -7,6 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
-  "requirements": ["accuweather==4.0.0"],
+  "requirements": ["accuweather==4.1.0"],
   "single_config_entry": true
 }
@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["adext", "alarmdecoder"],
-  "requirements": ["adext==0.4.3"]
+  "requirements": ["adext==0.4.4"]
 }
@@ -14,7 +14,7 @@ from homeassistant.components.notify import (
 )
 from homeassistant.const import STATE_IDLE, STATE_OFF, STATE_ON
 from homeassistant.core import Event, EventStateChangedData, HassJob, HomeAssistant
-from homeassistant.exceptions import ServiceNotFound
+from homeassistant.exceptions import ServiceNotFound, ServiceValidationError
 from homeassistant.helpers.entity import Entity
 from homeassistant.helpers.event import (
     async_track_point_in_time,

@@ -195,7 +195,8 @@ class AlertEntity(Entity):

     async def async_turn_off(self, **kwargs: Any) -> None:
         """Async Acknowledge alert."""
         LOGGER.debug("Acknowledged Alert: %s", self._attr_name)
+        if not self._can_ack:
+            raise ServiceValidationError("This alert cannot be acknowledged")
         self._ack = True
         self.async_write_ha_state()
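For context on the alert change above: `ServiceValidationError` is the exception Home Assistant expects when a service call is rejected as invalid user input, so it surfaces as a validation failure rather than an unexpected error with a full traceback. A minimal sketch of the same guard in isolation (the `can_ack` flag and function name are illustrative, not part of the diff):

```python
from homeassistant.exceptions import ServiceValidationError


def ensure_acknowledgeable(can_ack: bool, name: str) -> None:
    # Reject the service call as invalid input instead of letting it
    # fail with a generic error deeper in the call stack.
    if not can_ack:
        raise ServiceValidationError(f"Alert {name} cannot be acknowledged")
```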
@@ -7,6 +7,6 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["androidtvremote2"],
-  "requirements": ["androidtvremote2==0.1.2"],
+  "requirements": ["androidtvremote2==0.2.0"],
   "zeroconf": ["_androidtvremote2._tcp.local."]
 }
@@ -2,6 +2,8 @@

 from __future__ import annotations

+from functools import partial
+
 import anthropic

 from homeassistant.config_entries import ConfigEntry

@@ -20,7 +22,9 @@ type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]

 async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
     """Set up Anthropic from a config entry."""
-    client = anthropic.AsyncAnthropic(api_key=entry.data[CONF_API_KEY])
+    client = await hass.async_add_executor_job(
+        partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
+    )
     try:
         await client.messages.create(
             model="claude-3-haiku-20240307",

@@ -2,6 +2,7 @@

 from __future__ import annotations

+from functools import partial
 import logging
 from types import MappingProxyType
 from typing import Any

@@ -59,7 +60,9 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:

     Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
     """
-    client = anthropic.AsyncAnthropic(api_key=data[CONF_API_KEY])
+    client = await hass.async_add_executor_job(
+        partial(anthropic.AsyncAnthropic, api_key=data[CONF_API_KEY])
+    )
     await client.messages.create(
         model="claude-3-haiku-20240307",
         max_tokens=1,

@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/anthropic",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["anthropic==0.44.0"]
+  "requirements": ["anthropic==0.47.2"]
 }
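The two Anthropic changes above share one pattern: the SDK client is constructed in the executor because its `__init__` can do blocking I/O, and `functools.partial` is used since `async_add_executor_job` only forwards positional arguments. A minimal sketch of that pattern, with `BlockingClient` as a hypothetical stand-in for any such SDK:

```python
from functools import partial

from homeassistant.core import HomeAssistant


class BlockingClient:
    """Hypothetical stand-in for an SDK whose __init__ blocks (file reads, DNS)."""

    def __init__(self, api_key: str) -> None:
        self.api_key = api_key


async def make_client(hass: HomeAssistant, api_key: str) -> BlockingClient:
    # Run the blocking constructor in the executor so the event loop
    # stays responsive; partial() binds the keyword argument because
    # async_add_executor_job passes positional arguments only.
    return await hass.async_add_executor_job(
        partial(BlockingClient, api_key=api_key)
    )
```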
@@ -233,7 +233,6 @@ class AppleTVManager(DeviceListener):
             pass
         except Exception:
             _LOGGER.exception("Failed to connect")
-            await self.disconnect()

     async def _connect_loop(self) -> None:
         """Connect loop background task function."""
@@ -1103,12 +1103,16 @@ class PipelineRun:
             ) & conversation.ConversationEntityFeature.CONTROL:
                 intent_filter = _async_local_fallback_intent_filter

-            # Try local intents first, if preferred.
-            elif self.pipeline.prefer_local_intents and (
-                intent_response := await conversation.async_handle_intents(
-                    self.hass,
-                    user_input,
-                    intent_filter=intent_filter,
+            # Try local intents
+            if (
+                intent_response is None
+                and self.pipeline.prefer_local_intents
+                and (
+                    intent_response := await conversation.async_handle_intents(
+                        self.hass,
+                        user_input,
+                        intent_filter=intent_filter,
+                    )
                 )
             ):
                 # Local intent matched
homeassistant/components/azure_storage/__init__.py (new file, 82 lines)

"""The Azure Storage integration."""

from aiohttp import ClientTimeout
from azure.core.exceptions import (
    ClientAuthenticationError,
    HttpResponseError,
    ResourceNotFoundError,
)
from azure.core.pipeline.transport._aiohttp import (
    AioHttpTransport,
)  # need to import from private file, as it is not properly imported in the init
from azure.storage.blob.aio import ContainerClient

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_create_clientsession

from .const import (
    CONF_ACCOUNT_NAME,
    CONF_CONTAINER_NAME,
    CONF_STORAGE_ACCOUNT_KEY,
    DATA_BACKUP_AGENT_LISTENERS,
    DOMAIN,
)

type AzureStorageConfigEntry = ConfigEntry[ContainerClient]


async def async_setup_entry(
    hass: HomeAssistant, entry: AzureStorageConfigEntry
) -> bool:
    """Set up Azure Storage integration."""
    # set increase aiohttp timeout for long running operations (up/download)
    session = async_create_clientsession(
        hass, timeout=ClientTimeout(connect=10, total=12 * 60 * 60)
    )
    container_client = ContainerClient(
        account_url=f"https://{entry.data[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
        container_name=entry.data[CONF_CONTAINER_NAME],
        credential=entry.data[CONF_STORAGE_ACCOUNT_KEY],
        transport=AioHttpTransport(session=session),
    )

    try:
        if not await container_client.exists():
            await container_client.create_container()
    except ResourceNotFoundError as err:
        raise ConfigEntryError(
            translation_domain=DOMAIN,
            translation_key="account_not_found",
            translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
        ) from err
    except ClientAuthenticationError as err:
        raise ConfigEntryError(
            translation_domain=DOMAIN,
            translation_key="invalid_auth",
            translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
        ) from err
    except HttpResponseError as err:
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="cannot_connect",
            translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
        ) from err

    entry.runtime_data = container_client

    def _async_notify_backup_listeners() -> None:
        for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
            listener()

    entry.async_on_unload(entry.async_on_state_change(_async_notify_backup_listeners))

    return True


async def async_unload_entry(
    hass: HomeAssistant, entry: AzureStorageConfigEntry
) -> bool:
    """Unload an Azure Storage config entry."""
    return True
homeassistant/components/azure_storage/backup.py (new file, 182 lines)

"""Support for Azure Storage backup."""

from __future__ import annotations

from collections.abc import AsyncIterator, Callable, Coroutine
from functools import wraps
import json
import logging
from typing import Any, Concatenate

from azure.core.exceptions import HttpResponseError
from azure.storage.blob import BlobProperties

from homeassistant.components.backup import (
    AgentBackup,
    BackupAgent,
    BackupAgentError,
    BackupNotFound,
    suggested_filename,
)
from homeassistant.core import HomeAssistant, callback

from . import AzureStorageConfigEntry
from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN

_LOGGER = logging.getLogger(__name__)
METADATA_VERSION = "1"


async def async_get_backup_agents(
    hass: HomeAssistant,
) -> list[BackupAgent]:
    """Return a list of backup agents."""
    entries: list[AzureStorageConfigEntry] = hass.config_entries.async_loaded_entries(
        DOMAIN
    )
    return [AzureStorageBackupAgent(hass, entry) for entry in entries]


@callback
def async_register_backup_agents_listener(
    hass: HomeAssistant,
    *,
    listener: Callable[[], None],
    **kwargs: Any,
) -> Callable[[], None]:
    """Register a listener to be called when agents are added or removed."""
    hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener)

    @callback
    def remove_listener() -> None:
        """Remove the listener."""
        hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener)
        if not hass.data[DATA_BACKUP_AGENT_LISTENERS]:
            hass.data.pop(DATA_BACKUP_AGENT_LISTENERS)

    return remove_listener


def handle_backup_errors[_R, **P](
    func: Callable[Concatenate[AzureStorageBackupAgent, P], Coroutine[Any, Any, _R]],
) -> Callable[Concatenate[AzureStorageBackupAgent, P], Coroutine[Any, Any, _R]]:
    """Handle backup errors."""

    @wraps(func)
    async def wrapper(
        self: AzureStorageBackupAgent, *args: P.args, **kwargs: P.kwargs
    ) -> _R:
        try:
            return await func(self, *args, **kwargs)
        except HttpResponseError as err:
            _LOGGER.debug(
                "Error during backup in %s: Status %s, message %s",
                func.__name__,
                err.status_code,
                err.message,
                exc_info=True,
            )
            raise BackupAgentError(
                f"Error during backup operation in {func.__name__}:"
                f" Status {err.status_code}, message: {err.message}"
            ) from err

    return wrapper


class AzureStorageBackupAgent(BackupAgent):
    """Azure storage backup agent."""

    domain = DOMAIN

    def __init__(self, hass: HomeAssistant, entry: AzureStorageConfigEntry) -> None:
        """Initialize the Azure storage backup agent."""
        super().__init__()
        self._client = entry.runtime_data
        self.name = entry.title
        self.unique_id = entry.entry_id

    @handle_backup_errors
    async def async_download_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AsyncIterator[bytes]:
        """Download a backup file."""
        blob = await self._find_blob_by_backup_id(backup_id)
        if blob is None:
            raise BackupNotFound(f"Backup {backup_id} not found")
        download_stream = await self._client.download_blob(blob.name)
        return download_stream.chunks()

    @handle_backup_errors
    async def async_upload_backup(
        self,
        *,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        backup: AgentBackup,
        **kwargs: Any,
    ) -> None:
        """Upload a backup."""

        metadata = {
            "metadata_version": METADATA_VERSION,
            "backup_id": backup.backup_id,
            "backup_metadata": json.dumps(backup.as_dict()),
        }

        await self._client.upload_blob(
            name=suggested_filename(backup),
            metadata=metadata,
            data=await open_stream(),
            length=backup.size,
        )

    @handle_backup_errors
    async def async_delete_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> None:
        """Delete a backup file."""
        blob = await self._find_blob_by_backup_id(backup_id)
        if blob is None:
            return
        await self._client.delete_blob(blob.name)

    @handle_backup_errors
    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List backups."""
        backups: list[AgentBackup] = []
        async for blob in self._client.list_blobs(include="metadata"):
            metadata = blob.metadata

            if metadata.get("metadata_version") == METADATA_VERSION:
                backups.append(
                    AgentBackup.from_dict(json.loads(metadata["backup_metadata"]))
                )

        return backups

    @handle_backup_errors
    async def async_get_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AgentBackup | None:
        """Return a backup."""
        blob = await self._find_blob_by_backup_id(backup_id)
        if blob is None:
            return None

        return AgentBackup.from_dict(json.loads(blob.metadata["backup_metadata"]))

    async def _find_blob_by_backup_id(self, backup_id: str) -> BlobProperties | None:
        """Find a blob by backup id."""
        async for blob in self._client.list_blobs(include="metadata"):
            if (
                backup_id == blob.metadata.get("backup_id", "")
                and blob.metadata.get("metadata_version") == METADATA_VERSION
            ):
                return blob
        return None
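The `handle_backup_errors` wrapper above combines PEP 695 generics with `Concatenate` so the decorated methods keep their exact signatures. A standalone sketch of the same decorator shape (requires Python 3.12+; `LibraryError` and `DomainError` are hypothetical stand-ins for `HttpResponseError` and `BackupAgentError`):

```python
from collections.abc import Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate


class LibraryError(Exception):
    """Stand-in for a third-party SDK error."""


class DomainError(Exception):
    """Stand-in for the application-level error."""


def translate_errors[S, _R, **P](
    func: Callable[Concatenate[S, P], Coroutine[Any, Any, _R]],
) -> Callable[Concatenate[S, P], Coroutine[Any, Any, _R]]:
    """Wrap an async method, re-raising SDK errors as domain errors."""

    @wraps(func)
    async def wrapper(self: S, *args: P.args, **kwargs: P.kwargs) -> _R:
        try:
            return await func(self, *args, **kwargs)
        except LibraryError as err:
            # Chain the original exception so the traceback is preserved.
            raise DomainError(f"{func.__name__} failed: {err}") from err

    return wrapper
```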
homeassistant/components/azure_storage/config_flow.py (new file, 72 lines)

"""Config flow for Azure Storage integration."""

import logging
from typing import Any

from azure.core.exceptions import ClientAuthenticationError, ResourceNotFoundError
from azure.core.pipeline.transport._aiohttp import (
    AioHttpTransport,
)  # need to import from private file, as it is not properly imported in the init
from azure.storage.blob.aio import ContainerClient
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import (
    CONF_ACCOUNT_NAME,
    CONF_CONTAINER_NAME,
    CONF_STORAGE_ACCOUNT_KEY,
    DOMAIN,
)

_LOGGER = logging.getLogger(__name__)


class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for azure storage."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """User step for Azure Storage."""

        errors: dict[str, str] = {}

        if user_input is not None:
            self._async_abort_entries_match(
                {CONF_ACCOUNT_NAME: user_input[CONF_ACCOUNT_NAME]}
            )
            container_client = ContainerClient(
                account_url=f"https://{user_input[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
                container_name=user_input[CONF_CONTAINER_NAME],
                credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
                transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
            )
            try:
                await container_client.exists()
            except ResourceNotFoundError:
                errors["base"] = "cannot_connect"
            except ClientAuthenticationError:
                errors[CONF_STORAGE_ACCOUNT_KEY] = "invalid_auth"
            except Exception:
                _LOGGER.exception("Unknown exception occurred")
                errors["base"] = "unknown"
            if not errors:
                return self.async_create_entry(
                    title=f"{user_input[CONF_ACCOUNT_NAME]}/{user_input[CONF_CONTAINER_NAME]}",
                    data=user_input,
                )

        return self.async_show_form(
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_ACCOUNT_NAME): str,
                    vol.Required(
                        CONF_CONTAINER_NAME, default="home-assistant-backups"
                    ): str,
                    vol.Required(CONF_STORAGE_ACCOUNT_KEY): str,
                }
            ),
            errors=errors,
        )
homeassistant/components/azure_storage/const.py (new file, 16 lines)

"""Constants for the Azure Storage integration."""

from collections.abc import Callable
from typing import Final

from homeassistant.util.hass_dict import HassKey

DOMAIN: Final = "azure_storage"

CONF_STORAGE_ACCOUNT_KEY: Final = "storage_account_key"
CONF_ACCOUNT_NAME: Final = "account_name"
CONF_CONTAINER_NAME: Final = "container_name"

DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
    f"{DOMAIN}.backup_agent_listeners"
)
homeassistant/components/azure_storage/manifest.json (new file, 12 lines)

{
  "domain": "azure_storage",
  "name": "Azure Storage",
  "codeowners": ["@zweckj"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/azure_storage",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["azure-storage-blob"],
  "quality_scale": "bronze",
  "requirements": ["azure-storage-blob==12.24.0"]
}
homeassistant/components/azure_storage/quality_scale.yaml (new file, 133 lines)

rules:
  # Bronze
  action-setup:
    status: exempt
    comment: Integration does not register custom actions.
  appropriate-polling:
    status: exempt
    comment: |
      This integration does not poll.
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      This integration does not have any custom actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: |
      Entities of this integration does not explicitly subscribe to events.
  entity-unique-id:
    status: exempt
    comment: |
      This integration does not have entities.
  has-entity-name:
    status: exempt
    comment: |
      This integration does not have entities.
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions: done
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: |
      This integration does not have any configuration parameters.
  docs-installation-parameters: done
  entity-unavailable:
    status: exempt
    comment: |
      This integration does not have entities.
  integration-owner: done
  log-when-unavailable:
    status: exempt
    comment: |
      This integration does not have entities.
  parallel-updates:
    status: exempt
    comment: |
      This integration does not have platforms.
  reauthentication-flow: todo
  test-coverage: done

  # Gold
  devices:
    status: exempt
    comment: |
      This integration connects to a single service.
  diagnostics:
    status: exempt
    comment: |
      There is no data to diagnose.
  discovery-update-info:
    status: exempt
    comment: |
      This integration is a cloud service and does not support discovery.
  discovery:
    status: exempt
    comment: |
      This integration is a cloud service and does not support discovery.
  docs-data-update:
    status: exempt
    comment: |
      This integration does not poll or push.
  docs-examples:
    status: exempt
    comment: |
      This integration only serves backup.
  docs-known-limitations: done
  docs-supported-devices:
    status: exempt
    comment: |
      This integration is a cloud service.
  docs-supported-functions:
    status: exempt
    comment: |
      This integration does not have entities.
  docs-troubleshooting: done
  docs-use-cases: done
  dynamic-devices:
    status: exempt
    comment: |
      This integration connects to a single service.
  entity-category:
    status: exempt
    comment: |
      This integration does not have entities.
  entity-device-class:
    status: exempt
    comment: |
      This integration does not have entities.
  entity-disabled-by-default:
    status: exempt
    comment: |
      This integration does not have entities.
  entity-translations:
    status: exempt
    comment: |
      This integration does not have entities.
  exception-translations: done
  icon-translations:
    status: exempt
    comment: |
      This integration does not have entities.
  reconfiguration-flow: todo
  repair-issues: done
  stale-devices:
    status: exempt
    comment: |
      This integration connects to a single service.
  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: done
homeassistant/components/azure_storage/strings.json (new file, 48 lines)

{
  "config": {
    "error": {
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "user": {
        "data": {
          "storage_account_key": "Storage account key",
          "account_name": "Account name",
          "container_name": "Container name"
        },
        "data_description": {
          "storage_account_key": "Storage account access key used for authorization",
          "account_name": "Name of the storage account",
          "container_name": "Name of the storage container to be used (will be created if it does not exist)"
        },
        "description": "Set up an Azure (Blob) storage account to be used for backups.",
        "title": "Add Azure storage account"
      }
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
    }
  },
  "issues": {
    "container_not_found": {
      "title": "Storage container not found",
      "description": "The storage container {container_name} has not been found in the storage account. Please re-create it manually, then fix this issue."
    }
  },
  "exceptions": {
    "account_not_found": {
      "message": "Storage account {account_name} not found"
    },
    "cannot_connect": {
      "message": "Can not connect to storage account {account_name}"
    },
    "invalid_auth": {
      "message": "Authentication failed for storage account {account_name}"
    },
    "container_not_found": {
      "message": "Storage container {container_name} not found"
    }
  }
}
@@ -1,8 +1,8 @@
 """The Backup integration."""

-from homeassistant.core import HomeAssistant, ServiceCall, callback
-from homeassistant.exceptions import HomeAssistantError
+from homeassistant.core import HomeAssistant, ServiceCall
 from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers.backup import DATA_BACKUP
 from homeassistant.helpers.hassio import is_hassio
 from homeassistant.helpers.typing import ConfigType

@@ -32,6 +32,7 @@ from .manager import (
     IdleEvent,
     IncorrectPasswordError,
     ManagerBackup,
+    ManagerStateEvent,
     NewBackup,
     RestoreBackupEvent,
     RestoreBackupStage,

@@ -63,12 +64,12 @@ __all__ = [
     "IncorrectPasswordError",
     "LocalBackupAgent",
     "ManagerBackup",
+    "ManagerStateEvent",
     "NewBackup",
     "RestoreBackupEvent",
     "RestoreBackupStage",
     "RestoreBackupState",
     "WrittenBackup",
-    "async_get_manager",
     "suggested_filename",
     "suggested_filename_from_name_date",
 ]

@@ -91,7 +92,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

     backup_manager = BackupManager(hass, reader_writer)
     hass.data[DATA_MANAGER] = backup_manager
-    await backup_manager.async_setup()
+    try:
+        await backup_manager.async_setup()
+    except Exception as err:
+        hass.data[DATA_BACKUP].manager_ready.set_exception(err)
+        raise
+    else:
+        hass.data[DATA_BACKUP].manager_ready.set_result(None)

     async_register_websocket_handlers(hass, with_hassio)

@@ -122,15 +129,3 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     async_register_http_views(hass)

     return True
-
-
-@callback
-def async_get_manager(hass: HomeAssistant) -> BackupManager:
-    """Get the backup manager instance.
-
-    Raises HomeAssistantError if the backup integration is not available.
-    """
-    if DATA_MANAGER not in hass.data:
-        raise HomeAssistantError("Backup integration is not available")
-
-    return hass.data[DATA_MANAGER]
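The setup hunk above gates consumers on a readiness future: async_setup resolves hass.data[DATA_BACKUP].manager_ready on success and propagates the failure to anyone awaiting it otherwise. A minimal, self-contained sketch of that gate, with a plain asyncio.Future standing in for the helper-owned attribute (setup_manager and wait_for_manager are illustrative names, not from the diff):

import asyncio

async def main() -> None:
    # Stand-in for hass.data[DATA_BACKUP].manager_ready.
    manager_ready: asyncio.Future[None] = asyncio.get_running_loop().create_future()

    async def setup_manager() -> None:
        try:
            await asyncio.sleep(0)  # the real code awaits backup_manager.async_setup()
        except Exception as err:
            manager_ready.set_exception(err)  # wake waiters with the failure
            raise
        else:
            manager_ready.set_result(None)  # wake waiters with success

    async def wait_for_manager() -> None:
        await manager_ready  # raises if setup failed, returns once ready

    await asyncio.gather(setup_manager(), wait_for_manager())

asyncio.run(main())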
38  homeassistant/components/backup/basic_websocket.py  Normal file
@@ -0,0 +1,38 @@
"""Websocket commands for the Backup integration."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.backup import async_subscribe_events
|
||||
|
||||
from .const import DATA_MANAGER
|
||||
from .manager import ManagerStateEvent
|
||||
|
||||
|
||||
@callback
|
||||
def async_register_websocket_handlers(hass: HomeAssistant) -> None:
|
||||
"""Register websocket commands."""
|
||||
websocket_api.async_register_command(hass, handle_subscribe_events)
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
|
||||
@websocket_api.async_response
|
||||
async def handle_subscribe_events(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Subscribe to backup events."""
|
||||
|
||||
def on_event(event: ManagerStateEvent) -> None:
|
||||
connection.send_message(websocket_api.event_message(msg["id"], event))
|
||||
|
||||
if DATA_MANAGER in hass.data:
|
||||
manager = hass.data[DATA_MANAGER]
|
||||
on_event(manager.last_event)
|
||||
connection.subscriptions[msg["id"]] = async_subscribe_events(hass, on_event)
|
||||
connection.send_result(msg["id"])
|
||||
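Because this module registers backup/subscribe_events unconditionally, a client can subscribe even before the backup manager has loaded. A hedged sketch of the client-side exchange over the standard Home Assistant websocket API; the exact serialized shape of ManagerStateEvent is an assumption here:

# Hypothetical client-side frames for the command registered above.
subscribe = {"id": 1, "type": "backup/subscribe_events"}
# If the manager is already loaded, the handler replays the last event before
# acknowledging, so a client should expect, in order:
#   {"id": 1, "type": "event", "event": {...serialized ManagerStateEvent...}}
#   {"id": 1, "type": "result", "success": True}
# followed by further {"type": "event"} frames as the backup state changes.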
@@ -12,16 +12,19 @@ from typing import TYPE_CHECKING, Self, TypedDict
 from cronsim import CronSim

 from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers import issue_registry as ir
 from homeassistant.helpers.event import async_call_later, async_track_point_in_time
 from homeassistant.helpers.typing import UNDEFINED, UndefinedType
 from homeassistant.util import dt as dt_util

-from .const import LOGGER
+from .const import DOMAIN, LOGGER
 from .models import BackupManagerError, Folder

 if TYPE_CHECKING:
     from .manager import BackupManager, ManagerBackup

+AUTOMATIC_BACKUP_AGENTS_UNAVAILABLE_ISSUE_ID = "automatic_backup_agents_unavailable"
+
 CRON_PATTERN_DAILY = "{m} {h} * * *"
 CRON_PATTERN_WEEKLY = "{m} {h} * * {d}"

@@ -39,6 +42,7 @@ class StoredBackupConfig(TypedDict):
     """Represent the stored backup config."""

     agents: dict[str, StoredAgentConfig]
+    automatic_backups_configured: bool
     create_backup: StoredCreateBackupConfig
     last_attempted_automatic_backup: str | None
     last_completed_automatic_backup: str | None

@@ -51,6 +55,7 @@ class BackupConfigData:
     """Represent loaded backup config data."""

     agents: dict[str, AgentConfig]
+    automatic_backups_configured: bool  # only used by frontend
     create_backup: CreateBackupConfig
     last_attempted_automatic_backup: datetime | None = None
     last_completed_automatic_backup: datetime | None = None

@@ -88,6 +93,7 @@ class BackupConfigData:
                 agent_id: AgentConfig(protected=agent_data["protected"])
                 for agent_id, agent_data in data["agents"].items()
             },
+            automatic_backups_configured=data["automatic_backups_configured"],
             create_backup=CreateBackupConfig(
                 agent_ids=data["create_backup"]["agent_ids"],
                 include_addons=data["create_backup"]["include_addons"],

@@ -127,6 +133,7 @@ class BackupConfigData:
             agents={
                 agent_id: agent.to_dict() for agent_id, agent in self.agents.items()
             },
+            automatic_backups_configured=self.automatic_backups_configured,
             create_backup=self.create_backup.to_dict(),
             last_attempted_automatic_backup=last_attempted,
             last_completed_automatic_backup=last_completed,

@@ -142,10 +149,12 @@ class BackupConfig:
         """Initialize backup config."""
         self.data = BackupConfigData(
             agents={},
+            automatic_backups_configured=False,
             create_backup=CreateBackupConfig(),
             retention=RetentionConfig(),
             schedule=BackupSchedule(),
         )
+        self._hass = hass
         self._manager = manager

     def load(self, stored_config: StoredBackupConfig) -> None:

@@ -159,6 +168,7 @@ class BackupConfig:
         self,
         *,
         agents: dict[str, AgentParametersDict] | UndefinedType = UNDEFINED,
+        automatic_backups_configured: bool | UndefinedType = UNDEFINED,
         create_backup: CreateBackupParametersDict | UndefinedType = UNDEFINED,
         retention: RetentionParametersDict | UndefinedType = UNDEFINED,
         schedule: ScheduleParametersDict | UndefinedType = UNDEFINED,

@@ -172,8 +182,12 @@ class BackupConfig:
                 self.data.agents[agent_id] = replace(
                     self.data.agents[agent_id], **agent_config
                 )
+        if automatic_backups_configured is not UNDEFINED:
+            self.data.automatic_backups_configured = automatic_backups_configured
         if create_backup is not UNDEFINED:
             self.data.create_backup = replace(self.data.create_backup, **create_backup)
+            if "agent_ids" in create_backup:
+                check_unavailable_agents(self._hass, self._manager)
         if retention is not UNDEFINED:
             new_retention = RetentionConfig(**retention)
             if new_retention != self.data.retention:

@@ -554,3 +568,46 @@ async def delete_backups_exceeding_configured_count(manager: BackupManager) -> N
     await manager.async_delete_filtered_backups(
         include_filter=_automatic_backups_filter, delete_filter=_delete_filter
     )
+
+
+@callback
+def check_unavailable_agents(hass: HomeAssistant, manager: BackupManager) -> None:
+    """Check for unavailable agents."""
+    if missing_agent_ids := set(manager.config.data.create_backup.agent_ids) - set(
+        manager.backup_agents
+    ):
+        LOGGER.debug(
+            "Agents %s are configured for automatic backup but are unavailable",
+            missing_agent_ids,
+        )
+
+    # Remove issues for unavailable agents that are not unavailable anymore.
+    issue_registry = ir.async_get(hass)
+    existing_missing_agent_issue_ids = {
+        issue_id
+        for domain, issue_id in issue_registry.issues
+        if domain == DOMAIN
+        and issue_id.startswith(AUTOMATIC_BACKUP_AGENTS_UNAVAILABLE_ISSUE_ID)
+    }
+    current_missing_agent_issue_ids = {
+        f"{AUTOMATIC_BACKUP_AGENTS_UNAVAILABLE_ISSUE_ID}_{agent_id}": agent_id
+        for agent_id in missing_agent_ids
+    }
+    for issue_id in existing_missing_agent_issue_ids - set(
+        current_missing_agent_issue_ids
+    ):
+        ir.async_delete_issue(hass, DOMAIN, issue_id)
+    for issue_id, agent_id in current_missing_agent_issue_ids.items():
+        ir.async_create_issue(
+            hass,
+            DOMAIN,
+            issue_id,
+            is_fixable=False,
+            learn_more_url="homeassistant://config/backup",
+            severity=ir.IssueSeverity.WARNING,
+            translation_key="automatic_backup_agents_unavailable",
+            translation_placeholders={
+                "agent_id": agent_id,
+                "backup_settings": "/config/backup/settings",
+            },
+        )
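check_unavailable_agents reconciles repair issues against current state with set arithmetic: stale issue IDs are deleted, and an issue is (re-)created for every agent still missing (async_create_issue is idempotent, so re-creating an existing issue is harmless). The core of that reconciliation in isolation, a sketch with plain sets standing in for the issue registry:

PREFIX = "automatic_backup_agents_unavailable"

def reconcile(
    existing_issue_ids: set[str], missing_agent_ids: set[str]
) -> tuple[set[str], dict[str, str]]:
    """Return (issue_ids_to_delete, issue_id -> agent_id to (re-)create)."""
    wanted = {f"{PREFIX}_{agent_id}": agent_id for agent_id in missing_agent_ids}
    return existing_issue_ids - set(wanted), wanted

# "hassio.local" came back, "backup.cloud" is still missing:
delete, create = reconcile({f"{PREFIX}_hassio.local"}, {"backup.cloud"})
assert delete == {f"{PREFIX}_hassio.local"}
assert create == {f"{PREFIX}_backup.cloud": "backup.cloud"}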
@@ -14,6 +14,7 @@ from itertools import chain
 import json
 from pathlib import Path, PurePath
 import shutil
+import sys
 import tarfile
 import time
 from typing import IO, TYPE_CHECKING, Any, Protocol, TypedDict, cast

@@ -32,7 +33,9 @@ from homeassistant.helpers import (
     instance_id,
     integration_platform,
     issue_registry as ir,
+    start,
 )
+from homeassistant.helpers.backup import DATA_BACKUP
 from homeassistant.helpers.json import json_bytes
 from homeassistant.util import dt as dt_util, json as json_util

@@ -46,6 +49,7 @@ from .agent import (
 from .config import (
     BackupConfig,
     CreateBackupParametersDict,
+    check_unavailable_agents,
     delete_backups_exceeding_configured_count,
 )
 from .const import (

@@ -305,6 +309,12 @@ class DecryptOnDowloadNotSupported(BackupManagerError):
     _message = "On-the-fly decryption is not supported for this backup."


+class BackupManagerExceptionGroup(BackupManagerError, ExceptionGroup):
+    """Raised when multiple exceptions occur."""
+
+    error_code = "multiple_errors"
+
+
 class BackupManager:
     """Define the format that backup managers can have."""

@@ -332,7 +342,9 @@ class BackupManager:
         # Latest backup event and backup event subscribers
         self.last_event: ManagerStateEvent = IdleEvent()
         self.last_non_idle_event: ManagerStateEvent | None = None
-        self._backup_event_subscriptions: list[Callable[[ManagerStateEvent], None]] = []
+        self._backup_event_subscriptions = hass.data[
+            DATA_BACKUP
+        ].backup_event_subscriptions

     async def async_setup(self) -> None:
         """Set up the backup manager."""

@@ -414,6 +426,13 @@ class BackupManager:
             }
         )

+        @callback
+        def check_unavailable_agents_after_start(hass: HomeAssistant) -> None:
+            """Check unavailable agents after start."""
+            check_unavailable_agents(hass, self)
+
+        start.async_at_started(self.hass, check_unavailable_agents_after_start)
+
     async def _add_platform(
         self,
         hass: HomeAssistant,

@@ -1279,19 +1298,6 @@ class BackupManager:
         for subscription in self._backup_event_subscriptions:
             subscription(event)

-    @callback
-    def async_subscribe_events(
-        self,
-        on_event: Callable[[ManagerStateEvent], None],
-    ) -> Callable[[], None]:
-        """Subscribe events."""
-
-        def remove_subscription() -> None:
-            self._backup_event_subscriptions.remove(on_event)
-
-        self._backup_event_subscriptions.append(on_event)
-        return remove_subscription
-
     def _update_issue_backup_failed(self) -> None:
         """Update issue registry when a backup fails."""
         ir.async_create_issue(
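A theme of the manager hunks above: the subscriber list moves from the manager into hass.data[DATA_BACKUP], so callers can subscribe through the helper before a BackupManager exists, and the manager simply adopts whatever list is already there (its own async_subscribe_events is deleted in the hunk above). A minimal sketch of that inversion, with a plain dict standing in for hass.data:

from collections.abc import Callable

# hass_data stands in for hass.data; "backup" for DATA_BACKUP.
hass_data: dict[str, list[Callable[[str], None]]] = {"backup": []}

def async_subscribe_events(on_event: Callable[[str], None]) -> Callable[[], None]:
    """Helper-level subscribe that needs no manager instance."""
    subscriptions = hass_data["backup"]
    subscriptions.append(on_event)
    return lambda: subscriptions.remove(on_event)

class Manager:
    def __init__(self) -> None:
        # Adopt the pre-existing list instead of owning a private one.
        self._subscriptions = hass_data["backup"]

    def emit(self, event: str) -> None:
        for subscription in self._subscriptions:
            subscription(event)

unsubscribe = async_subscribe_events(print)  # works before any Manager exists
Manager().emit("backup_started")  # prints: backup_started
unsubscribe()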
@@ -1606,10 +1612,24 @@ class CoreBackupReaderWriter(BackupReaderWriter):
            )
        finally:
            # Inform integrations the backup is done
-           try:
-               await manager.async_post_backup_actions()
-           except BackupManagerError as err:
-               raise BackupReaderWriterError(str(err)) from err
+           # If there's an unhandled exception, we keep it so we can rethrow it in case
+           # the post backup actions also fail.
+           unhandled_exc = sys.exception()
+           try:
+               await manager.async_post_backup_actions()
+           except BackupManagerError as err:
+               raise BackupReaderWriterError(str(err)) from err
+           except Exception as err:
+               if not unhandled_exc:
+                   raise
+               # If there's an unhandled exception, we wrap both that and the exception
+               # from the post backup actions in an ExceptionGroup so the caller is
+               # aware of both exceptions.
+               raise BackupManagerExceptionGroup(
+                   f"Multiple errors when creating backup: {unhandled_exc}, {err}",
+                   [unhandled_exc, err],
+               ) from None

    def _mkdir_and_generate_backup_contents(
        self,
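The rewritten finally block relies on sys.exception() (Python 3.11+) returning the exception currently being handled, so a failure in the post-backup actions cannot silently mask the original error; both surface together instead. The same pattern stripped down to a runnable sketch, using the built-in ExceptionGroup in place of BackupManagerExceptionGroup:

import sys

def post_actions() -> None:
    raise OSError("post-backup action failed")

def create_backup() -> None:
    try:
        raise ValueError("backup itself failed")
    finally:
        unhandled_exc = sys.exception()  # the ValueError, or None on success
        try:
            post_actions()
        except Exception as err:
            if not unhandled_exc:
                raise  # nothing to preserve, let the cleanup error propagate
            # Surface both failures instead of letting the second mask the first.
            raise ExceptionGroup(
                f"multiple errors: {unhandled_exc}, {err}", [unhandled_exc, err]
            ) from None

try:
    create_backup()
except ExceptionGroup as eg:
    assert len(eg.exceptions) == 2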
@@ -1621,7 +1641,13 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         """Generate backup contents and return the size."""
         if not tar_file_path:
             tar_file_path = self.temp_backup_dir / f"{backup_data['slug']}.tar"
-        make_backup_dir(tar_file_path.parent)
+        try:
+            make_backup_dir(tar_file_path.parent)
+        except OSError as err:
+            raise BackupReaderWriterError(
+                f"Failed to create dir {tar_file_path.parent}: "
+                f"{err} ({err.__class__.__name__})"
+            ) from err

         excludes = EXCLUDE_FROM_BACKUP
         if not database_included:

@@ -1659,7 +1685,14 @@ class CoreBackupReaderWriter(BackupReaderWriter):
                 file_filter=is_excluded_by_filter,
                 arcname="data",
             )
-        return (tar_file_path, tar_file_path.stat().st_size)
+        try:
+            stat_result = tar_file_path.stat()
+        except OSError as err:
+            raise BackupReaderWriterError(
+                f"Error getting size of {tar_file_path}: "
+                f"{err} ({err.__class__.__name__})"
+            ) from err
+        return (tar_file_path, stat_result.st_size)

     async def async_receive_backup(
         self,
@@ -8,5 +8,5 @@
   "integration_type": "system",
   "iot_class": "calculated",
   "quality_scale": "internal",
-  "requirements": ["cronsim==2.6", "securetar==2025.1.4"]
+  "requirements": ["cronsim==2.6", "securetar==2025.2.1"]
 }
@@ -16,7 +16,7 @@ if TYPE_CHECKING:
 STORE_DELAY_SAVE = 30
 STORAGE_KEY = DOMAIN
 STORAGE_VERSION = 1
-STORAGE_VERSION_MINOR = 4
+STORAGE_VERSION_MINOR = 5


 class StoredBackupData(TypedDict):

@@ -67,6 +67,11 @@ class _BackupStore(Store[StoredBackupData]):
             data["config"]["retention"]["copies"] = None
         if data["config"]["retention"]["days"] == 0:
             data["config"]["retention"]["days"] = None
+        if old_minor_version < 5:
+            # Version 1.5 adds automatic_backups_configured
+            data["config"]["automatic_backups_configured"] = (
+                data["config"]["create_backup"]["password"] is not None
+            )

         # Note: We allow reading data with major version 2.
         # Reject if major version is higher than 2.
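The bump to storage minor version 1.5 back-fills the new automatic_backups_configured key from data already on disk: a stored backup password is taken to mean automatic backups were configured. Reduced to a plain function, the migration step looks roughly like this (a sketch; the real code runs inside the store's migrate hook, not a free function):

def migrate(data: dict, old_minor_version: int) -> dict:
    """Sketch of the 1.4 -> 1.5 step."""
    if old_minor_version < 5:
        # Derive the new flag from data that already exists on disk.
        data["config"]["automatic_backups_configured"] = (
            data["config"]["create_backup"]["password"] is not None
        )
    return data

old = {"config": {"create_backup": {"password": "hunter2"}}}
assert migrate(old, 4)["config"]["automatic_backups_configured"] is True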
@@ -1,5 +1,9 @@
 {
   "issues": {
+    "automatic_backup_agents_unavailable": {
+      "title": "The backup location {agent_id} is unavailable",
+      "description": "The backup location `{agent_id}` is unavailable but is still configured for automatic backups.\n\nPlease visit the [automatic backup configuration page]({backup_settings}) to review and update your backup locations. Backups will not be uploaded to selected locations that are unavailable."
+    },
     "automatic_backup_failed_create": {
       "title": "Automatic backup could not be created",
       "description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
@@ -10,11 +10,7 @@ from homeassistant.helpers import config_validation as cv

 from .config import Day, ScheduleRecurrence
 from .const import DATA_MANAGER, LOGGER
-from .manager import (
-    DecryptOnDowloadNotSupported,
-    IncorrectPasswordError,
-    ManagerStateEvent,
-)
+from .manager import DecryptOnDowloadNotSupported, IncorrectPasswordError
 from .models import BackupNotFound, Folder


@@ -34,7 +30,6 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) ->
     websocket_api.async_register_command(hass, handle_create_with_automatic_settings)
     websocket_api.async_register_command(hass, handle_delete)
     websocket_api.async_register_command(hass, handle_restore)
-    websocket_api.async_register_command(hass, handle_subscribe_events)

     websocket_api.async_register_command(hass, handle_config_info)
     websocket_api.async_register_command(hass, handle_config_update)

@@ -352,6 +347,7 @@ async def handle_config_info(
     {
         vol.Required("type"): "backup/config/update",
         vol.Optional("agents"): vol.Schema({str: {"protected": bool}}),
+        vol.Optional("automatic_backups_configured"): bool,
         vol.Optional("create_backup"): vol.Schema(
             {
                 vol.Optional("agent_ids"): vol.All([str], vol.Unique()),

@@ -400,22 +396,3 @@ def handle_config_update(
     changes.pop("type")
     manager.config.update(**changes)
     connection.send_result(msg["id"])
-
-
-@websocket_api.require_admin
-@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
-@websocket_api.async_response
-async def handle_subscribe_events(
-    hass: HomeAssistant,
-    connection: websocket_api.ActiveConnection,
-    msg: dict[str, Any],
-) -> None:
-    """Subscribe to backup events."""
-
-    def on_event(event: ManagerStateEvent) -> None:
-        connection.send_message(websocket_api.event_message(msg["id"], event))
-
-    manager = hass.data[DATA_MANAGER]
-    on_event(manager.last_event)
-    connection.subscriptions[msg["id"]] = manager.async_subscribe_events(on_event)
-    connection.send_result(msg["id"])
@@ -28,7 +28,7 @@
       "name": "Activity",
       "state": {
         "available": "Available",
-        "charging": "Charging",
+        "charging": "[%key:common::state::charging%]",
         "unavailable": "Unavailable",
         "error": "Error",
         "offline": "Offline"
@@ -21,6 +21,6 @@
     "bluetooth-auto-recovery==1.4.4",
     "bluetooth-data-tools==1.23.4",
     "dbus-fast==2.33.0",
-    "habluetooth==3.22.1"
+    "habluetooth==3.24.1"
   ]
 }
@@ -138,7 +138,7 @@
       "name": "Charging status",
       "state": {
         "default": "Default",
-        "charging": "Charging",
+        "charging": "[%key:common::state::charging%]",
         "error": "Error",
         "complete": "Complete",
         "fully_charged": "Fully charged",
@@ -0,0 +1 @@
"""Virtual integration: Burbank Water and Power (BWP)."""

@@ -0,0 +1,6 @@
{
  "domain": "burbank_water_and_power",
  "name": "Burbank Water and Power (BWP)",
  "integration_type": "virtual",
  "supported_by": "opower"
}
@@ -8,6 +8,6 @@
   "iot_class": "local_push",
   "loggers": ["aiostreammagic"],
   "quality_scale": "platinum",
-  "requirements": ["aiostreammagic==2.10.0"],
+  "requirements": ["aiostreammagic==2.11.0"],
   "zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."]
 }
@@ -104,7 +104,7 @@ class CiscoDeviceScanner(DeviceScanner):
         """Open connection to the router and get arp entries."""

         try:
-            cisco_ssh: pxssh.pxssh[str] = pxssh.pxssh(encoding="uft-8")
+            cisco_ssh: pxssh.pxssh[str] = pxssh.pxssh(encoding="utf-8")
             cisco_ssh.login(
                 self.host,
                 self.username,
@@ -68,7 +68,6 @@ from .const import (  # noqa: F401
     FAN_ON,
     FAN_TOP,
     HVAC_MODES,
-    INTENT_GET_TEMPERATURE,
     INTENT_SET_TEMPERATURE,
     PRESET_ACTIVITY,
     PRESET_AWAY,

@@ -126,7 +126,6 @@ DEFAULT_MAX_HUMIDITY = 99

 DOMAIN = "climate"

-INTENT_GET_TEMPERATURE = "HassClimateGetTemperature"
 INTENT_SET_TEMPERATURE = "HassClimateSetTemperature"

 SERVICE_SET_AUX_HEAT = "set_aux_heat"

@@ -1,4 +1,4 @@
-"""Intents for the client integration."""
+"""Intents for the climate integration."""

 from __future__ import annotations

@@ -11,7 +11,6 @@ from homeassistant.helpers import config_validation as cv, intent
 from . import (
     ATTR_TEMPERATURE,
     DOMAIN,
-    INTENT_GET_TEMPERATURE,
     INTENT_SET_TEMPERATURE,
     SERVICE_SET_TEMPERATURE,
     ClimateEntityFeature,

@@ -20,49 +19,9 @@ from . import (

 async def async_setup_intents(hass: HomeAssistant) -> None:
     """Set up the climate intents."""
-    intent.async_register(hass, GetTemperatureIntent())
     intent.async_register(hass, SetTemperatureIntent())


-class GetTemperatureIntent(intent.IntentHandler):
-    """Handle GetTemperature intents."""
-
-    intent_type = INTENT_GET_TEMPERATURE
-    description = "Gets the current temperature of a climate device or entity"
-    slot_schema = {
-        vol.Optional("area"): intent.non_empty_string,
-        vol.Optional("name"): intent.non_empty_string,
-    }
-    platforms = {DOMAIN}
-
-    async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse:
-        """Handle the intent."""
-        hass = intent_obj.hass
-        slots = self.async_validate_slots(intent_obj.slots)
-
-        name: str | None = None
-        if "name" in slots:
-            name = slots["name"]["value"]
-
-        area: str | None = None
-        if "area" in slots:
-            area = slots["area"]["value"]
-
-        match_constraints = intent.MatchTargetsConstraints(
-            name=name, area_name=area, domains=[DOMAIN], assistant=intent_obj.assistant
-        )
-        match_result = intent.async_match_targets(hass, match_constraints)
-        if not match_result.is_match:
-            raise intent.MatchFailedError(
-                result=match_result, constraints=match_constraints
-            )
-
-        response = intent_obj.create_response()
-        response.response_type = intent.IntentResponseType.QUERY_ANSWER
-        response.async_set_states(matched_states=match_result.states)
-        return response
-
-
 class SetTemperatureIntent(intent.IntentHandler):
     """Handle SetTemperature intents."""
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.2.5"]
+  "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.2.26"]
 }
@@ -30,10 +30,15 @@ async def async_setup_entry(
     async_add_entities(
         [
             DemoWaterHeater(
-                "Demo Water Heater", 119, UnitOfTemperature.FAHRENHEIT, False, "eco"
+                "Demo Water Heater", 119, UnitOfTemperature.FAHRENHEIT, False, "eco", 1
             ),
             DemoWaterHeater(
-                "Demo Water Heater Celsius", 45, UnitOfTemperature.CELSIUS, True, "eco"
+                "Demo Water Heater Celsius",
+                45,
+                UnitOfTemperature.CELSIUS,
+                True,
+                "eco",
+                1,
             ),
         ]
     )

@@ -52,6 +57,7 @@ class DemoWaterHeater(WaterHeaterEntity):
         unit_of_measurement: str,
         away: bool,
         current_operation: str,
+        target_temperature_step: float,
     ) -> None:
         """Initialize the water_heater device."""
         self._attr_name = name

@@ -74,6 +80,7 @@ class DemoWaterHeater(WaterHeaterEntity):
             "gas",
             "off",
         ]
+        self._attr_target_temperature_step = target_temperature_step

     def set_temperature(self, **kwargs: Any) -> None:
         """Set new target temperatures."""
@@ -24,7 +24,14 @@ from homeassistant.const import (
     STATE_UNKNOWN,
     UnitOfTime,
 )
-from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback
+from homeassistant.core import (
+    Event,
+    EventStateChangedData,
+    EventStateReportedData,
+    HomeAssistant,
+    State,
+    callback,
+)
 from homeassistant.helpers import config_validation as cv, entity_registry as er
 from homeassistant.helpers.device import async_device_info_to_link_from_entity
 from homeassistant.helpers.device_registry import DeviceInfo

@@ -32,7 +39,10 @@ from homeassistant.helpers.entity_platform import (
     AddConfigEntryEntitiesCallback,
     AddEntitiesCallback,
 )
-from homeassistant.helpers.event import async_track_state_change_event
+from homeassistant.helpers.event import (
+    async_track_state_change_event,
+    async_track_state_report_event,
+)
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

 from .const import (

@@ -200,13 +210,33 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
                 _LOGGER.warning("Could not restore last state: %s", err)

         @callback
-        def calc_derivative(event: Event[EventStateChangedData]) -> None:
+        def on_state_reported(event: Event[EventStateReportedData]) -> None:
+            """Handle constant sensor state."""
+            if self._attr_native_value == Decimal(0):
+                # If the derivative is zero, and the source sensor hasn't
+                # changed state, then we know it will still be zero.
+                return
+            new_state = event.data["new_state"]
+            if new_state is not None:
+                calc_derivative(
+                    new_state, new_state.state, event.data["old_last_reported"]
+                )
+
+        @callback
+        def on_state_changed(event: Event[EventStateChangedData]) -> None:
+            """Handle changed sensor state."""
+            new_state = event.data["new_state"]
+            old_state = event.data["old_state"]
+            if new_state is not None and old_state is not None:
+                calc_derivative(new_state, old_state.state, old_state.last_reported)
+
+        def calc_derivative(
+            new_state: State, old_value: str, old_last_reported: datetime
+        ) -> None:
             """Handle the sensor state changes."""
-            if (
-                (old_state := event.data["old_state"]) is None
-                or old_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE)
-                or (new_state := event.data["new_state"]) is None
-                or new_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE)
+            if old_value in (STATE_UNKNOWN, STATE_UNAVAILABLE) or new_state.state in (
+                STATE_UNKNOWN,
+                STATE_UNAVAILABLE,
             ):
                 return

@@ -220,15 +250,15 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
             self._state_list = [
                 (time_start, time_end, state)
                 for time_start, time_end, state in self._state_list
-                if (new_state.last_updated - time_end).total_seconds()
+                if (new_state.last_reported - time_end).total_seconds()
                 < self._time_window
             ]

             try:
                 elapsed_time = (
-                    new_state.last_updated - old_state.last_updated
+                    new_state.last_reported - old_last_reported
                 ).total_seconds()
-                delta_value = Decimal(new_state.state) - Decimal(old_state.state)
+                delta_value = Decimal(new_state.state) - Decimal(old_value)
                 new_derivative = (
                     delta_value
                     / Decimal(elapsed_time)

@@ -240,7 +270,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
                 _LOGGER.warning("While calculating derivative: %s", err)
             except DecimalException as err:
                 _LOGGER.warning(
-                    "Invalid state (%s > %s): %s", old_state.state, new_state.state, err
+                    "Invalid state (%s > %s): %s", old_value, new_state.state, err
                 )
             except AssertionError as err:
                 _LOGGER.error("Could not calculate derivative: %s", err)

@@ -257,7 +287,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):

             # add latest derivative to the window list
             self._state_list.append(
-                (old_state.last_updated, new_state.last_updated, new_derivative)
+                (old_last_reported, new_state.last_reported, new_derivative)
             )

         def calculate_weight(

@@ -277,13 +307,19 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
             else:
                 derivative = Decimal("0.00")
                 for start, end, value in self._state_list:
-                    weight = calculate_weight(start, end, new_state.last_updated)
+                    weight = calculate_weight(start, end, new_state.last_reported)
                     derivative = derivative + (value * Decimal(weight))
             self._attr_native_value = round(derivative, self._round_digits)
             self.async_write_ha_state()

         self.async_on_remove(
             async_track_state_change_event(
-                self.hass, self._sensor_source_id, calc_derivative
+                self.hass, self._sensor_source_id, on_state_changed
             )
         )

+        self.async_on_remove(
+            async_track_state_report_event(
+                self.hass, self._sensor_source_id, on_state_reported
+            )
+        )
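The derivative refactor above separates event plumbing (on_state_changed vs. on_state_reported) from the maths, and switches timestamps from last_updated to last_reported so that periodic reports of an unchanged value still advance the time window. The core computation, reduced to a sketch:

from datetime import datetime, timedelta
from decimal import Decimal

def derivative(old_value: str, new_value: str,
               old_reported: datetime, new_reported: datetime) -> Decimal:
    """Finite difference over the reporting interval, as in calc_derivative."""
    elapsed = Decimal((new_reported - old_reported).total_seconds())
    return (Decimal(new_value) - Decimal(old_value)) / elapsed

t0 = datetime(2025, 3, 1, 12, 0, 0)
print(derivative("10", "16", t0, t0 + timedelta(seconds=60)))  # 0.1 (per second)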
@@ -8,6 +8,7 @@ from devolo_plc_api.device_api import (
     WifiGuestAccessGet,
 )
 from devolo_plc_api.plcnet_api import DataRate, LogicalNetwork
+from yarl import URL

 from homeassistant.const import ATTR_CONNECTIONS
 from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo

@@ -43,7 +44,7 @@ class DevoloEntity(Entity):
         self.entry = entry

         self._attr_device_info = DeviceInfo(
-            configuration_url=f"http://{self.device.ip}",
+            configuration_url=URL.build(scheme="http", host=self.device.ip),
             identifiers={(DOMAIN, str(self.device.serial_number))},
             manufacturer="devolo",
             model=self.device.product,
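Building configuration_url with yarl instead of an f-string gets correct host quoting for free, most visibly the bracketing of IPv6 literals. A quick check of the difference:

from yarl import URL

for host in ("192.0.2.1", "2001:db8::1"):
    print(URL.build(scheme="http", host=host))
# http://192.0.2.1
# http://[2001:db8::1]  (an f"http://{host}" would yield an invalid URL here)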
@@ -14,8 +14,8 @@
   ],
   "quality_scale": "internal",
   "requirements": [
-    "aiodhcpwatcher==1.1.0",
-    "aiodiscover==2.6.0",
-    "cached-ipaddress==0.8.0"
+    "aiodhcpwatcher==1.1.1",
+    "aiodiscover==2.6.1",
+    "cached-ipaddress==0.9.2"
   ]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ecovacs",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.10", "deebot-client==12.1.0"]
+  "requirements": ["py-sucks==0.9.10", "deebot-client==12.3.1"]
 }
@@ -105,6 +105,7 @@ class ElkArea(ElkAttachedEntity, AlarmControlPanelEntity, RestoreEntity):
         AlarmControlPanelEntityFeature.ARM_HOME
         | AlarmControlPanelEntityFeature.ARM_AWAY
         | AlarmControlPanelEntityFeature.ARM_NIGHT
+        | AlarmControlPanelEntityFeature.ARM_VACATION
     )
     _element: Area

@@ -204,7 +205,7 @@ class ElkArea(ElkAttachedEntity, AlarmControlPanelEntity, RestoreEntity):
         ArmedStatus.ARMED_STAY_INSTANT: AlarmControlPanelState.ARMED_HOME,
         ArmedStatus.ARMED_TO_NIGHT: AlarmControlPanelState.ARMED_NIGHT,
         ArmedStatus.ARMED_TO_NIGHT_INSTANT: AlarmControlPanelState.ARMED_NIGHT,
-        ArmedStatus.ARMED_TO_VACATION: AlarmControlPanelState.ARMED_AWAY,
+        ArmedStatus.ARMED_TO_VACATION: AlarmControlPanelState.ARMED_VACATION,
     }

     if self._element.alarm_state is None:
@@ -360,9 +360,9 @@
     "acb_battery_state": {
       "name": "Battery state",
      "state": {
-        "discharging": "Discharging",
+        "discharging": "[%key:common::state::discharging%]",
         "idle": "[%key:common::state::idle%]",
-        "charging": "Charging",
+        "charging": "[%key:common::state::charging%]",
         "full": "Full"
       }
     },
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/environment_canada",
   "iot_class": "cloud_polling",
   "loggers": ["env_canada"],
-  "requirements": ["env-canada==0.7.2"]
+  "requirements": ["env-canada==0.8.0"]
 }
@@ -22,5 +22,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["eq3btsmart"],
-  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.7.1"]
+  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.8.0"]
 }
@@ -3,6 +3,7 @@
 from __future__ import annotations

 from functools import partial
+from math import isfinite
 from typing import Any, cast

 from aioesphomeapi import (

@@ -238,9 +239,13 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
     @esphome_state_property
     def current_humidity(self) -> int | None:
         """Return the current humidity."""
-        if not self._static_info.supports_current_humidity:
+        if (
+            not self._static_info.supports_current_humidity
+            or (val := self._state.current_humidity) is None
+            or not isfinite(val)
+        ):
             return None
-        return round(self._state.current_humidity)
+        return round(val)

     @property
     @esphome_float_state_property
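The new humidity guard rejects None and non-finite readings before rounding, since round(float("nan")) raises ValueError and round(float("inf")) raises OverflowError. The guard in isolation:

from math import isfinite

def safe_round(val: float | None) -> int | None:
    """Mirror the guard above: only round real, finite readings."""
    if val is None or not isfinite(val):
        return None
    return round(val)

assert safe_round(47.6) == 48
assert safe_round(float("nan")) is None
assert safe_round(None) is None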
@@ -41,6 +41,7 @@ from .const import (
     CONF_ALLOW_SERVICE_CALLS,
     CONF_DEVICE_NAME,
     CONF_NOISE_PSK,
+    CONF_SUBSCRIBE_LOGS,
     DEFAULT_ALLOW_SERVICE_CALLS,
     DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS,
     DOMAIN,

@@ -508,6 +509,10 @@ class OptionsFlowHandler(OptionsFlow):
                     CONF_ALLOW_SERVICE_CALLS, DEFAULT_ALLOW_SERVICE_CALLS
                 ),
             ): bool,
+            vol.Required(
+                CONF_SUBSCRIBE_LOGS,
+                default=self.config_entry.options.get(CONF_SUBSCRIBE_LOGS, False),
+            ): bool,
         }
     )
     return self.async_show_form(step_id="init", data_schema=data_schema)
@@ -5,6 +5,7 @@ from awesomeversion import AwesomeVersion
 DOMAIN = "esphome"

 CONF_ALLOW_SERVICE_CALLS = "allow_service_calls"
+CONF_SUBSCRIBE_LOGS = "subscribe_logs"
 CONF_DEVICE_NAME = "device_name"
 CONF_NOISE_PSK = "noise_psk"

@@ -12,7 +13,7 @@ DEFAULT_ALLOW_SERVICE_CALLS = True
 DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS = False


-STABLE_BLE_VERSION_STR = "2023.8.0"
+STABLE_BLE_VERSION_STR = "2025.2.1"
 STABLE_BLE_VERSION = AwesomeVersion(STABLE_BLE_VERSION_STR)
 PROJECT_URLS = {
     "esphome.bluetooth-proxy": "https://esphome.github.io/bluetooth-proxies/",
@@ -5,6 +5,7 @@ from __future__ import annotations
 import asyncio
 from functools import partial
 import logging
+import re
 from typing import TYPE_CHECKING, Any, NamedTuple

 from aioesphomeapi import (

@@ -16,6 +17,7 @@ from aioesphomeapi import (
     HomeassistantServiceCall,
     InvalidAuthAPIError,
     InvalidEncryptionKeyAPIError,
+    LogLevel,
     ReconnectLogic,
     RequiresEncryptionAPIError,
     UserService,

@@ -33,6 +35,7 @@ from homeassistant.const import (
     Platform,
 )
 from homeassistant.core import (
+    CALLBACK_TYPE,
     Event,
     EventStateChangedData,
     HomeAssistant,

@@ -61,6 +64,7 @@ from .bluetooth import async_connect_scanner
 from .const import (
     CONF_ALLOW_SERVICE_CALLS,
     CONF_DEVICE_NAME,
+    CONF_SUBSCRIBE_LOGS,
     DEFAULT_ALLOW_SERVICE_CALLS,
     DEFAULT_URL,
     DOMAIN,

@@ -74,8 +78,38 @@ from .domain_data import DomainData
 # Import config flow so that it's added to the registry
 from .entry_data import ESPHomeConfigEntry, RuntimeEntryData

+if TYPE_CHECKING:
+    from aioesphomeapi.api_pb2 import (  # type: ignore[attr-defined]
+        SubscribeLogsResponse,
+    )
+
+
 _LOGGER = logging.getLogger(__name__)

+LOG_LEVEL_TO_LOGGER = {
+    LogLevel.LOG_LEVEL_NONE: logging.DEBUG,
+    LogLevel.LOG_LEVEL_ERROR: logging.ERROR,
+    LogLevel.LOG_LEVEL_WARN: logging.WARNING,
+    LogLevel.LOG_LEVEL_INFO: logging.INFO,
+    LogLevel.LOG_LEVEL_CONFIG: logging.INFO,
+    LogLevel.LOG_LEVEL_DEBUG: logging.DEBUG,
+    LogLevel.LOG_LEVEL_VERBOSE: logging.DEBUG,
+    LogLevel.LOG_LEVEL_VERY_VERBOSE: logging.DEBUG,
+}
+LOGGER_TO_LOG_LEVEL = {
+    logging.NOTSET: LogLevel.LOG_LEVEL_VERY_VERBOSE,
+    logging.DEBUG: LogLevel.LOG_LEVEL_VERY_VERBOSE,
+    logging.INFO: LogLevel.LOG_LEVEL_CONFIG,
+    logging.WARNING: LogLevel.LOG_LEVEL_WARN,
+    logging.ERROR: LogLevel.LOG_LEVEL_ERROR,
+    logging.CRITICAL: LogLevel.LOG_LEVEL_ERROR,
+}
+# 7-bit and 8-bit C1 ANSI sequences
+# https://stackoverflow.com/questions/14693701/how-can-i-remove-the-ansi-escape-sequences-from-a-string-in-python
+ANSI_ESCAPE_78BIT = re.compile(
+    rb"(?:\x1B[@-Z\\-_]|[\x80-\x9A\x9C-\x9F]|(?:\x1B\[|\x9B)[0-?]*[ -/]*[@-~])"
+)
+

 @callback
 def _async_check_firmware_version(

@@ -136,6 +170,8 @@ class ESPHomeManager:
     """Class to manage an ESPHome connection."""

     __slots__ = (
+        "_cancel_subscribe_logs",
+        "_log_level",
         "cli",
         "device_id",
         "domain_data",

@@ -169,6 +205,8 @@ class ESPHomeManager:
         self.reconnect_logic: ReconnectLogic | None = None
         self.zeroconf_instance = zeroconf_instance
         self.entry_data = entry.runtime_data
+        self._cancel_subscribe_logs: CALLBACK_TYPE | None = None
+        self._log_level = LogLevel.LOG_LEVEL_NONE

     async def on_stop(self, event: Event) -> None:
         """Cleanup the socket client on HA close."""

@@ -341,6 +379,34 @@ class ESPHomeManager:
         # Re-connection logic will trigger after this
         await self.cli.disconnect()

+    def _async_on_log(self, msg: SubscribeLogsResponse) -> None:
+        """Handle a log message from the API."""
+        log: bytes = msg.message
+        _LOGGER.log(
+            LOG_LEVEL_TO_LOGGER.get(msg.level, logging.DEBUG),
+            "%s: %s",
+            self.entry.title,
+            ANSI_ESCAPE_78BIT.sub(b"", log).decode("utf-8", "backslashreplace"),
+        )
+
+    @callback
+    def _async_get_equivalent_log_level(self) -> LogLevel:
+        """Get the equivalent ESPHome log level for the current logger."""
+        return LOGGER_TO_LOG_LEVEL.get(
+            _LOGGER.getEffectiveLevel(), LogLevel.LOG_LEVEL_VERY_VERBOSE
+        )
+
+    @callback
+    def _async_subscribe_logs(self, log_level: LogLevel) -> None:
+        """Subscribe to logs."""
+        if self._cancel_subscribe_logs is not None:
+            self._cancel_subscribe_logs()
+            self._cancel_subscribe_logs = None
+        self._log_level = log_level
+        self._cancel_subscribe_logs = self.cli.subscribe_logs(
+            self._async_on_log, self._log_level
+        )
+
     async def _on_connnect(self) -> None:
         """Subscribe to states and list entities on successful API login."""
         entry = self.entry

@@ -352,6 +418,8 @@ class ESPHomeManager:
         cli = self.cli
         stored_device_name = entry.data.get(CONF_DEVICE_NAME)
         unique_id_is_mac_address = unique_id and ":" in unique_id
+        if entry.options.get(CONF_SUBSCRIBE_LOGS):
+            self._async_subscribe_logs(self._async_get_equivalent_log_level())
         results = await asyncio.gather(
             create_eager_task(cli.device_info()),
             create_eager_task(cli.list_entities_services()),

@@ -503,6 +571,10 @@ class ESPHomeManager:
     def _async_handle_logging_changed(self, _event: Event) -> None:
         """Handle when the logging level changes."""
         self.cli.set_debug(_LOGGER.isEnabledFor(logging.DEBUG))
+        if self.entry.options.get(CONF_SUBSCRIBE_LOGS) and self._log_level != (
+            new_log_level := self._async_get_equivalent_log_level()
+        ):
+            self._async_subscribe_logs(new_log_level)

     async def async_start(self) -> None:
         """Start the esphome connection manager."""
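Device logs arrive color-coded, so the manager strips 7-bit and 8-bit ANSI escapes before handing each line to Python logging at the mapped level. The regex from the hunk above in action:

import re

ANSI_ESCAPE_78BIT = re.compile(
    rb"(?:\x1B[@-Z\\-_]|[\x80-\x9A\x9C-\x9F]|(?:\x1B\[|\x9B)[0-?]*[ -/]*[@-~])"
)

raw = b"\x1b[0;32m[I][app:102]: Ready\x1b[0m"  # sample ESPHome log line
print(ANSI_ESCAPE_78BIT.sub(b"", raw).decode("utf-8", "backslashreplace"))
# [I][app:102]: Ready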
@@ -16,9 +16,9 @@
   "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"],
   "mqtt": ["esphome/discover/#"],
   "requirements": [
-    "aioesphomeapi==29.1.1",
+    "aioesphomeapi==29.2.0",
     "esphome-dashboard-api==1.2.3",
-    "bleak-esphome==2.7.1"
+    "bleak-esphome==2.8.0"
   ],
   "zeroconf": ["_esphomelib._tcp.local."]
 }
@@ -54,7 +54,8 @@
     "step": {
       "init": {
         "data": {
-          "allow_service_calls": "Allow the device to perform Home Assistant actions."
+          "allow_service_calls": "Allow the device to perform Home Assistant actions.",
+          "subscribe_logs": "Subscribe to logs from the device. When enabled, the device will send logs to Home Assistant and you can view them in the logs panel."
         }
       }
     }
@@ -25,6 +25,7 @@ import voluptuous as vol

 from homeassistant.const import (
     ATTR_ENTITY_ID,
+    ATTR_MODE,
     CONF_PASSWORD,
     CONF_SCAN_INTERVAL,
     CONF_USERNAME,

@@ -40,11 +41,10 @@ from homeassistant.helpers.typing import ConfigType
 from homeassistant.util.hass_dict import HassKey

 from .const import (
-    ATTR_DURATION_DAYS,
-    ATTR_DURATION_HOURS,
+    ATTR_DURATION,
     ATTR_DURATION_UNTIL,
-    ATTR_SYSTEM_MODE,
-    ATTR_ZONE_TEMP,
+    ATTR_PERIOD,
+    ATTR_SETPOINT,
     CONF_LOCATION_IDX,
     DOMAIN,
     SCAN_INTERVAL_DEFAULT,

@@ -81,7 +81,7 @@ RESET_ZONE_OVERRIDE_SCHEMA: Final = vol.Schema(
 SET_ZONE_OVERRIDE_SCHEMA: Final = vol.Schema(
     {
         vol.Required(ATTR_ENTITY_ID): cv.entity_id,
-        vol.Required(ATTR_ZONE_TEMP): vol.All(
+        vol.Required(ATTR_SETPOINT): vol.All(
             vol.Coerce(float), vol.Range(min=4.0, max=35.0)
         ),
         vol.Optional(ATTR_DURATION_UNTIL): vol.All(

@@ -222,7 +222,7 @@ def setup_service_functions(
     # Permanent-only modes will use this schema
     perm_modes = [m[SZ_SYSTEM_MODE] for m in modes if not m[SZ_CAN_BE_TEMPORARY]]
     if perm_modes:  # any of: "Auto", "HeatingOff": permanent only
-        schema = vol.Schema({vol.Required(ATTR_SYSTEM_MODE): vol.In(perm_modes)})
+        schema = vol.Schema({vol.Required(ATTR_MODE): vol.In(perm_modes)})
         system_mode_schemas.append(schema)

     modes = [m for m in modes if m[SZ_CAN_BE_TEMPORARY]]

@@ -232,8 +232,8 @@ def setup_service_functions(
     if temp_modes:  # any of: "AutoWithEco", permanent or for 0-24 hours
         schema = vol.Schema(
             {
-                vol.Required(ATTR_SYSTEM_MODE): vol.In(temp_modes),
-                vol.Optional(ATTR_DURATION_HOURS): vol.All(
+                vol.Required(ATTR_MODE): vol.In(temp_modes),
+                vol.Optional(ATTR_DURATION): vol.All(
                     cv.time_period,
                     vol.Range(min=timedelta(hours=0), max=timedelta(hours=24)),
                 ),

@@ -246,8 +246,8 @@ def setup_service_functions(
     if temp_modes:  # any of: "Away", "Custom", "DayOff", permanent or for 1-99 days
         schema = vol.Schema(
             {
-                vol.Required(ATTR_SYSTEM_MODE): vol.In(temp_modes),
-                vol.Optional(ATTR_DURATION_DAYS): vol.All(
+                vol.Required(ATTR_MODE): vol.In(temp_modes),
+                vol.Optional(ATTR_PERIOD): vol.All(
                     cv.time_period,
                     vol.Range(min=timedelta(days=1), max=timedelta(days=99)),
                 ),

@@ -29,7 +29,7 @@ from homeassistant.components.climate import (
     ClimateEntityFeature,
     HVACMode,
 )
-from homeassistant.const import PRECISION_TENTHS, UnitOfTemperature
+from homeassistant.const import ATTR_MODE, PRECISION_TENTHS, UnitOfTemperature
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.entity_platform import AddEntitiesCallback

@@ -38,11 +38,10 @@ from homeassistant.util import dt as dt_util

 from . import EVOHOME_KEY
 from .const import (
-    ATTR_DURATION_DAYS,
-    ATTR_DURATION_HOURS,
+    ATTR_DURATION,
     ATTR_DURATION_UNTIL,
-    ATTR_SYSTEM_MODE,
-    ATTR_ZONE_TEMP,
+    ATTR_PERIOD,
+    ATTR_SETPOINT,
     EvoService,
 )
 from .coordinator import EvoDataUpdateCoordinator

@@ -180,7 +179,7 @@ class EvoZone(EvoChild, EvoClimateEntity):
             return

         # otherwise it is EvoService.SET_ZONE_OVERRIDE
-        temperature = max(min(data[ATTR_ZONE_TEMP], self.max_temp), self.min_temp)
+        temperature = max(min(data[ATTR_SETPOINT], self.max_temp), self.min_temp)

         if ATTR_DURATION_UNTIL in data:
             duration: timedelta = data[ATTR_DURATION_UNTIL]

@@ -349,16 +348,16 @@ class EvoController(EvoClimateEntity):
         Data validation is not required, it will have been done upstream.
         """
         if service == EvoService.SET_SYSTEM_MODE:
-            mode = data[ATTR_SYSTEM_MODE]
+            mode = data[ATTR_MODE]
         else:  # otherwise it is EvoService.RESET_SYSTEM
             mode = EvoSystemMode.AUTO_WITH_RESET

-        if ATTR_DURATION_DAYS in data:
+        if ATTR_PERIOD in data:
             until = dt_util.start_of_local_day()
-            until += data[ATTR_DURATION_DAYS]
+            until += data[ATTR_PERIOD]

-        elif ATTR_DURATION_HOURS in data:
-            until = dt_util.now() + data[ATTR_DURATION_HOURS]
+        elif ATTR_DURATION in data:
+            until = dt_util.now() + data[ATTR_DURATION]

         else:
             until = None

@@ -18,11 +18,10 @@ USER_DATA: Final = "user_data"
 SCAN_INTERVAL_DEFAULT: Final = timedelta(seconds=300)
 SCAN_INTERVAL_MINIMUM: Final = timedelta(seconds=60)

-ATTR_SYSTEM_MODE: Final = "mode"
-ATTR_DURATION_DAYS: Final = "period"
-ATTR_DURATION_HOURS: Final = "duration"
+ATTR_PERIOD: Final = "period"  # number of days
+ATTR_DURATION: Final = "duration"  # number of minutes, <24h

-ATTR_ZONE_TEMP: Final = "setpoint"
+ATTR_SETPOINT: Final = "setpoint"
 ATTR_DURATION_UNTIL: Final = "duration"
@@ -141,11 +141,6 @@ class EzvizCamera(EzvizEntity, Camera):
         if camera_password:
             self._attr_supported_features = CameraEntityFeature.STREAM

-    @property
-    def available(self) -> bool:
-        """Return True if entity is available."""
-        return self.data["status"] != 2
-
     @property
     def is_on(self) -> bool:
         """Return true if on."""

@@ -42,6 +42,11 @@ class EzvizEntity(CoordinatorEntity[EzvizDataUpdateCoordinator], Entity):
         """Return coordinator data for this entity."""
         return self.coordinator.data[self._serial]

+    @property
+    def available(self) -> bool:
+        """Return True if entity is available."""
+        return self.data["status"] != 2
+

 class EzvizBaseEntity(Entity):
     """Generic entity for EZVIZ individual poll entities."""

@@ -72,3 +77,8 @@ class EzvizBaseEntity(Entity):
     def data(self) -> dict[str, Any]:
         """Return coordinator data for this entity."""
         return self.coordinator.data[self._serial]
+
+    @property
+    def available(self) -> bool:
+        """Return True if entity is available."""
+        return self.data["status"] != 2
@@ -4,6 +4,7 @@ from __future__ import annotations

 import logging

+from propcache.api import cached_property
 from pyezviz.exceptions import PyEzvizError
 from pyezviz.utils import decrypt_image

@@ -62,6 +63,11 @@ class EzvizLastMotion(EzvizEntity, ImageEntity):
             else None
         )

+    @cached_property
+    def available(self) -> bool:
+        """Entity gets data from ezviz API so always available."""
+        return True
+
     async def _async_load_image_from_url(self, url: str) -> Image | None:
         """Load an image by url."""
         if response := await self._fetch_url(url):
@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/flexit_bacnet",
   "integration_type": "device",
   "iot_class": "local_polling",
-  "quality_scale": "bronze",
+  "quality_scale": "silver",
   "requirements": ["flexit_bacnet==2.2.3"]
 }
@@ -7,7 +7,7 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["pyfritzhome"],
-  "requirements": ["pyfritzhome==0.6.15"],
+  "requirements": ["pyfritzhome==0.6.17"],
   "ssdp": [
     {
       "st": "urn:schemas-upnp-org:device:fritzbox:1"
@@ -1,7 +1,6 @@
 {
   "domain": "frontend",
   "name": "Home Assistant Frontend",
-  "after_dependencies": ["backup"],
   "codeowners": ["@home-assistant/frontend"],
   "dependencies": [
     "api",

@@ -21,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20250214.0"]
+  "requirements": ["home-assistant-frontend==20250228.0"]
 }
@@ -7,7 +7,7 @@ from collections.abc import Callable
 from google_drive_api.exceptions import GoogleDriveApiError

 from homeassistant.config_entries import ConfigEntry
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import instance_id
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

@@ -49,7 +49,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleDriveConfigEntry)
     except GoogleDriveApiError as err:
         raise ConfigEntryNotReady from err

-    _async_notify_backup_listeners_soon(hass)
+    def async_notify_backup_listeners() -> None:
+        for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
+            listener()
+
+    entry.async_on_unload(entry.async_on_state_change(async_notify_backup_listeners))

     return True

@@ -58,15 +62,4 @@ async def async_unload_entry(
     hass: HomeAssistant, entry: GoogleDriveConfigEntry
 ) -> bool:
     """Unload a config entry."""
-    _async_notify_backup_listeners_soon(hass)
     return True
-
-
-def _async_notify_backup_listeners(hass: HomeAssistant) -> None:
-    for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
-        listener()
-
-
-@callback
-def _async_notify_backup_listeners_soon(hass: HomeAssistant) -> None:
-    hass.loop.call_soon(_async_notify_backup_listeners, hass)
@@ -2,14 +2,11 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import mimetypes
|
||||
from pathlib import Path
|
||||
|
||||
from google.ai import generativelanguage_v1beta
|
||||
from google.api_core.client_options import ClientOptions
|
||||
from google.api_core.exceptions import ClientError, DeadlineExceeded, GoogleAPIError
|
||||
import google.generativeai as genai
|
||||
import google.generativeai.types as genai_types
|
||||
from google import genai # type: ignore[attr-defined]
|
||||
from google.genai.errors import APIError, ClientError
|
||||
from requests.exceptions import Timeout
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -27,59 +24,86 @@ from homeassistant.exceptions import (
|
||||
HomeAssistantError,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import CONF_CHAT_MODEL, CONF_PROMPT, DOMAIN, RECOMMENDED_CHAT_MODEL
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
CONF_PROMPT,
|
||||
DOMAIN,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
TIMEOUT_MILLIS,
|
||||
)
|
||||
|
||||
SERVICE_GENERATE_CONTENT = "generate_content"
|
||||
CONF_IMAGE_FILENAME = "image_filename"
CONF_FILENAMES = "filenames"

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
PLATFORMS = (Platform.CONVERSATION,)

type GoogleGenerativeAIConfigEntry = ConfigEntry[genai.Client]


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up Google Generative AI Conversation."""

    async def generate_content(call: ServiceCall) -> ServiceResponse:
        """Generate content from text and optionally images."""
        prompt_parts = [call.data[CONF_PROMPT]]
        image_filenames = call.data[CONF_IMAGE_FILENAME]
        for image_filename in image_filenames:
            if not hass.config.is_allowed_path(image_filename):
                raise HomeAssistantError(
                    f"Cannot read `{image_filename}`, no access to path; "
                    "`allowlist_external_dirs` may need to be adjusted in "
                    "`configuration.yaml`"
                )
            if not Path(image_filename).exists():
                raise HomeAssistantError(f"`{image_filename}` does not exist")
            mime_type, _ = mimetypes.guess_type(image_filename)
            if mime_type is None or not mime_type.startswith("image"):
                raise HomeAssistantError(f"`{image_filename}` is not an image")
            prompt_parts.append(
                {
                    "mime_type": mime_type,
                    "data": await hass.async_add_executor_job(
                        Path(image_filename).read_bytes
                    ),
                }

        if call.data[CONF_IMAGE_FILENAME]:
            # Deprecated in 2025.3, to remove in 2025.9
            async_create_issue(
                hass,
                DOMAIN,
                "deprecated_image_filename_parameter",
                breaks_in_ha_version="2025.9.0",
                is_fixable=False,
                severity=IssueSeverity.WARNING,
                translation_key="deprecated_image_filename_parameter",
            )

        model = genai.GenerativeModel(model_name=RECOMMENDED_CHAT_MODEL)
        prompt_parts = [call.data[CONF_PROMPT]]

        config_entry: GoogleGenerativeAIConfigEntry = hass.config_entries.async_entries(
            DOMAIN
        )[0]

        client = config_entry.runtime_data

        def append_files_to_prompt():
            image_filenames = call.data[CONF_IMAGE_FILENAME]
            filenames = call.data[CONF_FILENAMES]
            for filename in set(image_filenames + filenames):
                if not hass.config.is_allowed_path(filename):
                    raise HomeAssistantError(
                        f"Cannot read `{filename}`, no access to path; "
                        "`allowlist_external_dirs` may need to be adjusted in "
                        "`configuration.yaml`"
                    )
                if not Path(filename).exists():
                    raise HomeAssistantError(f"`{filename}` does not exist")
                prompt_parts.append(client.files.upload(file=filename))

        await hass.async_add_executor_job(append_files_to_prompt)

        try:
            response = await model.generate_content_async(prompt_parts)
            response = await client.aio.models.generate_content(
                model=RECOMMENDED_CHAT_MODEL, contents=prompt_parts
            )
        except (
            GoogleAPIError,
            APIError,
            ValueError,
            genai_types.BlockedPromptException,
            genai_types.StopCandidateException,
        ) as err:
            raise HomeAssistantError(f"Error generating content: {err}") from err

        if not response.parts:
            raise HomeAssistantError("Error generating content")
        if response.prompt_feedback:
            raise HomeAssistantError(
                f"Error generating content due to content violations, reason: {response.prompt_feedback.block_reason_message}"
            )

        if not response.candidates[0].content.parts:
            raise HomeAssistantError("Unknown error generating content")

        return {"text": response.text}

@@ -93,6 +117,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
                vol.Optional(CONF_IMAGE_FILENAME, default=[]): vol.All(
                    cv.ensure_list, [cv.string]
                ),
                vol.Optional(CONF_FILENAMES, default=[]): vol.All(
                    cv.ensure_list, [cv.string]
                ),
            }
        ),
        supports_response=SupportsResponse.ONLY,
@@ -100,30 +127,34 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    return True


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(
    hass: HomeAssistant, entry: GoogleGenerativeAIConfigEntry
) -> bool:
    """Set up Google Generative AI Conversation from a config entry."""
    genai.configure(api_key=entry.data[CONF_API_KEY])

    try:
        client = generativelanguage_v1beta.ModelServiceAsyncClient(
            client_options=ClientOptions(api_key=entry.data[CONF_API_KEY])
        client = genai.Client(api_key=entry.data[CONF_API_KEY])
        await client.aio.models.get(
            model=entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL),
            config={"http_options": {"timeout": TIMEOUT_MILLIS}},
        )
        await client.get_model(
            name=entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL), timeout=5.0
        )
    except (GoogleAPIError, ValueError) as err:
        if isinstance(err, ClientError) and err.reason == "API_KEY_INVALID":
            raise ConfigEntryAuthFailed(err) from err
        if isinstance(err, DeadlineExceeded):
    except (APIError, Timeout) as err:
        if isinstance(err, ClientError) and "API_KEY_INVALID" in str(err):
            raise ConfigEntryAuthFailed(err.message) from err
        if isinstance(err, Timeout):
            raise ConfigEntryNotReady(err) from err
        raise ConfigEntryError(err) from err
    else:
        entry.runtime_data = client

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True
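
As a hedged illustration of the new error mapping (in google-genai, ClientError is the APIError subclass raised for 4xx responses, so an invalid key surfaces there; the model name is a placeholder):

try:
    await client.aio.models.get(
        model="models/gemini-2.0-flash",
        config={"http_options": {"timeout": TIMEOUT_MILLIS}},
    )
except ClientError as err:
    # 4xx from the API; the invalid-key case is detected by message text
    if "API_KEY_INVALID" in str(err):
        raise ConfigEntryAuthFailed(err.message) from err
    raise ConfigEntryError(err) from err
except Timeout as err:
    # requests-level timeout maps to a retry via ConfigEntryNotReady
    raise ConfigEntryNotReady(err) from err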


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(
    hass: HomeAssistant, entry: GoogleGenerativeAIConfigEntry
) -> bool:
    """Unload GoogleGenerativeAI."""
    if not await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        return False

@@ -3,15 +3,13 @@
from __future__ import annotations

from collections.abc import Mapping
from functools import partial
import logging
from types import MappingProxyType
from typing import Any

from google.ai import generativelanguage_v1beta
from google.api_core.client_options import ClientOptions
from google.api_core.exceptions import ClientError, GoogleAPIError
import google.generativeai as genai
from google import genai  # type: ignore[attr-defined]
from google.genai.errors import APIError, ClientError
from requests.exceptions import Timeout
import voluptuous as vol

from homeassistant.config_entries import (
@@ -53,6 +51,7 @@ from .const import (
    RECOMMENDED_TEMPERATURE,
    RECOMMENDED_TOP_K,
    RECOMMENDED_TOP_P,
    TIMEOUT_MILLIS,
)

_LOGGER = logging.getLogger(__name__)
@@ -70,15 +69,20 @@ RECOMMENDED_OPTIONS = {
}


async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
async def validate_input(data: dict[str, Any]) -> None:
    """Validate the user input allows us to connect.

    Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
    """
    client = generativelanguage_v1beta.ModelServiceAsyncClient(
        client_options=ClientOptions(api_key=data[CONF_API_KEY])
    client = genai.Client(api_key=data[CONF_API_KEY])
    await client.aio.models.list(
        config={
            "http_options": {
                "timeout": TIMEOUT_MILLIS,
            },
            "query_base": True,
        }
    )
    await client.list_models(timeout=5.0)


class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN):
@@ -93,9 +97,9 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN):
        errors: dict[str, str] = {}
        if user_input is not None:
            try:
                await validate_input(self.hass, user_input)
            except GoogleAPIError as err:
                if isinstance(err, ClientError) and err.reason == "API_KEY_INVALID":
                await validate_input(user_input)
            except (APIError, Timeout) as err:
                if isinstance(err, ClientError) and "API_KEY_INVALID" in str(err):
                    errors["base"] = "invalid_auth"
                else:
                    errors["base"] = "cannot_connect"
@@ -166,6 +170,7 @@ class GoogleGenerativeAIOptionsFlow(OptionsFlow):
        self.last_rendered_recommended = config_entry.options.get(
            CONF_RECOMMENDED, False
        )
        self._genai_client = config_entry.runtime_data

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
@@ -188,7 +193,9 @@ class GoogleGenerativeAIOptionsFlow(OptionsFlow):
                CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
            }

        schema = await google_generative_ai_config_option_schema(self.hass, options)
        schema = await google_generative_ai_config_option_schema(
            self.hass, options, self._genai_client
        )
        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema(schema),
@@ -198,6 +205,7 @@ class GoogleGenerativeAIOptionsFlow(OptionsFlow):
async def google_generative_ai_config_option_schema(
    hass: HomeAssistant,
    options: dict[str, Any] | MappingProxyType[str, Any],
    genai_client: genai.Client,
) -> dict:
    """Return a schema for Google Generative AI completion options."""
    hass_apis: list[SelectOptionDict] = [
@@ -236,18 +244,21 @@ async def google_generative_ai_config_option_schema(
    if options.get(CONF_RECOMMENDED):
        return schema

    api_models = await hass.async_add_executor_job(partial(genai.list_models))

    api_models_pager = await genai_client.aio.models.list(config={"query_base": True})
    api_models = [api_model async for api_model in api_models_pager]
    models = [
        SelectOptionDict(
            label=api_model.display_name,
            value=api_model.name,
        )
        for api_model in sorted(api_models, key=lambda x: x.display_name)
        for api_model in sorted(api_models, key=lambda x: x.display_name or "")
        if (
            api_model.name != "models/gemini-1.0-pro"  # duplicate of gemini-pro
            and api_model.display_name
            and api_model.name
            and api_model.supported_actions
            and "vision" not in api_model.name
            and "generateContent" in api_model.supported_generation_methods
            and "generateContent" in api_model.supported_actions
        )
    ]


@@ -22,3 +22,5 @@ CONF_HATE_BLOCK_THRESHOLD = "hate_block_threshold"
CONF_SEXUAL_BLOCK_THRESHOLD = "sexual_block_threshold"
CONF_DANGEROUS_BLOCK_THRESHOLD = "dangerous_block_threshold"
RECOMMENDED_HARM_BLOCK_THRESHOLD = "BLOCK_MEDIUM_AND_ABOVE"

TIMEOUT_MILLIS = 10000

@@ -6,11 +6,18 @@ import codecs
from collections.abc import Callable
from typing import Any, Literal, cast

from google.api_core.exceptions import GoogleAPIError
import google.generativeai as genai
from google.generativeai import protos
import google.generativeai.types as genai_types
from google.protobuf.json_format import MessageToDict
from google.genai.errors import APIError
from google.genai.types import (
    AutomaticFunctionCallingConfig,
    Content,
    FunctionDeclaration,
    GenerateContentConfig,
    HarmCategory,
    Part,
    SafetySetting,
    Schema,
    Tool,
)
from voluptuous_openapi import convert

from homeassistant.components import assist_pipeline, conversation
@@ -57,21 +64,40 @@ async def async_setup_entry(


SUPPORTED_SCHEMA_KEYS = {
    "type",
    "format",
    "description",
    "min_items",
    "example",
    "property_ordering",
    "pattern",
    "minimum",
    "default",
    "any_of",
    "max_length",
    "title",
    "min_properties",
    "min_length",
    "max_items",
    "maximum",
    "nullable",
    "max_properties",
    "type",
    "description",
    "enum",
    "format",
    "items",
    "properties",
    "required",
}


def _format_schema(schema: dict[str, Any]) -> dict[str, Any]:
    """Format the schema to protobuf."""
    if (subschemas := schema.get("anyOf")) or (subschemas := schema.get("allOf")):
        for subschema in subschemas:  # Gemini API does not support anyOf and allOf keys
def _camel_to_snake(name: str) -> str:
    """Convert camel case to snake case."""
    return "".join(["_" + c.lower() if c.isupper() else c for c in name]).lstrip("_")


def _format_schema(schema: dict[str, Any]) -> Schema:
    """Format the schema to be compatible with Gemini API."""
    if subschemas := schema.get("allOf"):
        for subschema in subschemas:  # Gemini API does not support allOf keys
            if "type" in subschema:  # Fallback to first subschema with 'type' field
                return _format_schema(subschema)
        return _format_schema(
@@ -80,42 +106,49 @@ def _format_schema(schema: dict[str, Any]) -> dict[str, Any]:

    result = {}
    for key, val in schema.items():
        key = _camel_to_snake(key)
        if key not in SUPPORTED_SCHEMA_KEYS:
            continue
        if key == "type":
            key = "type_"
        if key == "any_of":
            val = [_format_schema(subschema) for subschema in val]
        elif key == "type":
            val = val.upper()
        elif key == "format":
            if schema.get("type") == "string" and val != "enum":
            # Gemini API does not support all formats, see: https://ai.google.dev/api/caching#Schema
            # formats that are not supported are ignored
            if schema.get("type") == "string" and val not in ("enum", "date-time"):
                continue
            if schema.get("type") not in ("number", "integer", "string"):
            if schema.get("type") == "number" and val not in ("float", "double"):
                continue
            if schema.get("type") == "integer" and val not in ("int32", "int64"):
                continue
            if schema.get("type") not in ("string", "number", "integer"):
                continue
            key = "format_"
        elif key == "items":
            val = _format_schema(val)
        elif key == "properties":
            val = {k: _format_schema(v) for k, v in val.items()}
        result[key] = val

    if result.get("enum") and result.get("type_") != "STRING":
    if result.get("enum") and result.get("type") != "STRING":
        # enum is only allowed for STRING type. This is safe as long as the schema
        # contains vol.Coerce for the respective type, for example:
        # vol.All(vol.Coerce(int), vol.In([1, 2, 3]))
        result["type_"] = "STRING"
        result["type"] = "STRING"
        result["enum"] = [str(item) for item in result["enum"]]

    if result.get("type_") == "OBJECT" and not result.get("properties"):
    if result.get("type") == "OBJECT" and not result.get("properties"):
        # An object with undefined properties is not supported by Gemini API.
        # Fallback to JSON string. This will probably fail for most tools that want it,
        # but we don't have a better fallback strategy so far.
        result["properties"] = {"json": {"type_": "STRING"}}
        result["properties"] = {"json": {"type": "STRING"}}
        result["required"] = []
    return result
    return cast(Schema, result)
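
For intuition, the shape of the conversion the rewritten _format_schema performs (input as voluptuous_openapi produces it; values illustrative):

# {"type": "integer", "enum": [1, 2, 3]}
#     -> {"type": "STRING", "enum": ["1", "2", "3"]}   # enum forced to STRING
# {"type": "array", "minItems": 1, "items": {"type": "string"}}
#     -> {"type": "ARRAY", "min_items": 1, "items": {"type": "STRING"}}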


def _format_tool(
    tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
) -> dict[str, Any]:
) -> Tool:
    """Format tool specification."""

    if tool.parameters.schema:
@@ -125,16 +158,14 @@ def _format_tool(
    else:
        parameters = None

    return protos.Tool(
        {
            "function_declarations": [
                {
                    "name": tool.name,
                    "description": tool.description,
                    "parameters": parameters,
                }
            ]
        }
    return Tool(
        function_declarations=[
            FunctionDeclaration(
                name=tool.name,
                description=tool.description,
                parameters=parameters,
            )
        ]
    )


@@ -151,14 +182,12 @@ def _escape_decode(value: Any) -> Any:

def _create_google_tool_response_content(
    content: list[conversation.ToolResultContent],
) -> protos.Content:
) -> Content:
    """Create a Google tool response content."""
    return protos.Content(
    return Content(
        parts=[
            protos.Part(
                function_response=protos.FunctionResponse(
                    name=tool_result.tool_name, response=tool_result.tool_result
                )
            Part.from_function_response(
                name=tool_result.tool_name, response=tool_result.tool_result
            )
            for tool_result in content
        ]
@@ -169,33 +198,36 @@ def _convert_content(
    content: conversation.UserContent
    | conversation.AssistantContent
    | conversation.SystemContent,
) -> genai_types.ContentDict:
) -> Content:
    """Convert HA content to Google content."""
    if content.role != "assistant" or not content.tool_calls:  # type: ignore[union-attr]
        role = "model" if content.role == "assistant" else content.role
        return {"role": role, "parts": content.content}
        return Content(
            role=role,
            parts=[
                Part.from_text(text=content.content if content.content else ""),
            ],
        )

    # Handle the Assistant content with tool calls.
    assert type(content) is conversation.AssistantContent
    parts = []
    parts: list[Part] = []

    if content.content:
        parts.append(protos.Part(text=content.content))
        parts.append(Part.from_text(text=content.content))

    if content.tool_calls:
        parts.extend(
            [
                protos.Part(
                    function_call=protos.FunctionCall(
                        name=tool_call.tool_name,
                        args=_escape_decode(tool_call.tool_args),
                    )
                Part.from_function_call(
                    name=tool_call.tool_name,
                    args=_escape_decode(tool_call.tool_args),
                )
                for tool_call in content.tool_calls
            ]
        )

    return protos.Content({"role": "model", "parts": parts})
    return Content(role="model", parts=parts)
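
Sketch of the role and part mapping this helper now performs (values illustrative):

# UserContent(role="user", content="Turn on the light")
#     -> Content(role="user", parts=[Part.from_text(text="Turn on the light")])
# AssistantContent(content="Sure", tool_calls=[llm.ToolInput(tool_name="HassTurnOn", tool_args={...})])
#     -> Content(role="model", parts=[Part.from_text(text="Sure"),
#            Part.from_function_call(name="HassTurnOn", args={...})])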


class GoogleGenerativeAIConversationEntity(
@@ -209,6 +241,7 @@ class GoogleGenerativeAIConversationEntity(
    def __init__(self, entry: ConfigEntry) -> None:
        """Initialize the agent."""
        self.entry = entry
        self._genai_client = entry.runtime_data
        self._attr_unique_id = entry.entry_id
        self._attr_device_info = dr.DeviceInfo(
            identifiers={(DOMAIN, entry.entry_id)},
@@ -273,7 +306,7 @@ class GoogleGenerativeAIConversationEntity(
        except conversation.ConverseError as err:
            return err.as_conversation_result()

        tools: list[dict[str, Any]] | None = None
        tools: list[Tool | Callable[..., Any]] | None = None
        if chat_log.llm_api:
            tools = [
                _format_tool(tool, chat_log.llm_api.custom_serializer)
@@ -288,13 +321,22 @@ class GoogleGenerativeAIConversationEntity(
            "gemini-1.0" not in model_name and "gemini-pro" not in model_name
        )

        prompt = chat_log.content[0].content  # type: ignore[union-attr]
        messages: list[genai_types.ContentDict] = []
        prompt_content = cast(
            conversation.SystemContent,
            chat_log.content[0],
        )

        if prompt_content.content:
            prompt = prompt_content.content
        else:
            raise HomeAssistantError("Invalid prompt content")

        messages: list[Content] = []

        # Google groups tool results, we do not. Group them before sending.
        tool_results: list[conversation.ToolResultContent] = []

        for chat_content in chat_log.content[1:]:
        for chat_content in chat_log.content[1:-1]:
            if chat_content.role == "tool_result":
                # mypy doesn't like picking a type based on checking shared property 'role'
                tool_results.append(cast(conversation.ToolResultContent, chat_content))
@@ -317,85 +359,93 @@ class GoogleGenerativeAIConversationEntity(

        if tool_results:
            messages.append(_create_google_tool_response_content(tool_results))

        model = genai.GenerativeModel(
            model_name=model_name,
            generation_config={
                "temperature": self.entry.options.get(
                    CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
        generateContentConfig = GenerateContentConfig(
            temperature=self.entry.options.get(
                CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
            ),
            top_k=self.entry.options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
            top_p=self.entry.options.get(CONF_TOP_P, RECOMMENDED_TOP_P),
            max_output_tokens=self.entry.options.get(
                CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS
            ),
            safety_settings=[
                SafetySetting(
                    category=HarmCategory.HARM_CATEGORY_HATE_SPEECH,
                    threshold=self.entry.options.get(
                        CONF_HATE_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
                    ),
                ),
                "top_p": self.entry.options.get(CONF_TOP_P, RECOMMENDED_TOP_P),
                "top_k": self.entry.options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
                "max_output_tokens": self.entry.options.get(
                    CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS
                SafetySetting(
                    category=HarmCategory.HARM_CATEGORY_HARASSMENT,
                    threshold=self.entry.options.get(
                        CONF_HARASSMENT_BLOCK_THRESHOLD,
                        RECOMMENDED_HARM_BLOCK_THRESHOLD,
                    ),
                ),
            },
            safety_settings={
                "HARASSMENT": self.entry.options.get(
                    CONF_HARASSMENT_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
                SafetySetting(
                    category=HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
                    threshold=self.entry.options.get(
                        CONF_DANGEROUS_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
                    ),
                ),
                "HATE": self.entry.options.get(
                    CONF_HATE_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
                SafetySetting(
                    category=HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
                    threshold=self.entry.options.get(
                        CONF_SEXUAL_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
                    ),
                ),
                "SEXUAL": self.entry.options.get(
                    CONF_SEXUAL_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
                ),
                "DANGEROUS": self.entry.options.get(
                    CONF_DANGEROUS_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
                ),
            },
            ],
            tools=tools or None,
            system_instruction=prompt if supports_system_instruction else None,
            automatic_function_calling=AutomaticFunctionCallingConfig(
                disable=True, maximum_remote_calls=None
            ),
        )

        if not supports_system_instruction:
            messages = [
                {"role": "user", "parts": prompt},
                {"role": "model", "parts": "Ok"},
                Content(role="user", parts=[Part.from_text(text=prompt)]),
                Content(role="model", parts=[Part.from_text(text="Ok")]),
                *messages,
            ]

        chat = model.start_chat(history=messages)
        chat_request = user_input.text
        chat = self._genai_client.aio.chats.create(
            model=model_name, history=messages, config=generateContentConfig
        )
        chat_request: str | Content = user_input.text
        # To prevent infinite loops, we limit the number of iterations
        for _iteration in range(MAX_TOOL_ITERATIONS):
            try:
                chat_response = await chat.send_message_async(chat_request)
            except (
                GoogleAPIError,
                ValueError,
                genai_types.BlockedPromptException,
                genai_types.StopCandidateException,
            ) as err:
                LOGGER.error("Error sending message: %s %s", type(err), err)
                chat_response = await chat.send_message(message=chat_request)

                if isinstance(
                    err, genai_types.StopCandidateException
                ) and "finish_reason: SAFETY\n" in str(err):
                    error = "The message got blocked by your safety settings"
                else:
                    error = (
                        f"Sorry, I had a problem talking to Google Generative AI: {err}"
                if chat_response.prompt_feedback:
                    raise HomeAssistantError(
                        f"The message got blocked due to content violations, reason: {chat_response.prompt_feedback.block_reason_message}"
                    )

            except (
                APIError,
                ValueError,
            ) as err:
                LOGGER.error("Error sending message: %s %s", type(err), err)
                error = f"Sorry, I had a problem talking to Google Generative AI: {err}"
                raise HomeAssistantError(error) from err

            LOGGER.debug("Response: %s", chat_response.parts)
            if not chat_response.parts:
            response_parts = chat_response.candidates[0].content.parts
            if not response_parts:
                raise HomeAssistantError(
                    "Sorry, I had a problem getting a response from Google Generative AI."
                )
            content = " ".join(
                [part.text.strip() for part in chat_response.parts if part.text]
                [part.text.strip() for part in response_parts if part.text]
            )

            tool_calls = []
            for part in chat_response.parts:
            for part in response_parts:
                if not part.function_call:
                    continue
                tool_call = MessageToDict(part.function_call._pb)  # noqa: SLF001
                tool_name = tool_call["name"]
                tool_args = _escape_decode(tool_call["args"])
                tool_call = part.function_call
                tool_name = tool_call.name
                tool_args = _escape_decode(tool_call.args)
                tool_calls.append(
                    llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
                )
@@ -418,7 +468,7 @@ class GoogleGenerativeAIConversationEntity(

        response = intent.IntentResponse(language=user_input.language)
        response.async_set_speech(
            " ".join([part.text.strip() for part in chat_response.parts if part.text])
            " ".join([part.text.strip() for part in response_parts if part.text])
        )
        return conversation.ConversationResult(
            response=response, conversation_id=chat_log.conversation_id

@@ -8,5 +8,5 @@
  "documentation": "https://www.home-assistant.io/integrations/google_generative_ai_conversation",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "requirements": ["google-generativeai==0.8.2"]
  "requirements": ["google-genai==1.1.0"]
}

@@ -9,3 +9,8 @@ generate_content:
      required: false
      selector:
        object:
    filenames:
      required: false
      selector:
        text:
          multiple: true

@@ -56,10 +56,21 @@
        },
        "image_filename": {
          "name": "Image filename",
          "description": "Images",
          "description": "Deprecated. Use filenames instead.",
          "example": "/config/www/image.jpg"
        },
        "filenames": {
          "name": "Attachment filenames",
          "description": "Attachments to add to the prompt (images, PDFs, etc)",
          "example": "/config/www/image.jpg"
        }
      }
    }
  },
  "issues": {
    "deprecated_image_filename_parameter": {
      "title": "Deprecated 'image_filename' parameter",
      "description": "The 'image_filename' parameter in Google Generative AI actions is deprecated. Please edit scripts and automations to use 'filenames' instead."
    }
  }
}

@@ -89,6 +89,10 @@ class GoveeLocalApiCoordinator(DataUpdateCoordinator[list[GoveeDevice]]):
        """Set light color in kelvin."""
        await device.set_temperature(temperature)

    async def set_scene(self, device: GoveeController, scene: str) -> None:
        """Set light scene."""
        await device.set_scene(scene)

    @property
    def devices(self) -> list[GoveeDevice]:
        """Return a list of discovered Govee devices."""

@@ -10,9 +10,11 @@ from govee_local_api import GoveeDevice, GoveeLightFeatures
from homeassistant.components.light import (
    ATTR_BRIGHTNESS,
    ATTR_COLOR_TEMP_KELVIN,
    ATTR_EFFECT,
    ATTR_RGB_COLOR,
    ColorMode,
    LightEntity,
    LightEntityFeature,
    filter_supported_color_modes,
)
from homeassistant.core import HomeAssistant, callback
@@ -25,6 +27,8 @@ from .coordinator import GoveeLocalApiCoordinator, GoveeLocalConfigEntry

_LOGGER = logging.getLogger(__name__)

_NONE_SCENE = "none"


async def async_setup_entry(
    hass: HomeAssistant,
@@ -50,10 +54,22 @@ async def async_setup_entry(
class GoveeLight(CoordinatorEntity[GoveeLocalApiCoordinator], LightEntity):
    """Govee Light."""

    _attr_translation_key = "govee_light"
    _attr_has_entity_name = True
    _attr_name = None
    _attr_supported_color_modes: set[ColorMode]
    _fixed_color_mode: ColorMode | None = None
    _attr_effect_list: list[str] | None = None
    _attr_effect: str | None = None
    _attr_supported_features: LightEntityFeature = LightEntityFeature(0)
    _last_color_state: (
        tuple[
            ColorMode | str | None,
            int | None,
            tuple[int, int, int] | tuple[int | None] | None,
        ]
        | None
    ) = None

    def __init__(
        self,
@@ -80,6 +96,13 @@ class GoveeLight(CoordinatorEntity[GoveeLocalApiCoordinator], LightEntity):
        if GoveeLightFeatures.BRIGHTNESS & capabilities.features:
            color_modes.add(ColorMode.BRIGHTNESS)

        if (
            GoveeLightFeatures.SCENES & capabilities.features
            and capabilities.scenes
        ):
            self._attr_supported_features = LightEntityFeature.EFFECT
            self._attr_effect_list = [_NONE_SCENE, *capabilities.scenes.keys()]

        self._attr_supported_color_modes = filter_supported_color_modes(color_modes)
        if len(self._attr_supported_color_modes) == 1:
            # If the light supports only a single color mode, set it now
@@ -143,12 +166,27 @@ class GoveeLight(CoordinatorEntity[GoveeLocalApiCoordinator], LightEntity):

        if ATTR_RGB_COLOR in kwargs:
            self._attr_color_mode = ColorMode.RGB
            self._attr_effect = None
            self._last_color_state = None
            red, green, blue = kwargs[ATTR_RGB_COLOR]
            await self.coordinator.set_rgb_color(self._device, red, green, blue)
        elif ATTR_COLOR_TEMP_KELVIN in kwargs:
            self._attr_color_mode = ColorMode.COLOR_TEMP
            self._attr_effect = None
            self._last_color_state = None
            temperature: float = kwargs[ATTR_COLOR_TEMP_KELVIN]
            await self.coordinator.set_temperature(self._device, int(temperature))
        elif ATTR_EFFECT in kwargs:
            effect = kwargs[ATTR_EFFECT]
            if effect and self._attr_effect_list and effect in self._attr_effect_list:
                if effect == _NONE_SCENE:
                    self._attr_effect = None
                    await self._restore_last_color_state()
                else:
                    self._attr_effect = effect
                    self._save_last_color_state()
                    await self.coordinator.set_scene(self._device, effect)

        self.async_write_ha_state()

    async def async_turn_off(self, **kwargs: Any) -> None:
@@ -159,3 +197,27 @@ class GoveeLight(CoordinatorEntity[GoveeLocalApiCoordinator], LightEntity):
    @callback
    def _update_callback(self, device: GoveeDevice) -> None:
        self.async_write_ha_state()

    def _save_last_color_state(self) -> None:
        color_mode = self.color_mode
        self._last_color_state = (
            color_mode,
            self.brightness,
            (self.color_temp_kelvin,)
            if color_mode == ColorMode.COLOR_TEMP
            else self.rgb_color,
        )

    async def _restore_last_color_state(self) -> None:
        if self._last_color_state:
            color_mode, brightness, color = self._last_color_state
            if color:
                if color_mode == ColorMode.RGB:
                    await self.coordinator.set_rgb_color(self._device, *color)
                elif color_mode == ColorMode.COLOR_TEMP:
                    await self.coordinator.set_temperature(self._device, *color)
            if brightness:
                await self.coordinator.set_brightness(
                    self._device, int((float(brightness) / 255.0) * 100.0)
                )
            self._last_color_state = None
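
The brightness rescale in the restore path exists because Home Assistant tracks brightness as 0-255 while the Govee local API takes a percentage; a small self-contained check (illustrative helper name):

def _ha_brightness_to_percent(brightness: int) -> int:
    # Same formula as _restore_last_color_state above
    return int((float(brightness) / 255.0) * 100.0)


assert _ha_brightness_to_percent(128) == 50
assert _ha_brightness_to_percent(255) == 100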

@@ -9,5 +9,29 @@
      "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]",
      "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]"
    }
  },
  "entity": {
    "light": {
      "govee_light": {
        "state_attributes": {
          "effect": {
            "state": {
              "none": "None",
              "sunrise": "Sunrise",
              "sunset": "Sunset",
              "movie": "Movie",
              "dating": "Dating",
              "romantic": "Romantic",
              "twinkle": "Twinkle",
              "candlelight": "Candlelight",
              "snowflake": "Snowflake",
              "energetic": "Energetic",
              "breathe": "Breathe",
              "crossing": "Crossing"
            }
          }
        }
      }
    }
  }
}

@@ -20,3 +20,4 @@ MAX_ERRORS = 2
TARGET_TEMPERATURE_STEP = 1

UPDATE_INTERVAL = 60
MAX_EXPECTED_RESPONSE_TIME_INTERVAL = UPDATE_INTERVAL * 2

@@ -2,6 +2,7 @@

from __future__ import annotations

import copy
from datetime import datetime, timedelta
import logging
from typing import Any
@@ -24,6 +25,7 @@ from .const import (
    DISPATCH_DEVICE_DISCOVERED,
    DOMAIN,
    MAX_ERRORS,
    MAX_EXPECTED_RESPONSE_TIME_INTERVAL,
    UPDATE_INTERVAL,
)

@@ -48,7 +50,6 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
            always_update=False,
        )
        self.device = device
        self.device.add_handler(Response.DATA, self.device_state_updated)
        self.device.add_handler(Response.RESULT, self.device_state_updated)

        self._error_count: int = 0
@@ -88,7 +89,9 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
        # raise update failed if time for more than MAX_ERRORS has passed since last update
        now = utcnow()
        elapsed_success = now - self._last_response_time
        if self.update_interval and elapsed_success >= self.update_interval:
        if self.update_interval and elapsed_success >= timedelta(
            seconds=MAX_EXPECTED_RESPONSE_TIME_INTERVAL
        ):
            if not self._last_error_time or (
                (now - self.update_interval) >= self._last_error_time
            ):
@@ -96,16 +99,19 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
                self._error_count += 1

            _LOGGER.warning(
                "Device %s is unresponsive for %s seconds",
                "Device %s took an unusually long time to respond, %s seconds",
                self.name,
                elapsed_success,
            )
        else:
            self._error_count = 0
        if self.last_update_success and self._error_count >= MAX_ERRORS:
            raise UpdateFailed(
                f"Device {self.name} is unresponsive for too long and now unavailable"
            )

        return self.device.raw_properties
        self._last_response_time = utcnow()
        return copy.deepcopy(self.device.raw_properties)
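
The effect of the new threshold, as a worked example: with UPDATE_INTERVAL = 60 the old check counted an error after a single missed 60-second cycle, while the new check waits two full cycles before the first strike:

from datetime import timedelta

UPDATE_INTERVAL = 60
MAX_EXPECTED_RESPONSE_TIME_INTERVAL = UPDATE_INTERVAL * 2

elapsed_success = timedelta(seconds=90)
assert elapsed_success >= timedelta(seconds=UPDATE_INTERVAL)  # old: strike
assert not elapsed_success >= timedelta(
    seconds=MAX_EXPECTED_RESPONSE_TIME_INTERVAL
)  # new: no strike yet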

    async def push_state_update(self):
        """Send state updates to the physical device."""

@@ -26,6 +26,7 @@ TOTAL_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
        api_key="todayEnergy",
        native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        device_class=SensorDeviceClass.ENERGY,
        state_class=SensorStateClass.TOTAL_INCREASING,
    ),
    GrowattSensorEntityDescription(
        key="total_output_power",
@@ -33,6 +34,7 @@ TOTAL_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
        api_key="invTodayPpv",
        native_unit_of_measurement=UnitOfPower.WATT,
        device_class=SensorDeviceClass.POWER,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    GrowattSensorEntityDescription(
        key="total_energy_output",

@@ -35,6 +35,11 @@ ATTR_TYPE = "type"
ATTR_PRIORITY = "priority"
ATTR_TAG = "tag"
ATTR_KEYWORD = "keyword"
ATTR_REMOVE_TAG = "remove_tag"
ATTR_ALIAS = "alias"
ATTR_PRIORITY = "priority"
ATTR_COST = "cost"
ATTR_NOTES = "notes"

SERVICE_CAST_SKILL = "cast_skill"
SERVICE_START_QUEST = "start_quest"
@@ -50,6 +55,7 @@ SERVICE_SCORE_REWARD = "score_reward"

SERVICE_TRANSFORMATION = "transformation"

SERVICE_UPDATE_REWARD = "update_reward"

DEVELOPER_ID = "4c4ca53f-c059-4ffa-966e-9d29dd405daf"
X_CLIENT = f"{DEVELOPER_ID} - {APPLICATION_NAME} {__version__}"

@@ -217,6 +217,13 @@
      "sections": {
        "filter": "mdi:calendar-filter"
      }
    },
    "update_reward": {
      "service": "mdi:treasure-chest",
      "sections": {
        "tag_options": "mdi:tag",
        "developer_options": "mdi:test-tube"
      }
    }
  }
}

@@ -4,7 +4,8 @@ from __future__ import annotations

from dataclasses import asdict
import logging
from typing import TYPE_CHECKING, Any
from typing import TYPE_CHECKING, Any, cast
from uuid import UUID

from aiohttp import ClientError
from habiticalib import (
@@ -13,6 +14,7 @@ from habiticalib import (
    NotAuthorizedError,
    NotFoundError,
    Skill,
    Task,
    TaskData,
    TaskPriority,
    TaskType,
@@ -20,6 +22,7 @@ from habiticalib import (
)
import voluptuous as vol

from homeassistant.components.todo import ATTR_RENAME
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_NAME, CONF_NAME
from homeassistant.core import (
@@ -34,14 +37,18 @@ from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.selector import ConfigEntrySelector

from .const import (
    ATTR_ALIAS,
    ATTR_ARGS,
    ATTR_CONFIG_ENTRY,
    ATTR_COST,
    ATTR_DATA,
    ATTR_DIRECTION,
    ATTR_ITEM,
    ATTR_KEYWORD,
    ATTR_NOTES,
    ATTR_PATH,
    ATTR_PRIORITY,
    ATTR_REMOVE_TAG,
    ATTR_SKILL,
    ATTR_TAG,
    ATTR_TARGET,
@@ -61,6 +68,7 @@ from .const import (
    SERVICE_SCORE_REWARD,
    SERVICE_START_QUEST,
    SERVICE_TRANSFORMATION,
    SERVICE_UPDATE_REWARD,
)
from .coordinator import HabiticaConfigEntry

@@ -104,6 +112,21 @@ SERVICE_TRANSFORMATION_SCHEMA = vol.Schema(
    }
)

SERVICE_UPDATE_TASK_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
        vol.Required(ATTR_TASK): cv.string,
        vol.Optional(ATTR_RENAME): cv.string,
        vol.Optional(ATTR_NOTES): cv.string,
        vol.Optional(ATTR_TAG): vol.All(cv.ensure_list, [str]),
        vol.Optional(ATTR_REMOVE_TAG): vol.All(cv.ensure_list, [str]),
        vol.Optional(ATTR_ALIAS): vol.All(
            cv.string, cv.matches_regex("^[a-zA-Z0-9-_]*$")
        ),
        vol.Optional(ATTR_COST): vol.Coerce(float),
    }
)
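
An example payload that validates against SERVICE_UPDATE_TASK_SCHEMA (field values are illustrative; the alias must match the ^[a-zA-Z0-9-_]*$ pattern above):

payload = {
    "config_entry": "1234567890abcdef",  # ATTR_CONFIG_ENTRY
    "task": "Watch a movie",             # name, alias, or task ID
    "rename": "Movie night",
    "notes": "Any film counts",
    "tag": ["fun"],
    "cost": 12.5,
    "alias": "movie_night",
}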

SERVICE_GET_TASKS_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
@@ -516,6 +539,130 @@ def async_setup_services(hass: HomeAssistant) -> None:  # noqa: C901

        return result

    async def update_task(call: ServiceCall) -> ServiceResponse:
        """Update task action."""
        entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY])
        coordinator = entry.runtime_data
        await coordinator.async_refresh()

        try:
            current_task = next(
                task
                for task in coordinator.data.tasks
                if call.data[ATTR_TASK] in (str(task.id), task.alias, task.text)
                and task.Type is TaskType.REWARD
            )
        except StopIteration as e:
            raise ServiceValidationError(
                translation_domain=DOMAIN,
                translation_key="task_not_found",
                translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"},
            ) from e

        task_id = current_task.id
        if TYPE_CHECKING:
            assert task_id
        data = Task()

        if rename := call.data.get(ATTR_RENAME):
            data["text"] = rename

        if (notes := call.data.get(ATTR_NOTES)) is not None:
            data["notes"] = notes

        tags = cast(list[str], call.data.get(ATTR_TAG))
        remove_tags = cast(list[str], call.data.get(ATTR_REMOVE_TAG))

        if tags or remove_tags:
            update_tags = set(current_task.tags)
            user_tags = {
                tag.name.lower(): tag.id
                for tag in coordinator.data.user.tags
                if tag.id and tag.name
            }

            if tags:
                # Creates new tag if it doesn't exist
                async def create_tag(tag_name: str) -> UUID:
                    tag_id = (await coordinator.habitica.create_tag(tag_name)).data.id
                    if TYPE_CHECKING:
                        assert tag_id
                    return tag_id

                try:
                    update_tags.update(
                        {
                            user_tags.get(tag_name.lower())
                            or (await create_tag(tag_name))
                            for tag_name in tags
                        }
                    )
                except TooManyRequestsError as e:
                    raise HomeAssistantError(
                        translation_domain=DOMAIN,
                        translation_key="setup_rate_limit_exception",
                        translation_placeholders={"retry_after": str(e.retry_after)},
                    ) from e
                except HabiticaException as e:
                    raise HomeAssistantError(
                        translation_domain=DOMAIN,
                        translation_key="service_call_exception",
                        translation_placeholders={"reason": str(e.error.message)},
                    ) from e
                except ClientError as e:
                    raise HomeAssistantError(
                        translation_domain=DOMAIN,
                        translation_key="service_call_exception",
                        translation_placeholders={"reason": str(e)},
                    ) from e

            if remove_tags:
                update_tags.difference_update(
                    {
                        user_tags[tag_name.lower()]
                        for tag_name in remove_tags
                        if tag_name.lower() in user_tags
                    }
                )

            data["tags"] = list(update_tags)

        if (alias := call.data.get(ATTR_ALIAS)) is not None:
            data["alias"] = alias

        if (cost := call.data.get(ATTR_COST)) is not None:
            data["value"] = cost

        try:
            response = await coordinator.habitica.update_task(task_id, data)
        except TooManyRequestsError as e:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="setup_rate_limit_exception",
                translation_placeholders={"retry_after": str(e.retry_after)},
            ) from e
        except HabiticaException as e:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="service_call_exception",
                translation_placeholders={"reason": str(e.error.message)},
            ) from e
        except ClientError as e:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="service_call_exception",
                translation_placeholders={"reason": str(e)},
            ) from e
        else:
            return response.data.to_dict(omit_none=True)

    hass.services.async_register(
        DOMAIN,
        SERVICE_UPDATE_REWARD,
        update_task,
        schema=SERVICE_UPDATE_TASK_SCHEMA,
        supports_response=SupportsResponse.ONLY,
    )
    hass.services.async_register(
        DOMAIN,
        SERVICE_API_CALL,

@@ -140,3 +140,43 @@ get_tasks:
      required: false
      selector:
        text:
update_reward:
  fields:
    config_entry: *config_entry
    task: *task
    rename:
      selector:
        text:
    notes:
      required: false
      selector:
        text:
          multiline: true
    cost:
      required: false
      selector:
        number:
          min: 0
          step: 0.01
          unit_of_measurement: "🪙"
          mode: box
    tag_options:
      collapsed: true
      fields:
        tag:
          required: false
          selector:
            text:
              multiple: true
        remove_tag:
          required: false
          selector:
            text:
              multiple: true
    developer_options:
      collapsed: true
      fields:
        alias:
          required: false
          selector:
            text:

@@ -7,7 +7,23 @@
    "unit_tasks": "tasks",
    "unit_health_points": "HP",
    "unit_mana_points": "MP",
    "unit_experience_points": "XP"
    "unit_experience_points": "XP",
    "config_entry_description": "Select the Habitica account to update a task.",
    "task_description": "The name (or task ID) of the task you want to update.",
    "rename_name": "Rename",
    "rename_description": "The new title for the Habitica task.",
    "notes_name": "Update notes",
    "notes_description": "The new notes for the Habitica task.",
    "tag_name": "Add tags",
    "tag_description": "Add tags to the Habitica task. If a tag does not already exist, a new one will be created.",
    "remove_tag_name": "Remove tags",
    "remove_tag_description": "Remove tags from the Habitica task.",
    "alias_name": "Task alias",
    "alias_description": "A task alias can be used instead of the name or task ID. Only dashes, underscores, and alphanumeric characters are supported. The task alias must be unique among all your tasks.",
    "developer_options_name": "Advanced settings",
    "developer_options_description": "Additional features available in developer mode.",
    "tag_options_name": "Tags",
    "tag_options_description": "Add or remove tags from a task."
  },
  "config": {
    "abort": {
@@ -457,6 +473,12 @@
    },
    "authentication_failed": {
      "message": "Authentication failed. It looks like your API token has been reset. Please re-authenticate using your new token"
    },
    "frequency_not_weekly": {
      "message": "Unable to update task, weekly repeat settings apply only to weekly recurring dailies."
    },
    "frequency_not_monthly": {
      "message": "Unable to update task, monthly repeat settings apply only to monthly recurring dailies."
    }
  },
  "issues": {
@@ -651,6 +673,54 @@
        "description": "Use the optional filters to narrow the returned tasks."
      }
    }
  },
  "update_reward": {
    "name": "Update a reward",
    "description": "Updates a specific reward for the selected Habitica character",
    "fields": {
      "config_entry": {
        "name": "[%key:component::habitica::common::config_entry_name%]",
        "description": "Select the Habitica account to update a reward."
      },
      "task": {
        "name": "[%key:component::habitica::common::task_name%]",
        "description": "[%key:component::habitica::common::task_description%]"
      },
      "rename": {
        "name": "[%key:component::habitica::common::rename_name%]",
        "description": "[%key:component::habitica::common::rename_description%]"
      },
      "notes": {
        "name": "[%key:component::habitica::common::notes_name%]",
        "description": "[%key:component::habitica::common::notes_description%]"
      },
      "tag": {
        "name": "[%key:component::habitica::common::tag_name%]",
        "description": "[%key:component::habitica::common::tag_description%]"
      },
      "remove_tag": {
        "name": "[%key:component::habitica::common::remove_tag_name%]",
        "description": "[%key:component::habitica::common::remove_tag_description%]"
      },
      "alias": {
        "name": "[%key:component::habitica::common::alias_name%]",
        "description": "[%key:component::habitica::common::alias_description%]"
      },
      "cost": {
        "name": "Cost",
        "description": "Update the cost of a reward."
      }
    },
    "sections": {
      "tag_options": {
        "name": "[%key:component::habitica::common::tag_options_name%]",
        "description": "[%key:component::habitica::common::tag_options_description%]"
      },
      "developer_options": {
        "name": "[%key:component::habitica::common::developer_options_name%]",
        "description": "[%key:component::habitica::common::developer_options_description%]"
      }
    }
  }
},
"selector": {

@@ -117,19 +117,24 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity):
        """Move an item in the To-do list."""
        if TYPE_CHECKING:
            assert self.todo_items
        tasks_order = (
            self.coordinator.data.user.tasksOrder.todos
            if self.entity_description.key is HabiticaTodoList.TODOS
            else self.coordinator.data.user.tasksOrder.dailys
        )

        if previous_uid:
            pos = (
                self.todo_items.index(
                    next(item for item in self.todo_items if item.uid == previous_uid)
                )
                + 1
            )
            pos = tasks_order.index(UUID(previous_uid))
            if pos < tasks_order.index(UUID(uid)):
                pos += 1

        else:
            pos = 0

        try:
            await self.coordinator.habitica.reorder_task(UUID(uid), pos)
            tasks_order[:] = (
                await self.coordinator.habitica.reorder_task(UUID(uid), pos)
            ).data
        except TooManyRequestsError as e:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
@@ -143,20 +148,6 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity):
                translation_key=f"move_{self.entity_description.key}_item_failed",
                translation_placeholders={"pos": str(pos)},
            ) from e
        else:
            # move tasks in the coordinator until we have fresh data
            tasks = self.coordinator.data.tasks
            new_pos = (
                tasks.index(
                    next(task for task in tasks if task.id == UUID(previous_uid))
                )
                + 1
                if previous_uid
                else 0
            )
            old_pos = tasks.index(next(task for task in tasks if task.id == UUID(uid)))
            tasks.insert(new_pos, tasks.pop(old_pos))
            await self.coordinator.async_request_refresh()

    async def async_update_todo_item(self, item: TodoItem) -> None:
        """Update a Habitica todo."""
@@ -270,7 +261,7 @@ class HabiticaTodosListEntity(BaseHabiticaListEntity):
    def todo_items(self) -> list[TodoItem]:
        """Return the todo items."""

        return [
        tasks = [
            *(
                TodoItem(
                    uid=str(task.id),
@@ -287,6 +278,15 @@ class HabiticaTodosListEntity(BaseHabiticaListEntity):
                if task.Type is TaskType.TODO
            ),
        ]
        return sorted(
            tasks,
            key=lambda task: (
                float("inf")
                if (uid := UUID(task.uid))
                not in (tasks_order := self.coordinator.data.user.tasksOrder.todos)
                else tasks_order.index(uid)
            ),
        )

    async def async_create_todo_item(self, item: TodoItem) -> None:
        """Create a Habitica todo."""
@@ -347,7 +347,7 @@ class HabiticaDailiesListEntity(BaseHabiticaListEntity):
        if TYPE_CHECKING:
            assert self.coordinator.data.user.lastCron

        return [
        tasks = [
            *(
                TodoItem(
                    uid=str(task.id),
@@ -364,3 +364,12 @@ class HabiticaDailiesListEntity(BaseHabiticaListEntity):
                if task.Type is TaskType.DAILY
            )
        ]
        return sorted(
            tasks,
            key=lambda task: (
                float("inf")
                if (uid := UUID(task.uid))
                not in (tasks_order := self.coordinator.data.user.tasksOrder.dailys)
                else tasks_order.index(uid)
            ),
        )

@@ -45,13 +45,13 @@ from homeassistant.components.backup import (
    RestoreBackupStage,
    RestoreBackupState,
    WrittenBackup,
    async_get_manager as async_get_backup_manager,
    suggested_filename as suggested_backup_filename,
    suggested_filename_from_name_date,
)
from homeassistant.const import __version__ as HAVERSION
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.backup import async_get_manager as async_get_backup_manager
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util import dt as dt_util
from homeassistant.util.enum import try_parse_enum
@@ -751,7 +751,7 @@ async def backup_addon_before_update(

async def backup_core_before_update(hass: HomeAssistant) -> None:
    """Prepare for updating core."""
    backup_manager = async_get_backup_manager(hass)
    backup_manager = await async_get_backup_manager(hass)
    client = get_supervisor_client(hass)

    try:

@@ -102,6 +102,18 @@ async def _validate_auth(
    return True


def _get_current_hosts(entry: HeosConfigEntry) -> set[str]:
    """Get a set of current hosts from the entry."""
    hosts = set(entry.data[CONF_HOST])
    if hasattr(entry, "runtime_data"):
        hosts.update(
            player.ip_address
            for player in entry.runtime_data.heos.players.values()
            if player.ip_address is not None
        )
    return hosts


class HeosFlowHandler(ConfigFlow, domain=DOMAIN):
    """Define a flow for HEOS."""

@@ -125,10 +137,15 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN):
        if TYPE_CHECKING:
            assert discovery_info.ssdp_location

        await self.async_set_unique_id(DOMAIN)
        # Connect to discovered host and get system information
        entry: HeosConfigEntry | None = await self.async_set_unique_id(DOMAIN)
        hostname = urlparse(discovery_info.ssdp_location).hostname
        assert hostname is not None

        # Abort early when discovered host is part of the current system
        if entry and hostname in _get_current_hosts(entry):
            return self.async_abort(reason="single_instance_allowed")

        # Connect to discovered host and get system information
        heos = Heos(HeosOptions(hostname, events=False, heart_beat=False))
        try:
            await heos.connect()
@@ -146,8 +163,23 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN):
        # Select the preferred host, if available
        if system_info.preferred_hosts:
            hostname = system_info.preferred_hosts[0].ip_address
        self._discovered_host = hostname
        return await self.async_step_confirm_discovery()

        # Move to confirmation when not configured
        if entry is None:
            self._discovered_host = hostname
            return await self.async_step_confirm_discovery()

        # Only update if the configured host isn't part of the discovered hosts to ensure new players that come online don't trigger a reload
        if entry.data[CONF_HOST] not in [host.ip_address for host in system_info.hosts]:
            _LOGGER.debug(
                "Updated host %s to discovered host %s", entry.data[CONF_HOST], hostname
            )
            return self.async_update_reload_and_abort(
                entry,
                data_updates={CONF_HOST: hostname},
                reason="reconfigure_successful",
            )
        return self.async_abort(reason="single_instance_allowed")

    async def async_step_confirm_discovery(
        self, user_input: dict[str, Any] | None = None
@@ -167,6 +199,7 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN):
    ) -> ConfigFlowResult:
        """Obtain host and validate connection."""
        await self.async_set_unique_id(DOMAIN)
        self._abort_if_unique_id_configured(error="single_instance_allowed")
        # Try connecting to host if provided
        errors: dict[str, str] = {}
        host = None

@@ -7,9 +7,8 @@
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["pyheos"],
  "quality_scale": "silver",
  "quality_scale": "platinum",
  "requirements": ["pyheos==1.0.2"],
  "single_config_entry": true,
  "ssdp": [
    {
      "st": "urn:schemas-denon-com:device:ACT-Denon:1"

@@ -38,9 +38,7 @@ rules:
  # Gold
  devices: done
  diagnostics: done
  discovery-update-info:
    status: todo
    comment: Explore if this is possible.
  discovery-update-info: done
  discovery: done
  docs-data-update: done
  docs-examples: done

@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/holiday",
  "iot_class": "local_polling",
  "requirements": ["holidays==0.66", "babel==2.15.0"]
  "requirements": ["holidays==0.67", "babel==2.15.0"]
}

@@ -187,6 +187,7 @@ SERVICE_COMMAND_SCHEMA = vol.Schema({vol.Required(ATTR_DEVICE_ID): str})

PLATFORMS = [
    Platform.BINARY_SENSOR,
    Platform.BUTTON,
    Platform.LIGHT,
    Platform.NUMBER,
    Platform.SELECT,
@@ -202,7 +203,13 @@ async def _get_client_and_ha_id(
    device_registry = dr.async_get(hass)
    device_entry = device_registry.async_get(device_id)
    if device_entry is None:
        raise ServiceValidationError("Device entry not found for device id")
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="device_entry_not_found",
            translation_placeholders={
                "device_id": device_id,
            },
        )
    entry: HomeConnectConfigEntry | None = None
    for entry_id in device_entry.config_entries:
        _entry = hass.config_entries.async_get_entry(entry_id)
@@ -212,7 +219,11 @@ async def _get_client_and_ha_id(
            break
    if entry is None:
        raise ServiceValidationError(
            "Home Connect config entry not found for that device id"
            translation_domain=DOMAIN,
            translation_key="config_entry_not_found",
            translation_placeholders={
                "device_id": device_id,
            },
        )

    ha_id = next(
@@ -404,6 +415,17 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:  # noqa:
        """Execute calls to services executing a command."""
        client, ha_id = await _get_client_and_ha_id(hass, call.data[ATTR_DEVICE_ID])

        async_create_issue(
            hass,
            DOMAIN,
            "deprecated_command_actions",
            breaks_in_ha_version="2025.9.0",
            is_fixable=True,
            is_persistent=True,
            severity=IssueSeverity.WARNING,
            translation_key="deprecated_command_actions",
        )

        try:
            await client.put_command(ha_id, command_key=command_key, value=True)
        except HomeConnectError as err:
@@ -609,6 +631,7 @@ async def async_unload_entry(
) -> bool:
    """Unload a config entry."""
    async_delete_issue(hass, DOMAIN, "deprecated_set_program_and_option_actions")
    async_delete_issue(hass, DOMAIN, "deprecated_command_actions")
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


@@ -38,6 +38,8 @@ from .coordinator import (
)
from .entity import HomeConnectEntity

PARALLEL_UPDATES = 0

REFRIGERATION_DOOR_BOOLEAN_MAP = {
    REFRIGERATION_STATUS_DOOR_CLOSED: False,
    REFRIGERATION_STATUS_DOOR_OPEN: True,

homeassistant/components/home_connect/button.py (new file, 162 lines)
@@ -0,0 +1,162 @@
"""Provides button entities for Home Connect."""

from aiohomeconnect.model import CommandKey, EventKey
from aiohomeconnect.model.error import HomeConnectError

from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .common import setup_home_connect_entry
from .const import APPLIANCES_WITH_PROGRAMS, DOMAIN
from .coordinator import (
    HomeConnectApplianceData,
    HomeConnectConfigEntry,
    HomeConnectCoordinator,
)
from .entity import HomeConnectEntity
from .utils import get_dict_from_home_connect_error

PARALLEL_UPDATES = 1


class HomeConnectCommandButtonEntityDescription(ButtonEntityDescription):
    """Describes Home Connect button entity."""

    key: CommandKey


COMMAND_BUTTONS = (
    HomeConnectCommandButtonEntityDescription(
        key=CommandKey.BSH_COMMON_OPEN_DOOR,
        translation_key="open_door",
    ),
    HomeConnectCommandButtonEntityDescription(
        key=CommandKey.BSH_COMMON_PARTLY_OPEN_DOOR,
        translation_key="partly_open_door",
    ),
    HomeConnectCommandButtonEntityDescription(
        key=CommandKey.BSH_COMMON_PAUSE_PROGRAM,
        translation_key="pause_program",
    ),
    HomeConnectCommandButtonEntityDescription(
        key=CommandKey.BSH_COMMON_RESUME_PROGRAM,
        translation_key="resume_program",
    ),
)


def _get_entities_for_appliance(
    entry: HomeConnectConfigEntry,
    appliance: HomeConnectApplianceData,
) -> list[HomeConnectEntity]:
    """Get a list of entities."""
    entities: list[HomeConnectEntity] = []
    entities.extend(
        HomeConnectCommandButtonEntity(entry.runtime_data, appliance, description)
        for description in COMMAND_BUTTONS
        if description.key in appliance.commands
    )
    if appliance.info.type in APPLIANCES_WITH_PROGRAMS:
        entities.append(
            HomeConnectStopProgramButtonEntity(entry.runtime_data, appliance)
        )

    return entities


async def async_setup_entry(
    hass: HomeAssistant,
    entry: HomeConnectConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Home Connect button entities."""
    setup_home_connect_entry(
        entry,
        _get_entities_for_appliance,
        async_add_entities,
    )


class HomeConnectButtonEntity(HomeConnectEntity, ButtonEntity):
    """Describes Home Connect button entity."""

    entity_description: ButtonEntityDescription

    def __init__(
        self,
        coordinator: HomeConnectCoordinator,
        appliance: HomeConnectApplianceData,
        desc: ButtonEntityDescription,
    ) -> None:
        """Initialize the entity."""
        super().__init__(
            coordinator,
            appliance,
            # The entity is subscribed to the appliance connected event,
            # but it will receive also the disconnected event
            ButtonEntityDescription(
                key=EventKey.BSH_COMMON_APPLIANCE_CONNECTED,
            ),
        )
        self.entity_description = desc
        self.appliance = appliance
        self.unique_id = f"{appliance.info.ha_id}-{desc.key}"

    def update_native_value(self) -> None:
        """Set the value of the entity."""


class HomeConnectCommandButtonEntity(HomeConnectButtonEntity):
    """Button entity for Home Connect commands."""

    entity_description: HomeConnectCommandButtonEntityDescription
|
||||
|
||||
async def async_press(self) -> None:
|
||||
"""Press the button."""
|
||||
try:
|
||||
await self.coordinator.client.put_command(
|
||||
self.appliance.info.ha_id,
|
||||
command_key=self.entity_description.key,
|
||||
value=True,
|
||||
)
|
||||
except HomeConnectError as error:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="execute_command",
|
||||
translation_placeholders={
|
||||
**get_dict_from_home_connect_error(error),
|
||||
"command": self.entity_description.key,
|
||||
},
|
||||
) from error
|
||||
|
||||
|
||||
class HomeConnectStopProgramButtonEntity(HomeConnectButtonEntity):
|
||||
"""Button entity for stopping a program."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: HomeConnectCoordinator,
|
||||
appliance: HomeConnectApplianceData,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(
|
||||
coordinator,
|
||||
appliance,
|
||||
ButtonEntityDescription(
|
||||
key="StopProgram",
|
||||
translation_key="stop_program",
|
||||
),
|
||||
)
|
||||
|
||||
async def async_press(self) -> None:
|
||||
"""Press the button."""
|
||||
try:
|
||||
await self.coordinator.client.stop_program(self.appliance.info.ha_id)
|
||||
except HomeConnectError as error:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="stop_program",
|
||||
translation_placeholders=get_dict_from_home_connect_error(error),
|
||||
) from error
|
||||
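For context, pressing one of these command buttons boils down to a single Home Connect API call. A hedged sketch against aiohomeconnect, assuming an already-authenticated client and a known appliance ha_id:

    from aiohomeconnect.client import Client
    from aiohomeconnect.model import CommandKey

    async def pause_program(client: Client, ha_id: str) -> None:
        # Same call HomeConnectCommandButtonEntity.async_press() issues above;
        # BSH commands are fire-and-forget booleans.
        await client.put_command(
            ha_id, command_key=CommandKey.BSH_COMMON_PAUSE_PROGRAM, value=True
        )
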

homeassistant/components/home_connect/common.py
@@ -1,5 +1,6 @@
 """Common callbacks for all Home Connect platforms."""

+from collections import defaultdict
 from collections.abc import Callable
 from functools import partial
 from typing import cast
@@ -9,7 +10,32 @@ from aiohomeconnect.model import EventKey
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
-from .entity import HomeConnectEntity
+from .entity import HomeConnectEntity, HomeConnectOptionEntity
+
+
+def _create_option_entities(
+    entry: HomeConnectConfigEntry,
+    appliance: HomeConnectApplianceData,
+    known_entity_unique_ids: dict[str, str],
+    get_option_entities_for_appliance: Callable[
+        [HomeConnectConfigEntry, HomeConnectApplianceData],
+        list[HomeConnectOptionEntity],
+    ],
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Create the required option entities for the appliances."""
+    option_entities_to_add = [
+        entity
+        for entity in get_option_entities_for_appliance(entry, appliance)
+        if entity.unique_id not in known_entity_unique_ids
+    ]
+    known_entity_unique_ids.update(
+        {
+            cast(str, entity.unique_id): appliance.info.ha_id
+            for entity in option_entities_to_add
+        }
+    )
+    async_add_entities(option_entities_to_add)
@@ -18,6 +44,12 @@ def _handle_paired_or_connected_appliance(
     get_entities_for_appliance: Callable[
         [HomeConnectConfigEntry, HomeConnectApplianceData], list[HomeConnectEntity]
     ],
+    get_option_entities_for_appliance: Callable[
+        [HomeConnectConfigEntry, HomeConnectApplianceData],
+        list[HomeConnectOptionEntity],
+    ]
+    | None,
+    changed_options_listener_remove_callbacks: dict[str, list[Callable[[], None]]],
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Handle a new paired appliance or an appliance that has been connected.
@@ -34,6 +66,33 @@ def _handle_paired_or_connected_appliance(
         for entity in get_entities_for_appliance(entry, appliance)
         if entity.unique_id not in known_entity_unique_ids
     ]
+    if get_option_entities_for_appliance:
+        entities_to_add.extend(
+            entity
+            for entity in get_option_entities_for_appliance(entry, appliance)
+            if entity.unique_id not in known_entity_unique_ids
+        )
+        for event_key in (
+            EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM,
+            EventKey.BSH_COMMON_ROOT_SELECTED_PROGRAM,
+        ):
+            changed_options_listener_remove_callback = (
+                entry.runtime_data.async_add_listener(
+                    partial(
+                        _create_option_entities,
+                        entry,
+                        appliance,
+                        known_entity_unique_ids,
+                        get_option_entities_for_appliance,
+                        async_add_entities,
+                    ),
+                    (appliance.info.ha_id, event_key),
+                )
+            )
+            entry.async_on_unload(changed_options_listener_remove_callback)
+            changed_options_listener_remove_callbacks[appliance.info.ha_id].append(
+                changed_options_listener_remove_callback
+            )
     known_entity_unique_ids.update(
         {
             cast(str, entity.unique_id): appliance.info.ha_id
@@ -47,11 +106,17 @@
 def _handle_depaired_appliance(
     entry: HomeConnectConfigEntry,
     known_entity_unique_ids: dict[str, str],
+    changed_options_listener_remove_callbacks: dict[str, list[Callable[[], None]]],
 ) -> None:
     """Handle a removed appliance."""
     for entity_unique_id, appliance_id in known_entity_unique_ids.copy().items():
         if appliance_id not in entry.runtime_data.data:
             known_entity_unique_ids.pop(entity_unique_id, None)
+            if appliance_id in changed_options_listener_remove_callbacks:
+                for listener in changed_options_listener_remove_callbacks.pop(
+                    appliance_id
+                ):
+                    listener()
@@ -60,13 +125,44 @@ def setup_home_connect_entry(
     get_entities_for_appliance: Callable[
         [HomeConnectConfigEntry, HomeConnectApplianceData], list[HomeConnectEntity]
     ],
     async_add_entities: AddConfigEntryEntitiesCallback,
+    get_option_entities_for_appliance: Callable[
+        [HomeConnectConfigEntry, HomeConnectApplianceData],
+        list[HomeConnectOptionEntity],
+    ]
+    | None = None,
 ) -> None:
     """Set up the callbacks for paired and depaired appliances."""
     known_entity_unique_ids: dict[str, str] = {}
+    changed_options_listener_remove_callbacks: dict[str, list[Callable[[], None]]] = (
+        defaultdict(list)
+    )

     entities: list[HomeConnectEntity] = []
     for appliance in entry.runtime_data.data.values():
         entities_to_add = get_entities_for_appliance(entry, appliance)
+        if get_option_entities_for_appliance:
+            entities_to_add.extend(get_option_entities_for_appliance(entry, appliance))
+            for event_key in (
+                EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM,
+                EventKey.BSH_COMMON_ROOT_SELECTED_PROGRAM,
+            ):
+                changed_options_listener_remove_callback = (
+                    entry.runtime_data.async_add_listener(
+                        partial(
+                            _create_option_entities,
+                            entry,
+                            appliance,
+                            known_entity_unique_ids,
+                            get_option_entities_for_appliance,
+                            async_add_entities,
+                        ),
+                        (appliance.info.ha_id, event_key),
+                    )
+                )
+                entry.async_on_unload(changed_options_listener_remove_callback)
+                changed_options_listener_remove_callbacks[appliance.info.ha_id].append(
+                    changed_options_listener_remove_callback
+                )
         known_entity_unique_ids.update(
             {
                 cast(str, entity.unique_id): appliance.info.ha_id
@@ -83,6 +179,8 @@ def setup_home_connect_entry(
             entry,
             known_entity_unique_ids,
             get_entities_for_appliance,
+            get_option_entities_for_appliance,
+            changed_options_listener_remove_callbacks,
             async_add_entities,
         ),
         (
@@ -93,7 +191,12 @@ def setup_home_connect_entry(
     )
     entry.async_on_unload(
         entry.runtime_data.async_add_special_listener(
-            partial(_handle_depaired_appliance, entry, known_entity_unique_ids),
+            partial(
+                _handle_depaired_appliance,
+                entry,
+                known_entity_unique_ids,
+                changed_options_listener_remove_callbacks,
+            ),
             (EventKey.BSH_COMMON_APPLIANCE_DEPAIRED,),
         )
     )
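To see how the extended signature is consumed, here is a hedged sketch of a platform module wiring in an option-entity factory. The factory bodies are placeholders, but the call shape follows async_setup_entry in button.py above:

    from homeassistant.core import HomeAssistant
    from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

    from .common import setup_home_connect_entry
    from .coordinator import HomeConnectConfigEntry

    def _get_entities_for_appliance(entry, appliance):
        """Hypothetical factory: regular entities for one appliance."""
        return []

    def _get_option_entities_for_appliance(entry, appliance):
        """Hypothetical factory: option entities for the current program."""
        return []

    async def async_setup_entry(
        hass: HomeAssistant,
        entry: HomeConnectConfigEntry,
        async_add_entities: AddConfigEntryEntitiesCallback,
    ) -> None:
        setup_home_connect_entry(
            entry,
            _get_entities_for_appliance,
            async_add_entities,
            # New optional argument: when provided, option entities are created
            # now and re-created whenever the active/selected program changes.
            get_option_entities_for_appliance=_get_option_entities_for_appliance,
        )
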

homeassistant/components/home_connect/const.py
@@ -4,6 +4,8 @@ from typing import cast

 from aiohomeconnect.model import EventKey, OptionKey, ProgramKey, SettingKey, StatusKey

+from homeassistant.const import UnitOfTemperature, UnitOfTime, UnitOfVolume
+
 from .utils import bsh_key_to_translation_key

 DOMAIN = "home_connect"
@@ -21,6 +23,13 @@ APPLIANCES_WITH_PROGRAMS = (
     "WasherDryer",
 )

+UNIT_MAP = {
+    "seconds": UnitOfTime.SECONDS,
+    "ml": UnitOfVolume.MILLILITERS,
+    "°C": UnitOfTemperature.CELSIUS,
+    "°F": UnitOfTemperature.FAHRENHEIT,
+}
+

 BSH_POWER_ON = "BSH.Common.EnumType.PowerState.On"
 BSH_POWER_OFF = "BSH.Common.EnumType.PowerState.Off"
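UNIT_MAP translates the raw unit strings returned by the Home Connect API into Home Assistant's unit constants. A small illustration of the intended lookup (the fallback-to-raw behavior is an assumption about how consumers use the map, not something this hunk shows):

    from homeassistant.const import UnitOfTemperature

    from .const import UNIT_MAP

    raw_unit = "°C"                                 # as delivered by the API
    native_unit = UNIT_MAP.get(raw_unit, raw_unit)  # unknown units pass through
    assert native_unit is UnitOfTemperature.CELSIUS
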
@@ -87,7 +96,7 @@ PROGRAMS_TRANSLATION_KEYS_MAP = {
     value: key for key, value in TRANSLATION_KEYS_PROGRAMS_MAP.items()
 }

-REFERENCE_MAP_ID_OPTIONS = {
+AVAILABLE_MAPS_ENUM = {
     bsh_key_to_translation_key(option): option
     for option in (
         "ConsumerProducts.CleaningRobot.EnumType.AvailableMaps.TempMap",
@@ -305,7 +314,7 @@ PROGRAM_ENUM_OPTIONS = {
    for option_key, options in (
        (
            OptionKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_REFERENCE_MAP_ID,
-           REFERENCE_MAP_ID_OPTIONS,
+           AVAILABLE_MAPS_ENUM,
        ),
        (
            OptionKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_CLEANING_MODE,

homeassistant/components/home_connect/coordinator.py
@@ -7,16 +7,19 @@
 from collections.abc import Callable
 from dataclasses import dataclass
 import logging
-from typing import Any
+from typing import Any, cast

 from aiohomeconnect.client import Client as HomeConnectClient
 from aiohomeconnect.model import (
+    CommandKey,
     Event,
     EventKey,
     EventMessage,
     EventType,
     GetSetting,
     HomeAppliance,
+    OptionKey,
+    ProgramKey,
     SettingKey,
     Status,
     StatusKey,
@@ -28,7 +31,7 @@ from aiohomeconnect.model.error import (
     HomeConnectRequestError,
     UnauthorizedError,
 )
-from aiohomeconnect.model.program import EnumerateProgram
+from aiohomeconnect.model.program import EnumerateProgram, ProgramDefinitionOption
 from propcache.api import cached_property

 from homeassistant.config_entries import ConfigEntry
@@ -51,16 +54,21 @@ EVENT_STREAM_RECONNECT_DELAY = 30
 class HomeConnectApplianceData:
     """Class to hold Home Connect appliance data."""

+    commands: set[CommandKey]
     events: dict[EventKey, Event]
     info: HomeAppliance
+    options: dict[OptionKey, ProgramDefinitionOption]
     programs: list[EnumerateProgram]
     settings: dict[SettingKey, GetSetting]
     status: dict[StatusKey, Status]

     def update(self, other: HomeConnectApplianceData) -> None:
         """Update data with data from other instance."""
+        self.commands.update(other.commands)
         self.events.update(other.events)
         self.info.connected = other.info.connected
+        self.options.clear()
+        self.options.update(other.options)
         self.programs.clear()
         self.programs.extend(other.programs)
         self.settings.update(other.settings)
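update() deliberately refreshes the existing containers in place (update(), or clear() plus extend()) rather than rebinding the attributes: entities keep references to these dicts and lists, so only in-place mutation lets every holder observe the new data. A compact demonstration of the difference:

    data = {"a": 1}
    alias = data               # e.g. a reference held by an entity
    data.clear()
    data.update({"b": 2})
    assert alias == {"b": 2}   # the alias follows in-place mutation

    data = {"c": 3}            # rebinding creates a new object...
    assert alias == {"b": 2}   # ...which the alias never sees
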
@@ -172,8 +180,9 @@ class HomeConnectCoordinator(
             settings = self.data[event_message_ha_id].settings
             events = self.data[event_message_ha_id].events
             for event in event_message.data.items:
-                if event.key in SettingKey:
-                    setting_key = SettingKey(event.key)
+                event_key = event.key
+                if event_key in SettingKey:
+                    setting_key = SettingKey(event_key)
                     if setting_key in settings:
                         settings[setting_key].value = event.value
                     else:
@@ -183,7 +192,16 @@ class HomeConnectCoordinator(
                             value=event.value,
                         )
                 else:
-                    events[event.key] = event
+                    if event_key in (
+                        EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM,
+                        EventKey.BSH_COMMON_ROOT_SELECTED_PROGRAM,
+                    ):
+                        await self.update_options(
+                            event_message_ha_id,
+                            event_key,
+                            ProgramKey(cast(str, event.value)),
+                        )
+                    events[event_key] = event
             self._call_event_listener(event_message)

         case EventType.EVENT:
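One detail worth pausing on: event_key in SettingKey tests a plain string against an enum class. Since Python 3.12, the in operator on an Enum accepts non-member values and reports whether any member has that value, and aiohomeconnect's key enums are string-based, so this cleanly separates setting events from everything else. A standalone sketch with a hypothetical enum, not the real SettingKey:

    from enum import StrEnum

    class Key(StrEnum):
        POWER_STATE = "BSH.Common.Setting.PowerState"

    assert "BSH.Common.Setting.PowerState" in Key      # value-based containment
    assert "BSH.Common.Root.ActiveProgram" not in Key
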
@@ -338,6 +356,7 @@ class HomeConnectCoordinator(

         programs = []
         events = {}
+        options = {}
         if appliance.type in APPLIANCES_WITH_PROGRAMS:
             try:
                 all_programs = await self.client.get_all_programs(appliance.ha_id)
@@ -351,15 +370,17 @@ class HomeConnectCoordinator(
                 )
             else:
                 programs.extend(all_programs.programs)
+                current_program_key = None
+                program_options = None
                 for program, event_key in (
-                    (
-                        all_programs.active,
-                        EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM,
-                    ),
                     (
                         all_programs.selected,
                         EventKey.BSH_COMMON_ROOT_SELECTED_PROGRAM,
                     ),
+                    (
+                        all_programs.active,
+                        EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM,
+                    ),
                 ):
                     if program and program.key:
                         events[event_key] = Event(
@@ -370,10 +391,41 @@ class HomeConnectCoordinator(
                             "",
                             program.key,
                         )
+                        current_program_key = program.key
+                        program_options = program.options
+                if current_program_key:
+                    options = await self.get_options_definitions(
+                        appliance.ha_id, current_program_key
+                    )
+                    for option in program_options or []:
+                        option_event_key = EventKey(option.key)
+                        events[option_event_key] = Event(
+                            option_event_key,
+                            option.key,
+                            0,
+                            "",
+                            "",
+                            option.value,
+                            option.name,
+                            display_value=option.display_value,
+                            unit=option.unit,
+                        )
+
+        try:
+            commands = {
+                command.key
+                for command in (
+                    await self.client.get_available_commands(appliance.ha_id)
+                ).commands
+            }
+        except HomeConnectError:
+            commands = set()

         appliance_data = HomeConnectApplianceData(
+            commands=commands,
             events=events,
             info=appliance,
+            options=options,
             programs=programs,
             settings=settings,
             status=status,
@@ -383,3 +435,61 @@ class HomeConnectCoordinator(
             appliance_data = appliance_data_to_update

         return appliance_data
+
+    async def get_options_definitions(
+        self, ha_id: str, program_key: ProgramKey
+    ) -> dict[OptionKey, ProgramDefinitionOption]:
+        """Get options with constraints for appliance."""
+        if program_key is ProgramKey.UNKNOWN:
+            return {}
+        try:
+            return {
+                option.key: option
+                for option in (
+                    await self.client.get_available_program(
+                        ha_id, program_key=program_key
+                    )
+                ).options
+                or []
+            }
+        except HomeConnectError as error:
+            _LOGGER.debug(
+                "Error fetching options for %s: %s",
+                ha_id,
+                error
+                if isinstance(error, HomeConnectApiError)
+                else type(error).__name__,
+            )
+            return {}
+
+    async def update_options(
+        self, ha_id: str, event_key: EventKey, program_key: ProgramKey
+    ) -> None:
+        """Update options for appliance."""
+        options = self.data[ha_id].options
+        events = self.data[ha_id].events
+        options_to_notify = options.copy()
+        options.clear()
+        options.update(await self.get_options_definitions(ha_id, program_key))
+
+        for option in options.values():
+            option_value = option.constraints.default if option.constraints else None
+            if option_value is not None:
+                option_event_key = EventKey(option.key)
+                events[option_event_key] = Event(
+                    option_event_key,
+                    option.key.value,
+                    0,
+                    "",
+                    "",
+                    option_value,
+                    option.name,
+                    unit=option.unit,
+                )
+        options_to_notify.update(options)
+        for option_key in options_to_notify:
+            for listener in self.context_listeners.get(
+                (ha_id, EventKey(option_key)),
+                [],
+            ):
+                listener()
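A subtlety in update_options() above: options_to_notify starts as a copy of the old option map and is then merged with the new one, so listeners fire for options that vanished as well as for options that appeared; an option entity whose key is gone flips to unavailable on that callback. A reduced model of the notified key set:

    old = {"spin_speed": 1400, "temperature": 40}
    new = {"temperature": 60, "i_dos": True}

    notify = old.copy()   # every option that existed before...
    notify.update(new)    # ...plus every option that exists now
    assert set(notify) == {"spin_speed", "temperature", "i_dos"}
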

homeassistant/components/home_connect/entity.py
@@ -1,17 +1,22 @@
 """Home Connect entity base class."""

 from abc import abstractmethod
+import contextlib
 import logging
+from typing import cast

-from aiohomeconnect.model import EventKey
+from aiohomeconnect.model import EventKey, OptionKey
+from aiohomeconnect.model.error import ActiveProgramNotSetError, HomeConnectError

 from homeassistant.core import callback
+from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.entity import EntityDescription
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from .const import DOMAIN
 from .coordinator import HomeConnectApplianceData, HomeConnectCoordinator
+from .utils import get_dict_from_home_connect_error

 _LOGGER = logging.getLogger(__name__)
@@ -60,3 +65,59 @@ class HomeConnectEntity(CoordinatorEntity[HomeConnectCoordinator]):
         return (
             self.appliance.info.connected and self._attr_available and super().available
         )
+
+
+class HomeConnectOptionEntity(HomeConnectEntity):
+    """Class for entities that represent program options."""
+
+    @property
+    def available(self) -> bool:
+        """Return True if entity is available."""
+        return super().available and self.bsh_key in self.appliance.options
+
+    @property
+    def option_value(self) -> str | int | float | bool | None:
+        """Return the state of the entity."""
+        if event := self.appliance.events.get(EventKey(self.bsh_key)):
+            return event.value
+        return None
+
+    async def async_set_option(self, value: str | float | bool) -> None:
+        """Set an option for the entity."""
+        try:
+            # We try to set the active program option first;
+            # if that fails, we try to set the selected program option
+            with contextlib.suppress(ActiveProgramNotSetError):
+                await self.coordinator.client.set_active_program_option(
+                    self.appliance.info.ha_id,
+                    option_key=self.bsh_key,
+                    value=value,
+                )
+                _LOGGER.debug(
+                    "Updated %s for the active program, new state: %s",
+                    self.entity_id,
+                    self.state,
+                )
+                return
+
+            await self.coordinator.client.set_selected_program_option(
+                self.appliance.info.ha_id,
+                option_key=self.bsh_key,
+                value=value,
+            )
+            _LOGGER.debug(
+                "Updated %s for the selected program, new state: %s",
+                self.entity_id,
+                self.state,
+            )
+        except HomeConnectError as err:
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="set_option",
+                translation_placeholders=get_dict_from_home_connect_error(err),
+            ) from err
+
+    @property
+    def bsh_key(self) -> OptionKey:
+        """Return the BSH key."""
+        return cast(OptionKey, self.entity_description.key)
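HomeConnectOptionEntity is the base class the option-entity factories above return. A hedged, in-tree sketch of a concrete subclass (the class name is hypothetical; a real platform would pair it with an entity description whose key is an OptionKey):

    from typing import Any, cast

    from homeassistant.components.switch import SwitchEntity

    from .entity import HomeConnectOptionEntity

    class HomeConnectOptionSwitchEntity(HomeConnectOptionEntity, SwitchEntity):
        """Expose one boolean program option as a switch."""

        @property
        def is_on(self) -> bool | None:
            # option_value reads the cached Event for this option's key.
            return cast(bool | None, self.option_value)

        async def async_turn_on(self, **kwargs: Any) -> None:
            await self.async_set_option(True)

        async def async_turn_off(self, **kwargs: Any) -> None:
            await self.async_set_option(False)
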
Some files were not shown because too many files have changed in this diff.