forked from home-assistant/core
Compare commits
347 Commits
@@ -94,7 +94,7 @@ jobs:
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v8
uses: dawidd6/action-download-artifact@v9
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/frontend

@@ -105,7 +105,7 @@ jobs:
- name: Download nightly wheels of intents
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v8
uses: dawidd6/action-download-artifact@v9
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/intents-package

@@ -509,7 +509,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
uses: docker/build-push-action@0adf9959216b96bec444f325f1e493d4aa344497 # v6.14.0
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile

@@ -522,7 +522,7 @@ jobs:
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@0adf9959216b96bec444f325f1e493d4aa344497 # v6.14.0
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile

@@ -531,7 +531,7 @@ jobs:
- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 11
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 9
HA_SHORT_VERSION: "2025.3"
HA_SHORT_VERSION: "2025.4"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version

@@ -89,6 +89,7 @@ jobs:
test_groups: ${{ steps.info.outputs.test_groups }}
tests_glob: ${{ steps.info.outputs.tests_glob }}
tests: ${{ steps.info.outputs.tests }}
lint_only: ${{ steps.info.outputs.lint_only }}
skip_coverage: ${{ steps.info.outputs.skip_coverage }}
runs-on: ubuntu-24.04
steps:

@@ -142,6 +143,7 @@ jobs:
test_group_count=10
tests="[]"
tests_glob=""
lint_only=""
skip_coverage=""
if [[ "${{ steps.integrations.outputs.changes }}" != "[]" ]];

@@ -192,6 +194,17 @@ jobs:
test_full_suite="true"
fi
if [[ "${{ github.event.inputs.lint-only }}" == "true" ]] \
|| [[ "${{ github.event.inputs.pylint-only }}" == "true" ]] \
|| [[ "${{ github.event.inputs.mypy-only }}" == "true" ]] \
|| [[ "${{ github.event.inputs.audit-licenses-only }}" == "true" ]] \
|| [[ "${{ github.event_name }}" == "push" \
&& "${{ github.event.repository.full_name }}" != "home-assistant/core" ]];
then
lint_only="true"
skip_coverage="true"
fi
if [[ "${{ github.event.inputs.skip-coverage }}" == "true" ]] \
|| [[ "${{ contains(github.event.pull_request.labels.*.name, 'ci-skip-coverage') }}" == "true" ]];
then

@@ -217,6 +230,8 @@ jobs:
echo "tests=${tests}" >> $GITHUB_OUTPUT
echo "tests_glob: ${tests_glob}"
echo "tests_glob=${tests_glob}" >> $GITHUB_OUTPUT
echo "lint_only": ${lint_only}
echo "lint_only=${lint_only}" >> $GITHUB_OUTPUT
echo "skip_coverage: ${skip_coverage}"
echo "skip_coverage=${skip_coverage}" >> $GITHUB_OUTPUT

@@ -240,7 +255,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.1
uses: actions/cache@v4.2.2
with:
path: venv
key: >-

@@ -256,7 +271,7 @@ jobs:
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.1
uses: actions/cache@v4.2.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true

@@ -286,7 +301,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: venv
fail-on-cache-miss: true

@@ -295,7 +310,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true

@@ -326,7 +341,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: venv
fail-on-cache-miss: true

@@ -335,7 +350,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true

@@ -366,7 +381,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: venv
fail-on-cache-miss: true

@@ -375,7 +390,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true

@@ -482,7 +497,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.1
uses: actions/cache@v4.2.2
with:
path: venv
key: >-

@@ -490,7 +505,7 @@ jobs:
needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@v4.2.1
uses: actions/cache@v4.2.2
with:
path: ${{ env.UV_CACHE_DIR }}
key: >-

@@ -578,7 +593,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: venv
fail-on-cache-miss: true

@@ -611,7 +626,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: venv
fail-on-cache-miss: true

@@ -649,7 +664,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: venv
fail-on-cache-miss: true

@@ -692,7 +707,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: venv
fail-on-cache-miss: true

@@ -739,7 +754,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: venv
fail-on-cache-miss: true

@@ -791,7 +806,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: venv
fail-on-cache-miss: true

@@ -799,7 +814,7 @@ jobs:
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
uses: actions/cache@v4.2.1
uses: actions/cache@v4.2.2
with:
path: .mypy_cache
key: >-

@@ -829,11 +844,7 @@ jobs:
prepare-pytest-full:
runs-on: ubuntu-24.04
if: |
(github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
needs.info.outputs.lint_only != 'true'
&& needs.info.outputs.test_full_suite == 'true'
needs:
- info

@@ -865,7 +876,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: venv
fail-on-cache-miss: true

@@ -886,11 +897,7 @@ jobs:
pytest-full:
runs-on: ubuntu-24.04
if: |
(github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
needs.info.outputs.lint_only != 'true'
&& needs.info.outputs.test_full_suite == 'true'
needs:
- info

@@ -929,7 +936,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: venv
fail-on-cache-miss: true

@@ -962,6 +969,7 @@ jobs:
if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
cov_params+=(--cov="homeassistant")
cov_params+=(--cov-report=xml)
cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
fi
echo "Test group ${{ matrix.group }}: $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt)"

@@ -992,6 +1000,12 @@ jobs:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
overwrite: true
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.1
with:
name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
path: junit.xml
- name: Remove pytest_buckets
run: rm pytest_buckets.txt
- name: Check dirty

@@ -1009,11 +1023,7 @@ jobs:
MYSQL_ROOT_PASSWORD: password
options: --health-cmd="mysqladmin ping -uroot -ppassword" --health-interval=5s --health-timeout=2s --health-retries=3
if: |
(github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
needs.info.outputs.lint_only != 'true'
&& needs.info.outputs.mariadb_groups != '[]'
needs:
- info

@@ -1051,7 +1061,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: venv
fail-on-cache-miss: true

@@ -1088,6 +1098,7 @@ jobs:
cov_params+=(--cov="homeassistant.components.recorder")
cov_params+=(--cov-report=xml)
cov_params+=(--cov-report=term-missing)
cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
fi
python3 -b -X dev -m pytest \

@@ -1122,6 +1133,13 @@ jobs:
steps.pytest-partial.outputs.mariadb }}
path: coverage.xml
overwrite: true
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.1
with:
name: test-results-mariadb-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.mariadb }}
path: junit.xml
- name: Check dirty
run: |
./script/check_dirty

@@ -1137,11 +1155,7 @@ jobs:
POSTGRES_PASSWORD: password
options: --health-cmd="pg_isready -hlocalhost -Upostgres" --health-interval=5s --health-timeout=2s --health-retries=3
if: |
(github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
needs.info.outputs.lint_only != 'true'
&& needs.info.outputs.postgresql_groups != '[]'
needs:
- info

@@ -1181,7 +1195,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: venv
fail-on-cache-miss: true

@@ -1218,6 +1232,7 @@ jobs:
cov_params+=(--cov="homeassistant.components.recorder")
cov_params+=(--cov-report=xml)
cov_params+=(--cov-report=term-missing)
cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
fi
python3 -b -X dev -m pytest \

@@ -1253,6 +1268,13 @@ jobs:
steps.pytest-partial.outputs.postgresql }}
path: coverage.xml
overwrite: true
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.1
with:
name: test-results-postgres-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.postgresql }}
path: junit.xml
- name: Check dirty
run: |
./script/check_dirty

@@ -1276,7 +1298,7 @@ jobs:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'true'
uses: codecov/codecov-action@v5.3.1
uses: codecov/codecov-action@v5.4.0
with:
fail_ci_if_error: true
flags: full-suite

@@ -1285,11 +1307,7 @@ jobs:
pytest-partial:
runs-on: ubuntu-24.04
if: |
(github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
needs.info.outputs.lint_only != 'true'
&& needs.info.outputs.tests_glob
&& needs.info.outputs.test_full_suite == 'false'
needs:

@@ -1328,7 +1346,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.2
with:
path: venv
fail-on-cache-miss: true

@@ -1365,6 +1383,7 @@ jobs:
cov_params+=(--cov="homeassistant.components.${{ matrix.group }}")
cov_params+=(--cov-report=xml)
cov_params+=(--cov-report=term-missing)
cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
fi
python3 -b -X dev -m pytest \

@@ -1394,6 +1413,12 @@ jobs:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
overwrite: true
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.1
with:
name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
path: junit.xml
- name: Check dirty
run: |
./script/check_dirty

@@ -1415,7 +1440,32 @@ jobs:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'false'
uses: codecov/codecov-action@v5.3.1
uses: codecov/codecov-action@v5.4.0
with:
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}

upload-test-results:
name: Upload test results to Codecov
# codecov/test-results-action currently doesn't support tokenless uploads
# therefore we can't run it on forks
if: ${{ (github.event_name != 'pull_request' || !github.event.pull_request.head.repo.fork) && needs.info.outputs.skip_coverage != 'true' && !cancelled() }}
runs-on: ubuntu-24.04
needs:
- info
- pytest-partial
- pytest-full
- pytest-postgres
- pytest-mariadb
timeout-minutes: 10
steps:
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.9
with:
pattern: test-results-*
- name: Upload test results to Codecov
uses: codecov/test-results-action@v1
with:
fail_ci_if_error: true
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}
@@ -159,7 +159,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
uses: home-assistant/wheels@2024.11.0
uses: home-assistant/wheels@2025.02.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2

@@ -219,7 +219,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
uses: home-assistant/wheels@2024.11.0
uses: home-assistant/wheels@2025.02.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -69,6 +69,7 @@ test-reports/
test-results.xml
test-output.xml
pytest-*.txt
junit.xml

# Translations
*.mo
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.9.7
rev: v0.9.8
hooks:
- id: ruff
args:
@@ -136,6 +136,7 @@ homeassistant.components.clicksend.*
homeassistant.components.climate.*
homeassistant.components.cloud.*
homeassistant.components.co2signal.*
homeassistant.components.comelit.*
homeassistant.components.command_line.*
homeassistant.components.config.*
homeassistant.components.configurator.*

@@ -396,6 +397,7 @@ homeassistant.components.pure_energie.*
homeassistant.components.purpleair.*
homeassistant.components.pushbullet.*
homeassistant.components.pvoutput.*
homeassistant.components.pyload.*
homeassistant.components.python_script.*
homeassistant.components.qbus.*
homeassistant.components.qnap_qsw.*

@@ -528,6 +530,7 @@ homeassistant.components.vallox.*
homeassistant.components.valve.*
homeassistant.components.velbus.*
homeassistant.components.vlc_telnet.*
homeassistant.components.vodafone_station.*
homeassistant.components.wake_on_lan.*
homeassistant.components.wake_word.*
homeassistant.components.wallbox.*
@@ -1529,8 +1529,8 @@ build.json @home-assistant/supervisor
/tests/components/tedee/ @patrickhilker @zweckj
/homeassistant/components/tellduslive/ @fredrike
/tests/components/tellduslive/ @fredrike
/homeassistant/components/template/ @PhracturedBlue @home-assistant/core
/tests/components/template/ @PhracturedBlue @home-assistant/core
/homeassistant/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
/tests/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
/homeassistant/components/tesla_fleet/ @Bre77
/tests/components/tesla_fleet/ @Bre77
/homeassistant/components/tesla_wall_connector/ @einarhauks
@@ -81,6 +81,7 @@ from .helpers import (
entity,
entity_registry,
floor_registry,
frame,
issue_registry,
label_registry,
recorder,

@@ -441,9 +442,10 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
if DATA_REGISTRIES_LOADED in hass.data:
return
hass.data[DATA_REGISTRIES_LOADED] = None
translation.async_setup(hass)
entity.async_setup(hass)
frame.async_setup(hass)
template.async_setup(hass)
translation.async_setup(hass)
await asyncio.gather(
create_eager_task(get_internal_store_manager(hass).async_initialize()),
create_eager_task(area_registry.async_load(hass)),

@@ -664,11 +666,10 @@ def _create_log_file(
err_handler = _RotatingFileHandlerWithoutShouldRollOver(
err_log_path, backupCount=1
)
try:
err_handler.doRollover()
except OSError as err:
_LOGGER.error("Error rolling over log file: %s", err)
try:
err_handler.doRollover()
except OSError as err:
_LOGGER.error("Error rolling over log file: %s", err)
return err_handler
@@ -2,6 +2,7 @@
from __future__ import annotations
from decimal import Decimal
import logging
from typing import Any

@@ -14,6 +15,7 @@ from homeassistant.components.climate import (
FAN_MEDIUM,
ClimateEntity,
ClimateEntityFeature,
HVACAction,
HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, UnitOfTemperature

@@ -49,6 +51,14 @@ ADVANTAGE_AIR_MYTEMP_ENABLED = "climateControlModeEnabled"
ADVANTAGE_AIR_HEAT_TARGET = "myAutoHeatTargetTemp"
ADVANTAGE_AIR_COOL_TARGET = "myAutoCoolTargetTemp"
ADVANTAGE_AIR_MYFAN = "autoAA"
ADVANTAGE_AIR_MYAUTO_MODE_SET = "myAutoModeCurrentSetMode"
HVAC_ACTIONS = {
"cool": HVACAction.COOLING,
"heat": HVACAction.HEATING,
"vent": HVACAction.FAN,
"dry": HVACAction.DRYING,
}
HVAC_MODES = [
HVACMode.OFF,

@@ -175,6 +185,17 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
return ADVANTAGE_AIR_HVAC_MODES.get(self._ac["mode"])
return HVACMode.OFF
@property
def hvac_action(self) -> HVACAction | None:
"""Return the current running HVAC action."""
if self._ac["state"] == ADVANTAGE_AIR_STATE_OFF:
return HVACAction.OFF
if self._ac["mode"] == "myauto":
return HVAC_ACTIONS.get(
self._ac.get(ADVANTAGE_AIR_MYAUTO_MODE_SET, HVACAction.OFF)
)
return HVAC_ACTIONS.get(self._ac["mode"])
@property
def fan_mode(self) -> str | None:
"""Return the current fan modes."""

@@ -273,6 +294,22 @@ class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity):
return HVACMode.HEAT_COOL
return HVACMode.OFF
@property
def hvac_action(self) -> HVACAction | None:
"""Return the HVAC action, inheriting from master AC if zone is open but idle if air is <= 5%."""
if self._ac["state"] == ADVANTAGE_AIR_STATE_OFF:
return HVACAction.OFF
master_action = HVAC_ACTIONS.get(self._ac["mode"], HVACAction.OFF)
if self._ac["mode"] == "myauto":
master_action = HVAC_ACTIONS.get(
str(self._ac.get(ADVANTAGE_AIR_MYAUTO_MODE_SET)), HVACAction.OFF
)
if self._zone["state"] == ADVANTAGE_AIR_STATE_OPEN:
if self._zone["value"] <= Decimal(5):
return HVACAction.IDLE
return master_action
return HVACAction.OFF
@property
def current_temperature(self) -> float | None:
"""Return the current temperature."""

@@ -7,3 +7,4 @@ ADVANTAGE_AIR_STATE_CLOSE = "close"
ADVANTAGE_AIR_STATE_ON = "on"
ADVANTAGE_AIR_STATE_OFF = "off"
ADVANTAGE_AIR_AUTOFAN_ENABLED = "aaAutoFanModeEnabled"
ADVANTAGE_AIR_NIGHT_MODE_ENABLED = "quietNightModeEnabled"
@@ -41,7 +41,7 @@ async def async_setup_entry(
entities.append(
AdvantageAirThingCover(instance, thing, CoverDeviceClass.BLIND)
)
elif thing["channelDipState"] == 3: # 3 = "Garage door"
elif thing["channelDipState"] in [3, 10]: # 3 & 10 = "Garage door"
entities.append(
AdvantageAirThingCover(instance, thing, CoverDeviceClass.GARAGE)
)
@@ -9,6 +9,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AdvantageAirDataConfigEntry
from .const import (
ADVANTAGE_AIR_AUTOFAN_ENABLED,
ADVANTAGE_AIR_NIGHT_MODE_ENABLED,
ADVANTAGE_AIR_STATE_OFF,
ADVANTAGE_AIR_STATE_ON,
)

@@ -32,6 +33,8 @@ async def async_setup_entry(
entities.append(AdvantageAirFreshAir(instance, ac_key))
if ADVANTAGE_AIR_AUTOFAN_ENABLED in ac_device["info"]:
entities.append(AdvantageAirMyFan(instance, ac_key))
if ADVANTAGE_AIR_NIGHT_MODE_ENABLED in ac_device["info"]:
entities.append(AdvantageAirNightMode(instance, ac_key))
if things := instance.coordinator.data.get("myThings"):
entities.extend(
AdvantageAirRelay(instance, thing)

@@ -93,6 +96,32 @@ class AdvantageAirMyFan(AdvantageAirAcEntity, SwitchEntity):
await self.async_update_ac({ADVANTAGE_AIR_AUTOFAN_ENABLED: False})

class AdvantageAirNightMode(AdvantageAirAcEntity, SwitchEntity):
"""Representation of Advantage 'MySleep$aver' Mode control."""
_attr_icon = "mdi:weather-night"
_attr_name = "MySleep$aver"
_attr_device_class = SwitchDeviceClass.SWITCH
def __init__(self, instance: AdvantageAirData, ac_key: str) -> None:
"""Initialize an Advantage Air Night Mode control."""
super().__init__(instance, ac_key)
self._attr_unique_id += "-nightmode"
@property
def is_on(self) -> bool:
"""Return the Night Mode status."""
return self._ac[ADVANTAGE_AIR_NIGHT_MODE_ENABLED]
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn Night Mode on."""
await self.async_update_ac({ADVANTAGE_AIR_NIGHT_MODE_ENABLED: True})
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn Night Mode off."""
await self.async_update_ac({ADVANTAGE_AIR_NIGHT_MODE_ENABLED: False})

class AdvantageAirRelay(AdvantageAirThingEntity, SwitchEntity):
"""Representation of Advantage Air Thing."""
@@ -8,7 +8,7 @@ from python_homeassistant_analytics import (
HomeassistantAnalyticsClient,
HomeassistantAnalyticsConnectionError,
)
from python_homeassistant_analytics.models import IntegrationType
from python_homeassistant_analytics.models import Environment, IntegrationType
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow

@@ -81,7 +81,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
)
try:
addons = await client.get_addons()
integrations = await client.get_integrations()
integrations = await client.get_integrations(Environment.NEXT)
custom_integrations = await client.get_custom_integrations()
except HomeassistantAnalyticsConnectionError:
LOGGER.exception("Error connecting to Home Assistant analytics")

@@ -165,7 +165,7 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlow):
)
try:
addons = await client.get_addons()
integrations = await client.get_integrations()
integrations = await client.get_integrations(Environment.NEXT)
custom_integrations = await client.get_custom_integrations()
except HomeassistantAnalyticsConnectionError:
LOGGER.exception("Error connecting to Home Assistant analytics")
@@ -12,7 +12,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from .const import DOMAIN, LOGGER
from .const import CONF_CHAT_MODEL, DOMAIN, LOGGER, RECOMMENDED_CHAT_MODEL
PLATFORMS = (Platform.CONVERSATION,)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

@@ -26,12 +26,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
)
try:
await client.messages.create(
model="claude-3-haiku-20240307",
max_tokens=1,
messages=[{"role": "user", "content": "Hi"}],
timeout=10.0,
)
model_id = entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
model = await client.models.retrieve(model_id=model_id, timeout=10.0)
LOGGER.debug("Anthropic model: %s", model.display_name)
except anthropic.AuthenticationError as err:
LOGGER.error("Invalid API key: %s", err)
return False

@@ -63,12 +63,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
client = await hass.async_add_executor_job(
partial(anthropic.AsyncAnthropic, api_key=data[CONF_API_KEY])
)
await client.messages.create(
model="claude-3-haiku-20240307",
max_tokens=1,
messages=[{"role": "user", "content": "Hi"}],
timeout=10.0,
)
await client.models.list(timeout=10.0)

class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):

@@ -305,7 +305,9 @@ class AnthropicConversationEntity(
intent_response = intent.IntentResponse(language=user_input.language)
intent_response.async_set_speech(response_content.content or "")
return conversation.ConversationResult(
response=intent_response, conversation_id=chat_log.conversation_id
response=intent_response,
conversation_id=chat_log.conversation_id,
continue_conversation=chat_log.continue_conversation,
)
async def _async_entry_update_listener(
@@ -117,7 +117,7 @@ async def async_pipeline_from_audio_stream(
"""
with chat_session.async_get_chat_session(hass, conversation_id) as session:
pipeline_input = PipelineInput(
conversation_id=session.conversation_id,
session=session,
device_id=device_id,
stt_metadata=stt_metadata,
stt_stream=stt_stream,

@@ -19,14 +19,7 @@ import wave
import hass_nabucasa
import voluptuous as vol
from homeassistant.components import (
conversation,
media_source,
stt,
tts,
wake_word,
websocket_api,
)
from homeassistant.components import conversation, stt, tts, wake_word, websocket_api
from homeassistant.components.tts import (
generate_media_source_id as tts_generate_media_source_id,
)

@@ -96,6 +89,9 @@ ENGINE_LANGUAGE_PAIRS = (
)
KEY_ASSIST_PIPELINE: HassKey[PipelineData] = HassKey(DOMAIN)
KEY_PIPELINE_CONVERSATION_DATA: HassKey[dict[str, PipelineConversationData]] = HassKey(
"pipeline_conversation_data"
)
def validate_language(data: dict[str, Any]) -> Any:

@@ -566,8 +562,7 @@ class PipelineRun:
id: str = field(default_factory=ulid_util.ulid_now)
stt_provider: stt.SpeechToTextEntity | stt.Provider = field(init=False, repr=False)
tts_engine: str = field(init=False, repr=False)
tts_options: dict | None = field(init=False, default=None)
tts_stream: tts.ResultStream | None = field(init=False, default=None)
wake_word_entity_id: str | None = field(init=False, default=None, repr=False)
wake_word_entity: wake_word.WakeWordDetectionEntity = field(init=False, repr=False)

@@ -590,6 +585,12 @@ class PipelineRun:
_device_id: str | None = None
"""Optional device id set during run start."""
_conversation_data: PipelineConversationData | None = None
"""Data tied to the conversation ID."""
_intent_agent_only = False
"""If request should only be handled by agent, ignoring sentence triggers and local processing."""
def __post_init__(self) -> None:
"""Set language for pipeline."""
self.language = self.pipeline.language or self.hass.config.language

@@ -639,13 +640,18 @@ class PipelineRun:
self._device_id = device_id
self._start_debug_recording_thread()
data = {
data: dict[str, Any] = {
"pipeline": self.pipeline.id,
"language": self.language,
"conversation_id": conversation_id,
}
if self.runner_data is not None:
data["runner_data"] = self.runner_data
if self.tts_stream:
data["tts_output"] = {
"url": self.tts_stream.url,
"mime_type": self.tts_stream.content_type,
}
self.process_event(PipelineEvent(PipelineEventType.RUN_START, data))

@@ -1007,19 +1013,36 @@ class PipelineRun:
yield chunk.audio
async def prepare_recognize_intent(self) -> None:
async def prepare_recognize_intent(self, session: chat_session.ChatSession) -> None:
"""Prepare recognizing an intent."""
agent_info = conversation.async_get_agent_info(
self.hass,
self.pipeline.conversation_engine or conversation.HOME_ASSISTANT_AGENT,
self._conversation_data = async_get_pipeline_conversation_data(
self.hass, session
)
if agent_info is None:
engine = self.pipeline.conversation_engine or "default"
raise IntentRecognitionError(
code="intent-not-supported",
message=f"Intent recognition engine {engine} is not found",
if self._conversation_data.continue_conversation_agent is not None:
agent_info = conversation.async_get_agent_info(
self.hass, self._conversation_data.continue_conversation_agent
)
self._conversation_data.continue_conversation_agent = None
if agent_info is None:
raise IntentRecognitionError(
code="intent-agent-not-found",
message=f"Intent recognition engine {self._conversation_data.continue_conversation_agent} asked for follow-up but is no longer found",
)
self._intent_agent_only = True
else:
agent_info = conversation.async_get_agent_info(
self.hass,
self.pipeline.conversation_engine or conversation.HOME_ASSISTANT_AGENT,
)
if agent_info is None:
engine = self.pipeline.conversation_engine or "default"
raise IntentRecognitionError(
code="intent-not-supported",
message=f"Intent recognition engine {engine} is not found",
)
self.intent_agent = agent_info.id

@@ -1031,7 +1054,7 @@ class PipelineRun:
conversation_extra_system_prompt: str | None,
) -> str:
"""Run intent recognition portion of pipeline. Returns text to speak."""
if self.intent_agent is None:
if self.intent_agent is None or self._conversation_data is None:
raise RuntimeError("Recognize intent was not prepared")
if self.pipeline.conversation_language == MATCH_ALL:

@@ -1078,7 +1101,7 @@ class PipelineRun:
agent_id = self.intent_agent
processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
intent_response: intent.IntentResponse | None = None
if not processed_locally:
if not processed_locally and not self._intent_agent_only:
# Sentence triggers override conversation agent
if (
trigger_response_text

@@ -1195,6 +1218,9 @@ class PipelineRun:
)
)
if conversation_result.continue_conversation:
self._conversation_data.continue_conversation_agent = agent_id
return speech
async def prepare_text_to_speech(self) -> None:

@@ -1217,36 +1243,31 @@ class PipelineRun:
tts_options[tts.ATTR_PREFERRED_SAMPLE_BYTES] = SAMPLE_WIDTH
try:
options_supported = await tts.async_support_options(
self.hass,
engine,
self.pipeline.tts_language,
tts_options,
self.tts_stream = tts.async_create_stream(
hass=self.hass,
engine=engine,
language=self.pipeline.tts_language,
options=tts_options,
)
except HomeAssistantError as err:
raise TextToSpeechError(
code="tts-not-supported",
message=f"Text-to-speech engine '{engine}' not found",
) from err
if not options_supported:
raise TextToSpeechError(
code="tts-not-supported",
message=(
f"Text-to-speech engine {engine} "
f"does not support language {self.pipeline.tts_language} or options {tts_options}"
f"does not support language {self.pipeline.tts_language} or options {tts_options}:"
f" {err}"
),
)
self.tts_engine = engine
self.tts_options = tts_options
) from err
async def text_to_speech(self, tts_input: str) -> None:
"""Run text-to-speech portion of pipeline."""
assert self.tts_stream is not None
self.process_event(
PipelineEvent(
PipelineEventType.TTS_START,
{
"engine": self.tts_engine,
"engine": self.tts_stream.engine,
"language": self.pipeline.tts_language,
"voice": self.pipeline.tts_voice,
"tts_input": tts_input,

@@ -1259,14 +1280,9 @@ class PipelineRun:
tts_media_id = tts_generate_media_source_id(
self.hass,
tts_input,
engine=self.tts_engine,
language=self.pipeline.tts_language,
options=self.tts_options,
)
tts_media = await media_source.async_resolve_media(
self.hass,
tts_media_id,
None,
engine=self.tts_stream.engine,
language=self.tts_stream.language,
options=self.tts_stream.options,
)
except Exception as src_error:
_LOGGER.exception("Unexpected error during text-to-speech")

@@ -1275,10 +1291,12 @@ class PipelineRun:
message="Unexpected error during text-to-speech",
) from src_error
_LOGGER.debug("TTS result %s", tts_media)
self.tts_stream.async_set_message(tts_input)
tts_output = {
"media_id": tts_media_id,
**asdict(tts_media),
"url": self.tts_stream.url,
"mime_type": self.tts_stream.content_type,
}
self.process_event(

@@ -1458,8 +1476,8 @@ class PipelineInput:
run: PipelineRun
conversation_id: str
"""Identifier for the conversation."""
session: chat_session.ChatSession
"""Session for the conversation."""
stt_metadata: stt.SpeechMetadata | None = None
"""Metadata of stt input audio. Required when start_stage = stt."""

@@ -1484,7 +1502,9 @@ class PipelineInput:
async def execute(self) -> None:
"""Run pipeline."""
self.run.start(conversation_id=self.conversation_id, device_id=self.device_id)
self.run.start(
conversation_id=self.session.conversation_id, device_id=self.device_id
)
current_stage: PipelineStage | None = self.run.start_stage
stt_audio_buffer: list[EnhancedAudioChunk] = []
stt_processed_stream: AsyncIterable[EnhancedAudioChunk] | None = None

@@ -1568,7 +1588,7 @@ class PipelineInput:
assert intent_input is not None
tts_input = await self.run.recognize_intent(
intent_input,
self.conversation_id,
self.session.conversation_id,
self.device_id,
self.conversation_extra_system_prompt,
)

@@ -1652,7 +1672,7 @@ class PipelineInput:
<= PIPELINE_STAGE_ORDER.index(PipelineStage.INTENT)
<= end_stage_index
):
prepare_tasks.append(self.run.prepare_recognize_intent())
prepare_tasks.append(self.run.prepare_recognize_intent(self.session))
if (
start_stage_index

@@ -1931,7 +1951,7 @@ class PipelineRunDebug:
class PipelineStore(Store[SerializedPipelineStorageCollection]):
"""Store entity registry data."""
"""Store pipeline data."""
async def _async_migrate_func(
self,

@@ -2013,3 +2033,37 @@ async def async_run_migrations(hass: HomeAssistant) -> None:
for pipeline, attr_updates in updates:
await async_update_pipeline(hass, pipeline, **attr_updates)

@dataclass
class PipelineConversationData:
"""Hold data for the duration of a conversation."""
continue_conversation_agent: str | None = None
"""The agent that requested the conversation to be continued."""

@callback
def async_get_pipeline_conversation_data(
hass: HomeAssistant, session: chat_session.ChatSession
) -> PipelineConversationData:
"""Get the pipeline data for a specific conversation."""
all_conversation_data = hass.data.get(KEY_PIPELINE_CONVERSATION_DATA)
if all_conversation_data is None:
all_conversation_data = {}
hass.data[KEY_PIPELINE_CONVERSATION_DATA] = all_conversation_data
data = all_conversation_data.get(session.conversation_id)
if data is not None:
return data
@callback
def do_cleanup() -> None:
"""Handle cleanup."""
all_conversation_data.pop(session.conversation_id)
session.async_on_cleanup(do_cleanup)
data = all_conversation_data[session.conversation_id] = PipelineConversationData()
return data

@@ -239,7 +239,7 @@ async def websocket_run(
with chat_session.async_get_chat_session(
hass, msg.get("conversation_id")
) as session:
input_args["conversation_id"] = session.conversation_id
input_args["session"] = session
pipeline_input = PipelineInput(**input_args)
try:
@@ -13,7 +13,11 @@ from azure.storage.blob.aio import ContainerClient
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
|
||||
from homeassistant.exceptions import (
|
||||
ConfigEntryAuthFailed,
|
||||
ConfigEntryError,
|
||||
ConfigEntryNotReady,
|
||||
)
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
|
||||
from .const import (
|
||||
@@ -52,7 +56,7 @@ async def async_setup_entry(
|
||||
translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
|
||||
) from err
|
||||
except ClientAuthenticationError as err:
|
||||
raise ConfigEntryError(
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_auth",
|
||||
translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
|
||||
|
||||
@@ -141,7 +141,7 @@ class AzureStorageBackupAgent(BackupAgent):
|
||||
"""Delete a backup file."""
|
||||
blob = await self._find_blob_by_backup_id(backup_id)
|
||||
if blob is None:
|
||||
return
|
||||
raise BackupNotFound(f"Backup {backup_id} not found")
|
||||
await self._client.delete_blob(blob.name)
|
||||
|
||||
@handle_backup_errors
|
||||
@@ -163,11 +163,11 @@ class AzureStorageBackupAgent(BackupAgent):
|
||||
self,
|
||||
backup_id: str,
|
||||
**kwargs: Any,
|
||||
) -> AgentBackup | None:
|
||||
) -> AgentBackup:
|
||||
"""Return a backup."""
|
||||
blob = await self._find_blob_by_backup_id(backup_id)
|
||||
if blob is None:
|
||||
return None
|
||||
raise BackupNotFound(f"Backup {backup_id} not found")
|
||||
|
||||
return AgentBackup.from_dict(json.loads(blob.metadata["backup_metadata"]))
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Config flow for Azure Storage integration."""
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -26,6 +27,26 @@ _LOGGER = logging.getLogger(__name__)
|
||||
class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for azure storage."""
|
||||
|
||||
def get_account_url(self, account_name: str) -> str:
|
||||
"""Get the account URL."""
|
||||
return f"https://{account_name}.blob.core.windows.net/"
|
||||
|
||||
async def validate_config(
|
||||
self, container_client: ContainerClient
|
||||
) -> dict[str, str]:
|
||||
"""Validate the configuration."""
|
||||
errors: dict[str, str] = {}
|
||||
try:
|
||||
await container_client.exists()
|
||||
except ResourceNotFoundError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except ClientAuthenticationError:
|
||||
errors[CONF_STORAGE_ACCOUNT_KEY] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unknown exception occurred")
|
||||
errors["base"] = "unknown"
|
||||
return errors
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -38,20 +59,13 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
{CONF_ACCOUNT_NAME: user_input[CONF_ACCOUNT_NAME]}
|
||||
)
|
||||
container_client = ContainerClient(
|
||||
account_url=f"https://{user_input[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
|
||||
account_url=self.get_account_url(user_input[CONF_ACCOUNT_NAME]),
|
||||
container_name=user_input[CONF_CONTAINER_NAME],
|
||||
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
|
||||
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
|
||||
)
|
||||
try:
|
||||
await container_client.exists()
|
||||
except ResourceNotFoundError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except ClientAuthenticationError:
|
||||
errors[CONF_STORAGE_ACCOUNT_KEY] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unknown exception occurred")
|
||||
errors["base"] = "unknown"
|
||||
errors = await self.validate_config(container_client)
|
||||
|
||||
if not errors:
|
||||
return self.async_create_entry(
|
||||
title=f"{user_input[CONF_ACCOUNT_NAME]}/{user_input[CONF_CONTAINER_NAME]}",
|
||||
@@ -70,3 +84,77 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Perform reauth upon an API authentication error."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm reauth dialog."""
|
||||
errors: dict[str, str] = {}
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
|
||||
if user_input is not None:
|
||||
container_client = ContainerClient(
|
||||
account_url=self.get_account_url(reauth_entry.data[CONF_ACCOUNT_NAME]),
|
||||
container_name=reauth_entry.data[CONF_CONTAINER_NAME],
|
||||
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
|
||||
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
|
||||
)
|
||||
errors = await self.validate_config(container_client)
|
||||
if not errors:
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry,
|
||||
data={**reauth_entry.data, **user_input},
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_STORAGE_ACCOUNT_KEY): str,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Reconfigure the entry."""
|
||||
errors: dict[str, str] = {}
|
||||
reconfigure_entry = self._get_reconfigure_entry()
|
||||
|
||||
if user_input is not None:
|
||||
container_client = ContainerClient(
|
||||
account_url=self.get_account_url(
|
||||
reconfigure_entry.data[CONF_ACCOUNT_NAME]
|
||||
),
|
||||
container_name=user_input[CONF_CONTAINER_NAME],
|
||||
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
|
||||
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
|
||||
)
|
||||
errors = await self.validate_config(container_client)
|
||||
if not errors:
|
||||
return self.async_update_reload_and_abort(
|
||||
reconfigure_entry,
|
||||
data={**reconfigure_entry.data, **user_input},
|
||||
)
|
||||
return self.async_show_form(
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_CONTAINER_NAME,
|
||||
default=reconfigure_entry.data[CONF_CONTAINER_NAME],
|
||||
): str,
|
||||
vol.Required(
|
||||
CONF_STORAGE_ACCOUNT_KEY,
|
||||
default=reconfigure_entry.data[CONF_STORAGE_ACCOUNT_KEY],
|
||||
): str,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
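The user, reauth, and reconfigure steps above all build a ContainerClient and then funnel it through the same `validate_config` helper, so the exception-to-form-error mapping lives in one place. Below is a minimal standalone sketch of that shared-validation pattern; the exception classes and `FakeContainerClient` are illustrative stand-ins, not the real Azure SDK types.

```python
import asyncio


class ResourceNotFoundError(Exception):
    """Stand-in for the Azure SDK's ResourceNotFoundError."""


class ClientAuthenticationError(Exception):
    """Stand-in for the Azure SDK's ClientAuthenticationError."""


async def validate_config(container_client) -> dict[str, str]:
    """Map client failures to form error keys, mirroring the flow above."""
    errors: dict[str, str] = {}
    try:
        await container_client.exists()
    except ResourceNotFoundError:
        errors["base"] = "cannot_connect"
    except ClientAuthenticationError:
        errors["storage_account_key"] = "invalid_auth"
    except Exception:  # unknown failures become a generic form error
        errors["base"] = "unknown"
    return errors


class FakeContainerClient:
    """Hypothetical client whose exists() call fails with bad credentials."""

    async def exists(self) -> bool:
        raise ClientAuthenticationError("401")


async def main() -> None:
    print(await validate_config(FakeContainerClient()))
    # {'storage_account_key': 'invalid_auth'}


asyncio.run(main())
```

Each step can then decide on its own whether an empty error dict means "create entry", "update and reload", or "show the form again".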
||||
@@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["azure-storage-blob"],
"quality_scale": "bronze",
"quality_scale": "platinum",
"requirements": ["azure-storage-blob==12.24.0"]
}

@@ -57,7 +57,7 @@ rules:
status: exempt
comment: |
This integration does not have platforms.
reauthentication-flow: todo
reauthentication-flow: done
test-coverage: done

# Gold
@@ -121,7 +121,7 @@ rules:
status: exempt
comment: |
This integration does not have entities.
reconfiguration-flow: todo
reconfiguration-flow: done
repair-issues: done
stale-devices:
status: exempt

@@ -19,10 +19,34 @@
|
||||
},
|
||||
"description": "Set up an Azure (Blob) storage account to be used for backups.",
|
||||
"title": "Add Azure storage account"
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"data": {
|
||||
"storage_account_key": "[%key:component::azure_storage::config::step::user::data::storage_account_key%]"
|
||||
},
|
||||
"data_description": {
|
||||
"storage_account_key": "[%key:component::azure_storage::config::step::user::data_description::storage_account_key%]"
|
||||
},
|
||||
"description": "Provide a new storage account key.",
|
||||
"title": "Reauthenticate Azure storage account"
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"container_name": "[%key:component::azure_storage::config::step::user::data::container_name%]",
|
||||
"storage_account_key": "[%key:component::azure_storage::config::step::user::data::storage_account_key%]"
|
||||
},
|
||||
"data_description": {
|
||||
"container_name": "[%key:component::azure_storage::config::step::user::data_description::container_name%]",
|
||||
"storage_account_key": "[%key:component::azure_storage::config::step::user::data_description::storage_account_key%]"
|
||||
},
|
||||
"description": "Change the settings of the Azure storage integration.",
|
||||
"title": "Reconfigure Azure storage account"
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
|
||||
@@ -41,6 +41,8 @@ class BackupAgent(abc.ABC):
|
||||
) -> AsyncIterator[bytes]:
|
||||
"""Download a backup file.
|
||||
|
||||
Raises BackupNotFound if the backup does not exist.
|
||||
|
||||
:param backup_id: The ID of the backup that was returned in async_list_backups.
|
||||
:return: An async iterator that yields bytes.
|
||||
"""
|
||||
@@ -67,6 +69,8 @@ class BackupAgent(abc.ABC):
|
||||
) -> None:
|
||||
"""Delete a backup file.
|
||||
|
||||
Raises BackupNotFound if the backup does not exist.
|
||||
|
||||
:param backup_id: The ID of the backup that was returned in async_list_backups.
|
||||
"""
|
||||
|
||||
@@ -79,8 +83,11 @@ class BackupAgent(abc.ABC):
|
||||
self,
|
||||
backup_id: str,
|
||||
**kwargs: Any,
|
||||
) -> AgentBackup | None:
|
||||
"""Return a backup."""
|
||||
) -> AgentBackup:
|
||||
"""Return a backup.
|
||||
|
||||
Raises BackupNotFound if the backup does not exist.
|
||||
"""
|
||||
|
||||
|
||||
class LocalBackupAgent(BackupAgent):
|
||||
|
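With the return type tightened from `AgentBackup | None` to `AgentBackup`, agents are expected to raise `BackupNotFound` instead of handing back `None`. A rough standalone sketch of an agent written that way; the exception and the dict-backed storage are local stand-ins, not the real `homeassistant.components.backup` classes.

```python
import asyncio


class BackupNotFound(Exception):
    """Stand-in for homeassistant.components.backup.BackupNotFound."""


class DemoAgent:
    """Minimal agent keeping backups in a dict instead of real storage."""

    def __init__(self) -> None:
        self._backups = {"abc123": {"backup_id": "abc123", "name": "nightly"}}

    async def async_get_backup(self, backup_id: str) -> dict:
        """Return a backup, raising instead of returning None when missing."""
        try:
            return self._backups[backup_id]
        except KeyError:
            raise BackupNotFound(f"Backup {backup_id} not found") from None


async def main() -> None:
    agent = DemoAgent()
    print(await agent.async_get_backup("abc123"))
    try:
        await agent.async_get_backup("missing")
    except BackupNotFound as err:
        print(err)


asyncio.run(main())
```

Callers then handle a miss with try/except rather than an `if backup is None` branch.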
||||
@@ -88,13 +88,13 @@ class CoreLocalBackupAgent(LocalBackupAgent):
|
||||
self,
|
||||
backup_id: str,
|
||||
**kwargs: Any,
|
||||
) -> AgentBackup | None:
|
||||
) -> AgentBackup:
|
||||
"""Return a backup."""
|
||||
if not self._loaded_backups:
|
||||
await self._load_backups()
|
||||
|
||||
if backup_id not in self._backups:
|
||||
return None
|
||||
raise BackupNotFound(f"Backup {backup_id} not found")
|
||||
|
||||
backup, backup_path = self._backups[backup_id]
|
||||
if not await self._hass.async_add_executor_job(backup_path.exists):
|
||||
@@ -107,7 +107,7 @@ class CoreLocalBackupAgent(LocalBackupAgent):
|
||||
backup_path,
|
||||
)
|
||||
self._backups.pop(backup_id)
|
||||
return None
|
||||
raise BackupNotFound(f"Backup {backup_id} not found")
|
||||
|
||||
return backup
|
||||
|
||||
@@ -130,10 +130,7 @@ class CoreLocalBackupAgent(LocalBackupAgent):
|
||||
if not self._loaded_backups:
|
||||
await self._load_backups()
|
||||
|
||||
try:
|
||||
backup_path = self.get_backup_path(backup_id)
|
||||
except BackupNotFound:
|
||||
return
|
||||
backup_path = self.get_backup_path(backup_id)
|
||||
await self._hass.async_add_executor_job(backup_path.unlink, True)
|
||||
LOGGER.debug("Deleted backup located at %s", backup_path)
|
||||
self._backups.pop(backup_id)
|
||||
|
||||
@@ -15,6 +15,7 @@ from multidict import istr
|
||||
from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import frame
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from . import util
|
||||
@@ -59,11 +60,19 @@ class DownloadBackupView(HomeAssistantView):
|
||||
if agent_id not in manager.backup_agents:
|
||||
return Response(status=HTTPStatus.BAD_REQUEST)
|
||||
agent = manager.backup_agents[agent_id]
|
||||
backup = await agent.async_get_backup(backup_id)
|
||||
try:
|
||||
backup = await agent.async_get_backup(backup_id)
|
||||
except BackupNotFound:
|
||||
return Response(status=HTTPStatus.NOT_FOUND)
|
||||
|
||||
# We don't need to check if the path exists, aiohttp.FileResponse will handle
|
||||
# that
|
||||
if backup is None:
|
||||
# Check for None to be backwards compatible with the old BackupAgent API,
|
||||
# this can be removed in HA Core 2025.10
|
||||
if not backup:
|
||||
frame.report_usage(
|
||||
"returns None from BackupAgent.async_get_backup",
|
||||
breaks_in_ha_version="2025.10",
|
||||
integration_domain=agent_id.partition(".")[0],
|
||||
)
|
||||
return Response(status=HTTPStatus.NOT_FOUND)
|
||||
|
||||
headers = {
|
||||
@@ -92,6 +101,8 @@ class DownloadBackupView(HomeAssistantView):
|
||||
) -> StreamResponse | FileResponse | Response:
|
||||
if agent_id in manager.local_backup_agents:
|
||||
local_agent = manager.local_backup_agents[agent_id]
|
||||
# We don't need to check if the path exists, aiohttp.FileResponse will
|
||||
# handle that
|
||||
path = local_agent.get_backup_path(backup_id)
|
||||
return FileResponse(path=path.as_posix(), headers=headers)
|
||||
|
||||
|
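The download view keeps working with agents written against the old API (which returned `None`) while preferring the new exception during the deprecation window. A simplified sketch of that dual handling, with plain status codes standing in for the aiohttp responses used above:

```python
import asyncio
from http import HTTPStatus


class BackupNotFound(Exception):
    """Stand-in for the backup integration's BackupNotFound."""


async def backup_status(agent, backup_id: str) -> int:
    """Return the HTTP status the view would answer with."""
    try:
        backup = await agent.async_get_backup(backup_id)
    except BackupNotFound:
        return HTTPStatus.NOT_FOUND
    if backup is None:  # legacy agents; slated for removal in a later release
        return HTTPStatus.NOT_FOUND
    return HTTPStatus.OK


class LegacyAgent:
    """Old-style agent that still signals a miss by returning None."""

    async def async_get_backup(self, backup_id: str):
        return None


print(asyncio.run(backup_status(LegacyAgent(), "abc123")))  # 404 (NOT_FOUND)
```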
||||
@@ -30,6 +30,7 @@ from homeassistant.backup_restore import (
|
||||
from homeassistant.const import __version__ as HAVERSION
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import (
|
||||
frame,
|
||||
instance_id,
|
||||
integration_platform,
|
||||
issue_registry as ir,
|
||||
@@ -64,6 +65,7 @@ from .models import (
|
||||
AgentBackup,
|
||||
BackupError,
|
||||
BackupManagerError,
|
||||
BackupNotFound,
|
||||
BackupReaderWriterError,
|
||||
BaseBackup,
|
||||
Folder,
|
||||
@@ -648,6 +650,8 @@ class BackupManager:
|
||||
)
|
||||
for idx, result in enumerate(get_backup_results):
|
||||
agent_id = agent_ids[idx]
|
||||
if isinstance(result, BackupNotFound):
|
||||
continue
|
||||
if isinstance(result, BackupAgentError):
|
||||
agent_errors[agent_id] = result
|
||||
continue
|
||||
@@ -659,7 +663,14 @@ class BackupManager:
|
||||
continue
|
||||
if isinstance(result, BaseException):
|
||||
raise result # unexpected error
|
||||
# Check for None to be backwards compatible with the old BackupAgent API,
|
||||
# this can be removed in HA Core 2025.10
|
||||
if not result:
|
||||
frame.report_usage(
|
||||
"returns None from BackupAgent.async_get_backup",
|
||||
breaks_in_ha_version="2025.10",
|
||||
integration_domain=agent_id.partition(".")[0],
|
||||
)
|
||||
continue
|
||||
if backup is None:
|
||||
if known_backup := self.known_backups.get(backup_id):
|
||||
@@ -723,6 +734,8 @@ class BackupManager:
|
||||
)
|
||||
for idx, result in enumerate(delete_backup_results):
|
||||
agent_id = agent_ids[idx]
|
||||
if isinstance(result, BackupNotFound):
|
||||
continue
|
||||
if isinstance(result, BackupAgentError):
|
||||
agent_errors[agent_id] = result
|
||||
continue
|
||||
@@ -832,7 +845,7 @@ class BackupManager:
|
||||
agent_errors = {
|
||||
backup_id: error
|
||||
for backup_id, error in zip(backup_ids, delete_results, strict=True)
|
||||
if error
|
||||
if error and not isinstance(error, BackupNotFound)
|
||||
}
|
||||
if agent_errors:
|
||||
LOGGER.error(
|
||||
@@ -1264,7 +1277,20 @@ class BackupManager:
|
||||
) -> None:
|
||||
"""Initiate restoring a backup."""
|
||||
agent = self.backup_agents[agent_id]
|
||||
if not await agent.async_get_backup(backup_id):
|
||||
try:
|
||||
backup = await agent.async_get_backup(backup_id)
|
||||
except BackupNotFound as err:
|
||||
raise BackupManagerError(
|
||||
f"Backup {backup_id} not found in agent {agent_id}"
|
||||
) from err
|
||||
# Check for None to be backwards compatible with the old BackupAgent API,
|
||||
# this can be removed in HA Core 2025.10
|
||||
if not backup:
|
||||
frame.report_usage(
|
||||
"returns None from BackupAgent.async_get_backup",
|
||||
breaks_in_ha_version="2025.10",
|
||||
integration_domain=agent_id.partition(".")[0],
|
||||
)
|
||||
raise BackupManagerError(
|
||||
f"Backup {backup_id} not found in agent {agent_id}"
|
||||
)
|
||||
@@ -1352,7 +1378,20 @@ class BackupManager:
|
||||
agent = self.backup_agents[agent_id]
|
||||
except KeyError as err:
|
||||
raise BackupManagerError(f"Invalid agent selected: {agent_id}") from err
|
||||
if not await agent.async_get_backup(backup_id):
|
||||
try:
|
||||
backup = await agent.async_get_backup(backup_id)
|
||||
except BackupNotFound as err:
|
||||
raise BackupManagerError(
|
||||
f"Backup {backup_id} not found in agent {agent_id}"
|
||||
) from err
|
||||
# Check for None to be backwards compatible with the old BackupAgent API,
|
||||
# this can be removed in HA Core 2025.10
|
||||
if not backup:
|
||||
frame.report_usage(
|
||||
"returns None from BackupAgent.async_get_backup",
|
||||
breaks_in_ha_version="2025.10",
|
||||
integration_domain=agent_id.partition(".")[0],
|
||||
)
|
||||
raise BackupManagerError(
|
||||
f"Backup {backup_id} not found in agent {agent_id}"
|
||||
)
|
||||
|
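The manager gathers per-agent results with `return_exceptions=True` and then treats `BackupNotFound` as a non-error while still recording real agent failures, as in the delete and get loops above. A small self-contained sketch of that filtering step, with invented agent names:

```python
import asyncio


class BackupNotFound(Exception):
    pass


class BackupAgentError(Exception):
    pass


async def query(agent_id: str) -> dict:
    if agent_id == "cloud":
        raise BackupNotFound("missing")    # silently skipped
    if agent_id == "nas":
        raise BackupAgentError("timeout")  # recorded per agent
    return {"agent": agent_id}


async def main() -> None:
    agent_ids = ["local", "cloud", "nas"]
    results = await asyncio.gather(
        *(query(a) for a in agent_ids), return_exceptions=True
    )
    agent_errors = {
        agent_id: err
        for agent_id, err in zip(agent_ids, results, strict=True)
        if isinstance(err, Exception) and not isinstance(err, BackupNotFound)
    }
    print(agent_errors)  # only the 'nas' failure survives


asyncio.run(main())
```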
||||
@@ -21,6 +21,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.CLIMATE,
|
||||
Platform.EVENT,
|
||||
Platform.FAN,
|
||||
Platform.LIGHT,
|
||||
Platform.SELECT,
|
||||
@@ -28,7 +29,6 @@ PLATFORMS = [
|
||||
Platform.TIME,
|
||||
]
|
||||
|
||||
|
||||
KEEP_ALIVE_INTERVAL = timedelta(minutes=1)
|
||||
SYNC_TIME_INTERVAL = timedelta(hours=1)
|
||||
|
||||
|
||||
@@ -0,0 +1,91 @@
|
||||
"""Support for Balboa events."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from pybalboa import EVENT_UPDATE, SpaClient
|
||||
|
||||
from homeassistant.components.event import EventEntity
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
|
||||
from . import BalboaConfigEntry
|
||||
from .entity import BalboaEntity
|
||||
|
||||
FAULT = "fault"
|
||||
FAULT_DATE = "fault_date"
|
||||
REQUEST_FAULT_LOG_INTERVAL = timedelta(minutes=5)
|
||||
|
||||
FAULT_MESSAGE_CODE_MAP: dict[int, str] = {
|
||||
15: "sensor_out_of_sync",
|
||||
16: "low_flow",
|
||||
17: "flow_failed",
|
||||
18: "settings_reset",
|
||||
19: "priming_mode",
|
||||
20: "clock_failed",
|
||||
21: "settings_reset",
|
||||
22: "memory_failure",
|
||||
26: "service_sensor_sync",
|
||||
27: "heater_dry",
|
||||
28: "heater_may_be_dry",
|
||||
29: "water_too_hot",
|
||||
30: "heater_too_hot",
|
||||
31: "sensor_a_fault",
|
||||
32: "sensor_b_fault",
|
||||
34: "pump_stuck",
|
||||
35: "hot_fault",
|
||||
36: "gfci_test_failed",
|
||||
37: "standby_mode",
|
||||
}
|
||||
FAULT_EVENT_TYPES = sorted(set(FAULT_MESSAGE_CODE_MAP.values()))
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: BalboaConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the spa's events."""
|
||||
async_add_entities([BalboaEventEntity(entry.runtime_data)])
|
||||
|
||||
|
||||
class BalboaEventEntity(BalboaEntity, EventEntity):
|
||||
"""Representation of a Balboa event entity."""
|
||||
|
||||
_attr_event_types = FAULT_EVENT_TYPES
|
||||
_attr_translation_key = FAULT
|
||||
|
||||
def __init__(self, spa: SpaClient) -> None:
|
||||
"""Initialize a Balboa event entity."""
|
||||
super().__init__(spa, FAULT)
|
||||
|
||||
@callback
|
||||
def _async_handle_event(self) -> None:
|
||||
"""Handle the fault event."""
|
||||
if not (fault := self._client.fault):
|
||||
return
|
||||
fault_date = fault.fault_datetime.isoformat()
|
||||
if self.state_attributes.get(FAULT_DATE) != fault_date:
|
||||
self._trigger_event(
|
||||
FAULT_MESSAGE_CODE_MAP.get(fault.message_code, fault.message),
|
||||
{FAULT_DATE: fault_date, "code": fault.message_code},
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
self.async_on_remove(self._client.on(EVENT_UPDATE, self._async_handle_event))
|
||||
|
||||
async def request_fault_log(now: datetime | None = None) -> None:
|
||||
"""Request the most recent fault log."""
|
||||
await self._client.request_fault_log()
|
||||
|
||||
await request_fault_log()
|
||||
self.async_on_remove(
|
||||
async_track_time_interval(
|
||||
self.hass, request_fault_log, REQUEST_FAULT_LOG_INTERVAL
|
||||
)
|
||||
)
|
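The event entity only fires when the reported fault carries a newer timestamp than the one already recorded, so repeated polls of the same fault log entry do not spam the event stream. A minimal standalone sketch of that deduplication (a plain class, no Home Assistant entity base):

```python
from datetime import datetime


class FaultEvents:
    """Fire an event only when the fault timestamp changes."""

    def __init__(self) -> None:
        self._last_fault_date: str | None = None
        self.fired: list[tuple[str, str]] = []

    def handle_fault(self, event_type: str, fault_datetime: datetime) -> None:
        fault_date = fault_datetime.isoformat()
        if self._last_fault_date == fault_date:
            return  # same fault log entry seen again, nothing to do
        self._last_fault_date = fault_date
        self.fired.append((event_type, fault_date))


events = FaultEvents()
stamp = datetime(2025, 3, 1, 12, 0)
events.handle_fault("low_flow", stamp)
events.handle_fault("low_flow", stamp)  # ignored, identical timestamp
print(len(events.fired))  # 1
```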
||||
@@ -57,6 +57,35 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"event": {
|
||||
"fault": {
|
||||
"name": "Fault",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"sensor_out_of_sync": "Sensors are out of sync",
|
||||
"low_flow": "The water flow is low",
|
||||
"flow_failed": "The water flow has failed",
|
||||
"settings_reset": "The settings have been reset",
|
||||
"priming_mode": "Priming mode",
|
||||
"clock_failed": "The clock has failed",
|
||||
"memory_failure": "Program memory failure",
|
||||
"service_sensor_sync": "Sensors are out of sync -- call for service",
|
||||
"heater_dry": "The heater is dry",
|
||||
"heater_may_be_dry": "The heater may be dry",
|
||||
"water_too_hot": "The water is too hot",
|
||||
"heater_too_hot": "The heater is too hot",
|
||||
"sensor_a_fault": "Sensor A fault",
|
||||
"sensor_b_fault": "Sensor B fault",
|
||||
"pump_stuck": "A pump may be stuck on",
|
||||
"hot_fault": "Hot fault",
|
||||
"gfci_test_failed": "The GFCI test failed",
|
||||
"standby_mode": "Standby mode (hold mode)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"fan": {
|
||||
"pump": {
|
||||
"name": "Pump {index}"
|
||||
|
||||
@@ -311,11 +311,24 @@ async def async_update_device(
|
||||
update the device with the new location so they can
|
||||
figure out where the adapter is.
|
||||
"""
|
||||
address = details[ADAPTER_ADDRESS]
|
||||
connections = {(dr.CONNECTION_BLUETOOTH, address)}
|
||||
device_registry = dr.async_get(hass)
|
||||
# We only have one device for the config entry
|
||||
# so if the address has been corrected, make
|
||||
# sure the device entry reflects the correct
|
||||
# address
|
||||
for device in dr.async_entries_for_config_entry(device_registry, entry.entry_id):
|
||||
for conn_type, conn_value in device.connections:
|
||||
if conn_type == dr.CONNECTION_BLUETOOTH and conn_value != address:
|
||||
device_registry.async_update_device(
|
||||
device.id, new_connections=connections
|
||||
)
|
||||
break
|
||||
device_entry = device_registry.async_get_or_create(
|
||||
config_entry_id=entry.entry_id,
|
||||
name=adapter_human_name(adapter, details[ADAPTER_ADDRESS]),
|
||||
connections={(dr.CONNECTION_BLUETOOTH, details[ADAPTER_ADDRESS])},
|
||||
name=adapter_human_name(adapter, address),
|
||||
connections=connections,
|
||||
manufacturer=details[ADAPTER_MANUFACTURER],
|
||||
model=adapter_model(details),
|
||||
sw_version=details.get(ADAPTER_SW_VERSION),
|
||||
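The `async_update_device` hunk above walks the registry entries already attached to the config entry and rewrites the Bluetooth connection tuple when the adapter address was corrected. The same check against a plain in-memory structure, with a hypothetical `DeviceEntry` standing in for the Home Assistant device registry entry:

```python
from dataclasses import dataclass, field

CONNECTION_BLUETOOTH = "bluetooth"


@dataclass
class DeviceEntry:
    id: str
    connections: set[tuple[str, str]] = field(default_factory=set)


def fix_connections(devices: list[DeviceEntry], address: str) -> None:
    """Point every stale Bluetooth connection at the corrected address."""
    wanted = {(CONNECTION_BLUETOOTH, address)}
    for device in devices:
        for conn_type, conn_value in device.connections:
            if conn_type == CONNECTION_BLUETOOTH and conn_value != address:
                device.connections = wanted
                break


entries = [DeviceEntry("dev1", {(CONNECTION_BLUETOOTH, "AA:BB:CC:00:00:01")})]
fix_connections(entries, "AA:BB:CC:00:00:02")
print(entries[0].connections)  # {('bluetooth', 'AA:BB:CC:00:00:02')}
```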
@@ -342,9 +355,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
)
|
||||
)
|
||||
)
|
||||
return True
|
||||
address = entry.unique_id
|
||||
assert address is not None
|
||||
assert source_entry is not None
|
||||
source_domain = entry.data[CONF_SOURCE_DOMAIN]
|
||||
if mac_manufacturer := await get_manufacturer_from_mac(address):
|
||||
manufacturer = f"{mac_manufacturer} ({source_domain})"
|
||||
|
||||
@@ -186,16 +186,28 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a flow initialized by an external scanner."""
|
||||
source = user_input[CONF_SOURCE]
|
||||
await self.async_set_unique_id(source)
|
||||
source_config_entry_id = user_input[CONF_SOURCE_CONFIG_ENTRY_ID]
|
||||
data = {
|
||||
CONF_SOURCE: source,
|
||||
CONF_SOURCE_MODEL: user_input[CONF_SOURCE_MODEL],
|
||||
CONF_SOURCE_DOMAIN: user_input[CONF_SOURCE_DOMAIN],
|
||||
CONF_SOURCE_CONFIG_ENTRY_ID: user_input[CONF_SOURCE_CONFIG_ENTRY_ID],
|
||||
CONF_SOURCE_CONFIG_ENTRY_ID: source_config_entry_id,
|
||||
CONF_SOURCE_DEVICE_ID: user_input[CONF_SOURCE_DEVICE_ID],
|
||||
}
|
||||
self._abort_if_unique_id_configured(updates=data)
|
||||
manager = get_manager()
|
||||
scanner = manager.async_scanner_by_source(source)
|
||||
for entry in self._async_current_entries(include_ignore=False):
|
||||
# If the mac address needs to be corrected, migrate
|
||||
# the config entry to the new mac address
|
||||
if (
|
||||
entry.data.get(CONF_SOURCE_CONFIG_ENTRY_ID) == source_config_entry_id
|
||||
and entry.unique_id != source
|
||||
):
|
||||
self.hass.config_entries.async_update_entry(
|
||||
entry, unique_id=source, data={**entry.data, **data}
|
||||
)
|
||||
self.hass.config_entries.async_schedule_reload(entry.entry_id)
|
||||
return self.async_abort(reason="already_configured")
|
||||
scanner = get_manager().async_scanner_by_source(source)
|
||||
assert scanner is not None
|
||||
return self.async_create_entry(title=scanner.name, data=data)
|
||||
|
||||
|
||||
@@ -19,8 +19,8 @@
|
||||
"bleak-retry-connector==3.9.0",
|
||||
"bluetooth-adapters==0.21.4",
|
||||
"bluetooth-auto-recovery==1.4.4",
|
||||
"bluetooth-data-tools==1.23.4",
|
||||
"dbus-fast==2.33.0",
|
||||
"habluetooth==3.24.1"
|
||||
"bluetooth-data-tools==1.25.0",
|
||||
"dbus-fast==2.39.3",
|
||||
"habluetooth==3.25.0"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -138,6 +138,8 @@ class WebDavTodoListEntity(TodoListEntity):
|
||||
await self.hass.async_add_executor_job(
|
||||
partial(self._calendar.save_todo, **item_data),
|
||||
)
|
||||
# Refresh in a background task; awaiting the refresh here would take too long
|
||||
self.hass.async_create_task(self.async_update_ha_state(force_refresh=True))
|
||||
except (requests.ConnectionError, DAVError) as err:
|
||||
raise HomeAssistantError(f"CalDAV save error: {err}") from err
|
||||
|
||||
@@ -172,6 +174,8 @@ class WebDavTodoListEntity(TodoListEntity):
|
||||
obj_type="todo",
|
||||
),
|
||||
)
|
||||
# Refresh in a background task; awaiting the refresh here would take too long
|
||||
self.hass.async_create_task(self.async_update_ha_state(force_refresh=True))
|
||||
except (requests.ConnectionError, DAVError) as err:
|
||||
raise HomeAssistantError(f"CalDAV save error: {err}") from err
|
||||
|
||||
@@ -195,3 +199,5 @@ class WebDavTodoListEntity(TodoListEntity):
|
||||
await self.hass.async_add_executor_job(item.delete)
|
||||
except (requests.ConnectionError, DAVError) as err:
|
||||
raise HomeAssistantError(f"CalDAV delete error: {err}") from err
|
||||
# Refresh in a background task; awaiting the refresh here would take too long
|
||||
self.hass.async_create_task(self.async_update_ha_state(force_refresh=True))
|
||||
|
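After each CalDAV write the entity schedules the state refresh as a background task rather than awaiting it, so the service call returns as soon as the save itself succeeds. A small asyncio-only sketch of that pattern; the sleeps stand in for the slow CalDAV round trips.

```python
import asyncio
import time


async def refresh() -> None:
    await asyncio.sleep(0.2)  # stand-in for a slow CalDAV re-fetch
    print("state refreshed")


async def save_item() -> None:
    # The write itself is awaited (it runs in a worker thread) ...
    await asyncio.to_thread(time.sleep, 0.05)  # stand-in for save_todo()
    # ... but the follow-up refresh is fired off in the background.
    asyncio.create_task(refresh())
    print("save returned")


async def main() -> None:
    await save_item()
    await asyncio.sleep(0.3)  # keep the loop alive so the task can finish


asyncio.run(main())
```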
||||
@@ -18,7 +18,12 @@ from hass_nabucasa.cloud_api import (
|
||||
)
|
||||
from hass_nabucasa.files import FilesError, StorageType, calculate_b64md5
|
||||
|
||||
from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
|
||||
from homeassistant.components.backup import (
|
||||
AgentBackup,
|
||||
BackupAgent,
|
||||
BackupAgentError,
|
||||
BackupNotFound,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.aiohttp_client import ChunkAsyncStreamIterator
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
@@ -90,9 +95,7 @@ class CloudBackupAgent(BackupAgent):
|
||||
:param backup_id: The ID of the backup that was returned in async_list_backups.
|
||||
:return: An async iterator that yields bytes.
|
||||
"""
|
||||
if not (backup := await self._async_get_backup(backup_id)):
|
||||
raise BackupAgentError("Backup not found")
|
||||
|
||||
backup = await self._async_get_backup(backup_id)
|
||||
try:
|
||||
content = await self._cloud.files.download(
|
||||
storage_type=StorageType.BACKUP,
|
||||
@@ -171,9 +174,7 @@ class CloudBackupAgent(BackupAgent):
|
||||
|
||||
:param backup_id: The ID of the backup that was returned in async_list_backups.
|
||||
"""
|
||||
if not (backup := await self._async_get_backup(backup_id)):
|
||||
return
|
||||
|
||||
backup = await self._async_get_backup(backup_id)
|
||||
try:
|
||||
await async_files_delete_file(
|
||||
self._cloud,
|
||||
@@ -204,16 +205,12 @@ class CloudBackupAgent(BackupAgent):
|
||||
self,
|
||||
backup_id: str,
|
||||
**kwargs: Any,
|
||||
) -> AgentBackup | None:
|
||||
) -> AgentBackup:
|
||||
"""Return a backup."""
|
||||
if not (backup := await self._async_get_backup(backup_id)):
|
||||
return None
|
||||
backup = await self._async_get_backup(backup_id)
|
||||
return AgentBackup.from_dict(backup["Metadata"])
|
||||
|
||||
async def _async_get_backup(
|
||||
self,
|
||||
backup_id: str,
|
||||
) -> FilesHandlerListEntry | None:
|
||||
async def _async_get_backup(self, backup_id: str) -> FilesHandlerListEntry:
|
||||
"""Return a backup."""
|
||||
backups = await self._async_list_backups()
|
||||
|
||||
@@ -221,4 +218,4 @@ class CloudBackupAgent(BackupAgent):
|
||||
if backup["Metadata"]["backup_id"] == backup_id:
|
||||
return backup
|
||||
|
||||
return None
|
||||
raise BackupNotFound(f"Backup {backup_id} not found")
|
||||
|
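`_async_get_backup` no longer returns an optional value; the scan over the cloud file list ends in a raise, which is what lets the public methods above drop their `if not backup` guards. Sketch with plain dictionaries instead of the nabucasa file listing:

```python
class BackupNotFound(Exception):
    pass


FILES = [
    {"Key": "a.tar", "Metadata": {"backup_id": "abc123"}},
    {"Key": "b.tar", "Metadata": {"backup_id": "def456"}},
]


def get_backup(backup_id: str) -> dict:
    """Return the matching file entry or raise, never None."""
    for entry in FILES:
        if entry["Metadata"]["backup_id"] == backup_id:
            return entry
    raise BackupNotFound(f"Backup {backup_id} not found")


print(get_backup("def456")["Key"])  # b.tar
```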
||||
@@ -13,6 +13,6 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["acme", "hass_nabucasa", "snitun"],
|
||||
"requirements": ["hass-nabucasa==0.92.0"],
|
||||
"requirements": ["hass-nabucasa==0.94.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ import logging
|
||||
from typing import cast
|
||||
|
||||
from aiocomelit.api import ComelitVedoAreaObject
|
||||
from aiocomelit.const import ALARM_AREAS, AlarmAreaState
|
||||
from aiocomelit.const import AlarmAreaState
|
||||
|
||||
from homeassistant.components.alarm_control_panel import (
|
||||
AlarmControlPanelEntity,
|
||||
@@ -56,7 +56,7 @@ async def async_setup_entry(
|
||||
|
||||
async_add_entities(
|
||||
ComelitAlarmEntity(coordinator, device, config_entry.entry_id)
|
||||
for device in coordinator.data[ALARM_AREAS].values()
|
||||
for device in coordinator.data["alarm_areas"].values()
|
||||
)
|
||||
|
||||
|
||||
@@ -92,7 +92,7 @@ class ComelitAlarmEntity(CoordinatorEntity[ComelitVedoSystem], AlarmControlPanel
|
||||
@property
|
||||
def _area(self) -> ComelitVedoAreaObject:
|
||||
"""Return area object."""
|
||||
return self.coordinator.data[ALARM_AREAS][self._area_index]
|
||||
return self.coordinator.data["alarm_areas"][self._area_index]
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
|
||||
@@ -5,7 +5,6 @@ from __future__ import annotations
|
||||
from typing import cast
|
||||
|
||||
from aiocomelit import ComelitVedoZoneObject
|
||||
from aiocomelit.const import ALARM_ZONES
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
@@ -29,7 +28,7 @@ async def async_setup_entry(
|
||||
|
||||
async_add_entities(
|
||||
ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id)
|
||||
for device in coordinator.data[ALARM_ZONES].values()
|
||||
for device in coordinator.data["alarm_zones"].values()
|
||||
)
|
||||
|
||||
|
||||
@@ -49,7 +48,7 @@ class ComelitVedoBinarySensorEntity(
|
||||
) -> None:
|
||||
"""Init sensor entity."""
|
||||
self._api = coordinator.api
|
||||
self._zone = zone
|
||||
self._zone_index = zone.index
|
||||
super().__init__(coordinator)
|
||||
# Use config_entry.entry_id as base for unique_id
|
||||
# because no serial number or mac is available
|
||||
@@ -59,4 +58,6 @@ class ComelitVedoBinarySensorEntity(
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Presence detected."""
|
||||
return self.coordinator.data[ALARM_ZONES][self._zone.index].status_api == "0001"
|
||||
return (
|
||||
self.coordinator.data["alarm_zones"][self._zone_index].status_api == "0001"
|
||||
)
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import StrEnum
|
||||
from typing import Any, cast
|
||||
from typing import Any, TypedDict, cast
|
||||
|
||||
from aiocomelit import ComelitSerialBridgeObject
|
||||
from aiocomelit.const import CLIMATE
|
||||
@@ -16,7 +16,8 @@ from homeassistant.components.climate import (
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_TENTHS
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
@@ -42,22 +43,23 @@ class ClimaComelitCommand(StrEnum):
|
||||
AUTO = "auto"
|
||||
|
||||
|
||||
API_STATUS: dict[str, dict[str, Any]] = {
|
||||
ClimaComelitMode.OFF: {
|
||||
"action": "off",
|
||||
"hvac_mode": HVACMode.OFF,
|
||||
"hvac_action": HVACAction.OFF,
|
||||
},
|
||||
ClimaComelitMode.LOWER: {
|
||||
"action": "lower",
|
||||
"hvac_mode": HVACMode.COOL,
|
||||
"hvac_action": HVACAction.COOLING,
|
||||
},
|
||||
ClimaComelitMode.UPPER: {
|
||||
"action": "upper",
|
||||
"hvac_mode": HVACMode.HEAT,
|
||||
"hvac_action": HVACAction.HEATING,
|
||||
},
|
||||
class ClimaComelitApiStatus(TypedDict):
|
||||
"""Comelit Clima API status."""
|
||||
|
||||
hvac_mode: HVACMode
|
||||
hvac_action: HVACAction
|
||||
|
||||
|
||||
API_STATUS: dict[str, ClimaComelitApiStatus] = {
|
||||
ClimaComelitMode.OFF: ClimaComelitApiStatus(
|
||||
hvac_mode=HVACMode.OFF, hvac_action=HVACAction.OFF
|
||||
),
|
||||
ClimaComelitMode.LOWER: ClimaComelitApiStatus(
|
||||
hvac_mode=HVACMode.COOL, hvac_action=HVACAction.COOLING
|
||||
),
|
||||
ClimaComelitMode.UPPER: ClimaComelitApiStatus(
|
||||
hvac_mode=HVACMode.HEAT, hvac_action=HVACAction.HEATING
|
||||
),
|
||||
}
|
||||
|
||||
MODE_TO_ACTION: dict[HVACMode, ClimaComelitCommand] = {
|
||||
@@ -114,69 +116,41 @@ class ComelitClimateEntity(CoordinatorEntity[ComelitSerialBridge], ClimateEntity
|
||||
self._attr_unique_id = f"{config_entry_entry_id}-{device.index}"
|
||||
self._attr_device_info = coordinator.platform_device_info(device, device.type)
|
||||
|
||||
@property
|
||||
def _clima(self) -> list[Any]:
|
||||
"""Return clima device data."""
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
device = self.coordinator.data[CLIMATE][self._device.index]
|
||||
if not isinstance(device.val, list):
|
||||
raise HomeAssistantError("Invalid clima data")
|
||||
|
||||
# CLIMATE has a 2 item tuple:
|
||||
# - first for Clima
|
||||
# - second for Humidifier
|
||||
return self.coordinator.data[CLIMATE][self._device.index].val[0]
|
||||
values = device.val[0]
|
||||
|
||||
@property
|
||||
def _api_mode(self) -> str:
|
||||
"""Return device mode."""
|
||||
# Values from API: "O", "L", "U"
|
||||
return self._clima[2]
|
||||
_active = values[1]
|
||||
_mode = values[2] # Values from API: "O", "L", "U"
|
||||
_automatic = values[3] == ClimaComelitMode.AUTO
|
||||
|
||||
@property
|
||||
def _api_active(self) -> bool:
|
||||
"Return device active/idle."
|
||||
return self._clima[1]
|
||||
self._attr_current_temperature = values[0] / 10
|
||||
|
||||
@property
|
||||
def _api_automatic(self) -> bool:
|
||||
"""Return device in automatic/manual mode."""
|
||||
return self._clima[3] == ClimaComelitMode.AUTO
|
||||
self._attr_hvac_action = None
|
||||
if _mode == ClimaComelitMode.OFF:
|
||||
self._attr_hvac_action = HVACAction.OFF
|
||||
if not _active:
|
||||
self._attr_hvac_action = HVACAction.IDLE
|
||||
if _mode in API_STATUS:
|
||||
self._attr_hvac_action = API_STATUS[_mode]["hvac_action"]
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float:
|
||||
"""Set target temperature."""
|
||||
return self._clima[4] / 10
|
||||
self._attr_hvac_mode = None
|
||||
if _mode == ClimaComelitMode.OFF:
|
||||
self._attr_hvac_mode = HVACMode.OFF
|
||||
if _automatic:
|
||||
self._attr_hvac_mode = HVACMode.AUTO
|
||||
if _mode in API_STATUS:
|
||||
self._attr_hvac_mode = API_STATUS[_mode]["hvac_mode"]
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float:
|
||||
"""Return current temperature."""
|
||||
return self._clima[0] / 10
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode | None:
|
||||
"""HVAC current mode."""
|
||||
|
||||
if self._api_mode == ClimaComelitMode.OFF:
|
||||
return HVACMode.OFF
|
||||
|
||||
if self._api_automatic:
|
||||
return HVACMode.AUTO
|
||||
|
||||
if self._api_mode in API_STATUS:
|
||||
return API_STATUS[self._api_mode]["hvac_mode"]
|
||||
|
||||
return None
|
||||
|
||||
@property
|
||||
def hvac_action(self) -> HVACAction | None:
|
||||
"""HVAC current action."""
|
||||
|
||||
if self._api_mode == ClimaComelitMode.OFF:
|
||||
return HVACAction.OFF
|
||||
|
||||
if not self._api_active:
|
||||
return HVACAction.IDLE
|
||||
|
||||
if self._api_mode in API_STATUS:
|
||||
return API_STATUS[self._api_mode]["hvac_action"]
|
||||
|
||||
return None
|
||||
self._attr_target_temperature = values[4] / 10
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
|
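Instead of recomputing HVAC state in half a dozen properties on every read, the climate entity now decodes the raw `val` tuple once per coordinator update and stores the results in `_attr_*` fields. The same idea in a stripped-down form (no CoordinatorEntity, just a plain class and a raw values list whose layout is assumed from the code above):

```python
OFF, LOWER, UPPER, AUTO = "O", "L", "U", "A"


class ClimateState:
    """Decode the raw clima tuple once and cache the derived attributes."""

    def __init__(self) -> None:
        self.current_temperature: float | None = None
        self.target_temperature: float | None = None
        self.hvac_action: str | None = None

    def handle_update(self, values: list) -> None:
        # assumed layout: [current*10, active, mode, auto-flag, target*10]
        active, mode = values[1], values[2]
        self.current_temperature = values[0] / 10
        self.target_temperature = values[4] / 10
        if mode == OFF:
            self.hvac_action = "off"
        elif not active:
            self.hvac_action = "idle"
        else:
            self.hvac_action = "heating" if mode == UPPER else "cooling"


state = ClimateState()
state.handle_update([215, True, UPPER, "M", 230])
print(state.current_temperature, state.target_temperature, state.hvac_action)
# 21.5 23.0 heating
```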
||||
@@ -2,18 +2,19 @@
|
||||
|
||||
from abc import abstractmethod
|
||||
from datetime import timedelta
|
||||
from typing import Any
|
||||
from typing import TypeVar
|
||||
|
||||
from aiocomelit import (
|
||||
from aiocomelit.api import (
|
||||
AlarmDataObject,
|
||||
ComelitCommonApi,
|
||||
ComeliteSerialBridgeApi,
|
||||
ComelitSerialBridgeObject,
|
||||
ComelitVedoApi,
|
||||
ComelitVedoAreaObject,
|
||||
ComelitVedoZoneObject,
|
||||
exceptions,
|
||||
)
|
||||
from aiocomelit.api import ComelitCommonApi
|
||||
from aiocomelit.const import BRIDGE, VEDO
|
||||
from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -26,7 +27,13 @@ from .const import _LOGGER, DOMAIN
|
||||
type ComelitConfigEntry = ConfigEntry[ComelitBaseCoordinator]
|
||||
|
||||
|
||||
class ComelitBaseCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
T = TypeVar(
|
||||
"T",
|
||||
bound=dict[str, dict[int, ComelitSerialBridgeObject]] | AlarmDataObject,
|
||||
)
|
||||
|
||||
|
||||
class ComelitBaseCoordinator(DataUpdateCoordinator[T]):
|
||||
"""Base coordinator for Comelit Devices."""
|
||||
|
||||
_hw_version: str
|
||||
@@ -81,23 +88,25 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
hw_version=self._hw_version,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> dict[str, Any]:
|
||||
async def _async_update_data(self) -> T:
|
||||
"""Update device data."""
|
||||
_LOGGER.debug("Polling Comelit %s host: %s", self._device, self._host)
|
||||
try:
|
||||
await self.api.login()
|
||||
return await self._async_update_system_data()
|
||||
except (exceptions.CannotConnect, exceptions.CannotRetrieveData) as err:
|
||||
except (CannotConnect, CannotRetrieveData) as err:
|
||||
raise UpdateFailed(repr(err)) from err
|
||||
except exceptions.CannotAuthenticate as err:
|
||||
except CannotAuthenticate as err:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
|
||||
@abstractmethod
|
||||
async def _async_update_system_data(self) -> dict[str, Any]:
|
||||
async def _async_update_system_data(self) -> T:
|
||||
"""Class method for updating data."""
|
||||
|
||||
|
||||
class ComelitSerialBridge(ComelitBaseCoordinator):
|
||||
class ComelitSerialBridge(
|
||||
ComelitBaseCoordinator[dict[str, dict[int, ComelitSerialBridgeObject]]]
|
||||
):
|
||||
"""Queries Comelit Serial Bridge."""
|
||||
|
||||
_hw_version = "20003101"
|
||||
@@ -115,12 +124,14 @@ class ComelitSerialBridge(ComelitBaseCoordinator):
|
||||
self.api = ComeliteSerialBridgeApi(host, port, pin)
|
||||
super().__init__(hass, entry, BRIDGE, host)
|
||||
|
||||
async def _async_update_system_data(self) -> dict[str, Any]:
|
||||
async def _async_update_system_data(
|
||||
self,
|
||||
) -> dict[str, dict[int, ComelitSerialBridgeObject]]:
|
||||
"""Specific method for updating data."""
|
||||
return await self.api.get_all_devices()
|
||||
|
||||
|
||||
class ComelitVedoSystem(ComelitBaseCoordinator):
|
||||
class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
|
||||
"""Queries Comelit VEDO system."""
|
||||
|
||||
_hw_version = "VEDO IP"
|
||||
@@ -138,6 +149,8 @@ class ComelitVedoSystem(ComelitBaseCoordinator):
|
||||
self.api = ComelitVedoApi(host, port, pin)
|
||||
super().__init__(hass, entry, VEDO, host)
|
||||
|
||||
async def _async_update_system_data(self) -> dict[str, Any]:
|
||||
async def _async_update_system_data(
|
||||
self,
|
||||
) -> AlarmDataObject:
|
||||
"""Specific method for updating data."""
|
||||
return await self.api.get_all_areas_and_zones()
|
||||
|
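Binding the coordinator's data type with a `TypeVar` means `ComelitSerialBridge` and `ComelitVedoSystem` each expose precisely typed `data` to their entities. A compact illustration of the same generic-base-class idea without the Home Assistant `DataUpdateCoordinator`; the type aliases here are simplified stand-ins for the real payloads.

```python
import asyncio
from typing import Generic, TypeVar

BridgeData = dict[str, dict[int, str]]
AlarmData = dict[str, list[str]]
T = TypeVar("T", bound=BridgeData | AlarmData)


class BaseCoordinator(Generic[T]):
    """Shared polling logic; subclasses pin down the data shape."""

    async def _async_update_data(self) -> T:
        raise NotImplementedError


class BridgeCoordinator(BaseCoordinator[BridgeData]):
    async def _async_update_data(self) -> BridgeData:
        return {"light": {1: "on"}}


class AlarmCoordinator(BaseCoordinator[AlarmData]):
    async def _async_update_data(self) -> AlarmData:
        return {"alarm_zones": ["entrance"]}


print(asyncio.run(BridgeCoordinator()._async_update_data()))
```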
||||
@@ -16,8 +16,8 @@ from homeassistant.components.humidifier import (
|
||||
HumidifierEntity,
|
||||
HumidifierEntityFeature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
@@ -122,61 +122,32 @@ class ComelitHumidifierEntity(CoordinatorEntity[ComelitSerialBridge], Humidifier
|
||||
self._active_action = active_action
|
||||
self._set_command = set_command
|
||||
|
||||
@property
|
||||
def _humidifier(self) -> list[Any]:
|
||||
"""Return humidifier device data."""
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
device = self.coordinator.data[CLIMATE][self._device.index]
|
||||
if not isinstance(device.val, list):
|
||||
raise HomeAssistantError("Invalid clima data")
|
||||
|
||||
# CLIMATE has a 2 item tuple:
|
||||
# - first for Clima
|
||||
# - second for Humidifier
|
||||
return self.coordinator.data[CLIMATE][self._device.index].val[1]
|
||||
values = device.val[1]
|
||||
|
||||
@property
|
||||
def _api_mode(self) -> str:
|
||||
"""Return device mode."""
|
||||
# Values from API: "O", "L", "U"
|
||||
return self._humidifier[2]
|
||||
_active = values[1]
|
||||
_mode = values[2] # Values from API: "O", "L", "U"
|
||||
_automatic = values[3] == HumidifierComelitMode.AUTO
|
||||
|
||||
@property
|
||||
def _api_active(self) -> bool:
|
||||
"Return device active/idle."
|
||||
return self._humidifier[1]
|
||||
self._attr_action = HumidifierAction.IDLE
|
||||
if _mode == HumidifierComelitMode.OFF:
|
||||
self._attr_action = HumidifierAction.OFF
|
||||
if _active and _mode == self._active_mode:
|
||||
self._attr_action = self._active_action
|
||||
|
||||
@property
|
||||
def _api_automatic(self) -> bool:
|
||||
"""Return device in automatic/manual mode."""
|
||||
return self._humidifier[3] == HumidifierComelitMode.AUTO
|
||||
|
||||
@property
|
||||
def target_humidity(self) -> float:
|
||||
"""Set target humidity."""
|
||||
return self._humidifier[4] / 10
|
||||
|
||||
@property
|
||||
def current_humidity(self) -> float:
|
||||
"""Return current humidity."""
|
||||
return self._humidifier[0] / 10
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true is humidifier is on."""
|
||||
return self._api_mode == self._active_mode
|
||||
|
||||
@property
|
||||
def mode(self) -> str | None:
|
||||
"""Return current mode."""
|
||||
return MODE_AUTO if self._api_automatic else MODE_NORMAL
|
||||
|
||||
@property
|
||||
def action(self) -> HumidifierAction | None:
|
||||
"""Return current action."""
|
||||
|
||||
if self._api_mode == HumidifierComelitMode.OFF:
|
||||
return HumidifierAction.OFF
|
||||
|
||||
if self._api_active and self._api_mode == self._active_mode:
|
||||
return self._active_action
|
||||
|
||||
return HumidifierAction.IDLE
|
||||
self._attr_current_humidity = values[0] / 10
|
||||
self._attr_is_on = _mode == self._active_mode
|
||||
self._attr_mode = MODE_AUTO if _automatic else MODE_NORMAL
|
||||
self._attr_target_humidity = values[4] / 10
|
||||
|
||||
async def async_set_humidity(self, humidity: int) -> None:
|
||||
"""Set new target humidity."""
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aiocomelit"],
|
||||
"requirements": ["aiocomelit==0.10.1"]
|
||||
"requirements": ["aiocomelit==0.11.2"]
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from typing import Final, cast
|
||||
|
||||
from aiocomelit import ComelitSerialBridgeObject, ComelitVedoZoneObject
|
||||
from aiocomelit.const import ALARM_ZONES, BRIDGE, OTHER, AlarmZoneState
|
||||
from aiocomelit.const import BRIDGE, OTHER, AlarmZoneState
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
@@ -82,7 +82,7 @@ async def async_setup_vedo_entry(
|
||||
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
|
||||
|
||||
entities: list[ComelitVedoSensorEntity] = []
|
||||
for device in coordinator.data[ALARM_ZONES].values():
|
||||
for device in coordinator.data["alarm_zones"].values():
|
||||
entities.extend(
|
||||
ComelitVedoSensorEntity(
|
||||
coordinator, device, config_entry.entry_id, sensor_desc
|
||||
@@ -119,9 +119,12 @@ class ComelitBridgeSensorEntity(CoordinatorEntity[ComelitSerialBridge], SensorEn
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Sensor value."""
|
||||
return getattr(
|
||||
self.coordinator.data[OTHER][self._device.index],
|
||||
self.entity_description.key,
|
||||
return cast(
|
||||
StateType,
|
||||
getattr(
|
||||
self.coordinator.data[OTHER][self._device.index],
|
||||
self.entity_description.key,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -139,7 +142,7 @@ class ComelitVedoSensorEntity(CoordinatorEntity[ComelitVedoSystem], SensorEntity
|
||||
) -> None:
|
||||
"""Init sensor entity."""
|
||||
self._api = coordinator.api
|
||||
self._zone = zone
|
||||
self._zone_index = zone.index
|
||||
super().__init__(coordinator)
|
||||
# Use config_entry.entry_id as base for unique_id
|
||||
# because no serial number or mac is available
|
||||
@@ -151,7 +154,7 @@ class ComelitVedoSensorEntity(CoordinatorEntity[ComelitVedoSystem], SensorEntity
|
||||
@property
|
||||
def _zone_object(self) -> ComelitVedoZoneObject:
|
||||
"""Zone object."""
|
||||
return self.coordinator.data[ALARM_ZONES][self._zone.index]
|
||||
return self.coordinator.data["alarm_zones"][self._zone_index]
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
@@ -164,4 +167,4 @@ class ComelitVedoSensorEntity(CoordinatorEntity[ComelitVedoSystem], SensorEntity
|
||||
if (status := self._zone_object.human_status) == AlarmZoneState.UNKNOWN:
|
||||
return None
|
||||
|
||||
return status.value
|
||||
return cast(str, status.value)
|
||||
|
||||
@@ -77,7 +77,4 @@ class ComelitSwitchEntity(CoordinatorEntity[ComelitSerialBridge], SwitchEntity):
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return True if switch is on."""
|
||||
return (
|
||||
self.coordinator.data[self._device.type][self._device.index].status
|
||||
== STATE_ON
|
||||
)
|
||||
return self.coordinator.data[OTHER][self._device.index].status == STATE_ON
|
||||
|
||||
@@ -183,6 +183,25 @@ class ChatLog:
|
||||
llm_api: llm.APIInstance | None = None
|
||||
delta_listener: Callable[[ChatLog, dict], None] | None = None
|
||||
|
||||
@property
|
||||
def continue_conversation(self) -> bool:
|
||||
"""Return whether the conversation should continue."""
|
||||
if not self.content:
|
||||
return False
|
||||
|
||||
last_msg = self.content[-1]
|
||||
|
||||
return (
|
||||
last_msg.role == "assistant"
|
||||
and last_msg.content is not None # type: ignore[union-attr]
|
||||
and last_msg.content.strip().endswith( # type: ignore[union-attr]
|
||||
(
|
||||
"?",
|
||||
";", # Greek question mark
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def unresponded_tool_results(self) -> bool:
|
||||
"""Return if there are unresponded tool results."""
|
||||
|
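The new `continue_conversation` property boils down to "the last message is from the assistant and ends in a question mark (Latin `?` or the Greek question mark)". Here is that check as a small standalone function over a throwaway message type:

```python
from dataclasses import dataclass


@dataclass
class Message:
    role: str
    content: str | None


def continue_conversation(content: list[Message]) -> bool:
    """Return True when the assistant just asked the user a question."""
    if not content:
        return False
    last = content[-1]
    return (
        last.role == "assistant"
        and last.content is not None
        # the ';' stands in for the Greek question mark used in ChatLog
        and last.content.strip().endswith(("?", ";"))
    )


log = [Message("user", "turn on the light"), Message("assistant", "Which light?")]
print(continue_conversation(log))  # True
```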
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/conversation",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.2.26"]
|
||||
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.3.5"]
|
||||
}
|
||||
|
||||
@@ -62,12 +62,14 @@ class ConversationResult:
|
||||
|
||||
response: intent.IntentResponse
|
||||
conversation_id: str | None = None
|
||||
continue_conversation: bool = False
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return result as a dict."""
|
||||
return {
|
||||
"response": self.response.as_dict(),
|
||||
"conversation_id": self.conversation_id,
|
||||
"continue_conversation": self.continue_conversation,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -48,6 +48,7 @@ COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM = [
|
||||
Platform.TIME,
|
||||
Platform.UPDATE,
|
||||
Platform.VACUUM,
|
||||
Platform.VALVE,
|
||||
Platform.WATER_HEATER,
|
||||
Platform.WEATHER,
|
||||
]
|
||||
|
||||
@@ -0,0 +1,89 @@
|
||||
"""Demo valve platform that implements valves."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.valve import ValveEntity, ValveEntityFeature, ValveState
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
OPEN_CLOSE_DELAY = 2 # Used to give a realistic open/close experience in frontend
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Demo config entry."""
|
||||
async_add_entities(
|
||||
[
|
||||
DemoValve("Front Garden", ValveState.OPEN),
|
||||
DemoValve("Orchard", ValveState.CLOSED),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class DemoValve(ValveEntity):
|
||||
"""Representation of a Demo valve."""
|
||||
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
state: str,
|
||||
moveable: bool = True,
|
||||
) -> None:
|
||||
"""Initialize the valve."""
|
||||
self._attr_name = name
|
||||
if moveable:
|
||||
self._attr_supported_features = (
|
||||
ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE
|
||||
)
|
||||
self._state = state
|
||||
self._moveable = moveable
|
||||
|
||||
@property
|
||||
def is_open(self) -> bool:
|
||||
"""Return true if valve is open."""
|
||||
return self._state == ValveState.OPEN
|
||||
|
||||
@property
|
||||
def is_opening(self) -> bool:
|
||||
"""Return true if valve is opening."""
|
||||
return self._state == ValveState.OPENING
|
||||
|
||||
@property
|
||||
def is_closing(self) -> bool:
|
||||
"""Return true if valve is closing."""
|
||||
return self._state == ValveState.CLOSING
|
||||
|
||||
@property
|
||||
def is_closed(self) -> bool:
|
||||
"""Return true if valve is closed."""
|
||||
return self._state == ValveState.CLOSED
|
||||
|
||||
@property
|
||||
def reports_position(self) -> bool:
|
||||
"""Return True if entity reports position, False otherwise."""
|
||||
return False
|
||||
|
||||
async def async_open_valve(self, **kwargs: Any) -> None:
|
||||
"""Open the valve."""
|
||||
self._state = ValveState.OPENING
|
||||
self.async_write_ha_state()
|
||||
await asyncio.sleep(OPEN_CLOSE_DELAY)
|
||||
self._state = ValveState.OPEN
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_close_valve(self, **kwargs: Any) -> None:
|
||||
"""Close the valve."""
|
||||
self._state = ValveState.CLOSING
|
||||
self.async_write_ha_state()
|
||||
await asyncio.sleep(OPEN_CLOSE_DELAY)
|
||||
self._state = ValveState.CLOSED
|
||||
self.async_write_ha_state()
|
||||
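The demo valve fakes hardware latency by publishing the transitional state, sleeping, and then publishing the settled state. The same sequencing in a bare asyncio sketch, with a print standing in for writing entity state:

```python
import asyncio

OPEN_CLOSE_DELAY = 2  # seconds, mirrors the demo platform


class Valve:
    def __init__(self) -> None:
        self.state = "closed"

    def publish(self) -> None:
        print(f"state -> {self.state}")

    async def open(self) -> None:
        self.state = "opening"
        self.publish()                      # frontend sees the transition...
        await asyncio.sleep(OPEN_CLOSE_DELAY)
        self.state = "open"
        self.publish()                      # ...then the settled state


asyncio.run(Valve().open())
```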
@@ -16,6 +16,6 @@
|
||||
"requirements": [
|
||||
"aiodhcpwatcher==1.1.1",
|
||||
"aiodiscover==2.6.1",
|
||||
"cached-ipaddress==0.9.2"
|
||||
"cached-ipaddress==0.10.0"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"dependencies": ["webhook"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/ecowitt",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["aioecowitt==2024.2.1"]
|
||||
"requirements": ["aioecowitt==2025.3.1"]
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "Your Elevenlabs API key."
|
||||
"api_key": "Your ElevenLabs API key."
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -72,5 +72,7 @@ class EpsonConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if projector:
|
||||
projector.close()
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=DATA_SCHEMA, errors=errors
|
||||
step_id="user",
|
||||
data_schema=self.add_suggested_values_to_schema(DATA_SCHEMA, user_input),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@@ -22,5 +22,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["eq3btsmart"],
|
||||
"requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.8.0"]
|
||||
"requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.11.0"]
|
||||
}
|
||||
|
||||
@@ -16,7 +16,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import CONF_NOISE_PSK, DATA_FFMPEG_PROXY, DOMAIN
|
||||
from .const import CONF_BLUETOOTH_MAC_ADDRESS, CONF_NOISE_PSK, DATA_FFMPEG_PROXY, DOMAIN
|
||||
from .dashboard import async_setup as async_setup_dashboard
|
||||
from .domain_data import DomainData
|
||||
|
||||
@@ -87,6 +87,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) ->
|
||||
|
||||
async def async_remove_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> None:
|
||||
"""Remove an esphome config entry."""
|
||||
if mac_address := entry.unique_id:
|
||||
async_remove_scanner(hass, mac_address.upper())
|
||||
if bluetooth_mac_address := entry.data.get(CONF_BLUETOOTH_MAC_ADDRESS):
|
||||
async_remove_scanner(hass, bluetooth_mac_address.upper())
|
||||
await DomainData.get(hass).get_or_create_store(hass, entry).async_remove()
|
||||
|
||||
@@ -284,7 +284,10 @@ class EsphomeAssistSatellite(
|
||||
elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END:
|
||||
assert event.data is not None
|
||||
data_to_send = {
|
||||
"conversation_id": event.data["intent_output"]["conversation_id"] or "",
|
||||
"conversation_id": event.data["intent_output"]["conversation_id"],
|
||||
"continue_conversation": str(
|
||||
int(event.data["intent_output"]["continue_conversation"])
|
||||
),
|
||||
}
|
||||
elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START:
|
||||
assert event.data is not None
|
||||
|
||||
@@ -8,6 +8,7 @@ CONF_ALLOW_SERVICE_CALLS = "allow_service_calls"
|
||||
CONF_SUBSCRIBE_LOGS = "subscribe_logs"
|
||||
CONF_DEVICE_NAME = "device_name"
|
||||
CONF_NOISE_PSK = "noise_psk"
|
||||
CONF_BLUETOOTH_MAC_ADDRESS = "bluetooth_mac_address"
|
||||
|
||||
DEFAULT_ALLOW_SERVICE_CALLS = True
|
||||
DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS = False
|
||||
|
||||
@@ -13,9 +13,7 @@ from . import CONF_NOISE_PSK
|
||||
from .dashboard import async_get_dashboard
|
||||
from .entry_data import ESPHomeConfigEntry
|
||||
|
||||
CONF_MAC_ADDRESS = "mac_address"
|
||||
|
||||
REDACT_KEYS = {CONF_NOISE_PSK, CONF_PASSWORD, CONF_MAC_ADDRESS}
|
||||
REDACT_KEYS = {CONF_NOISE_PSK, CONF_PASSWORD, "mac_address", "bluetooth_mac_address"}
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
@@ -27,13 +25,17 @@ async def async_get_config_entry_diagnostics(
|
||||
diag["config"] = config_entry.as_dict()
|
||||
|
||||
entry_data = config_entry.runtime_data
|
||||
device_info = entry_data.device_info
|
||||
|
||||
if (storage_data := await entry_data.store.async_load()) is not None:
|
||||
diag["storage_data"] = storage_data
|
||||
|
||||
if (
|
||||
config_entry.unique_id
|
||||
and (scanner := async_scanner_by_source(hass, config_entry.unique_id.upper()))
|
||||
device_info
|
||||
and (
|
||||
scanner_mac := device_info.bluetooth_mac_address or device_info.mac_address
|
||||
)
|
||||
and (scanner := async_scanner_by_source(hass, scanner_mac.upper()))
|
||||
and (bluetooth_device := entry_data.bluetooth_device)
|
||||
):
|
||||
diag["bluetooth"] = {
|
||||
|
||||
@@ -63,6 +63,7 @@ from homeassistant.util.async_ import create_eager_task
|
||||
from .bluetooth import async_connect_scanner
|
||||
from .const import (
|
||||
CONF_ALLOW_SERVICE_CALLS,
|
||||
CONF_BLUETOOTH_MAC_ADDRESS,
|
||||
CONF_DEVICE_NAME,
|
||||
CONF_SUBSCRIBE_LOGS,
|
||||
DEFAULT_ALLOW_SERVICE_CALLS,
|
||||
@@ -431,6 +432,13 @@ class ESPHomeManager:
|
||||
|
||||
device_mac = format_mac(device_info.mac_address)
|
||||
mac_address_matches = unique_id == device_mac
|
||||
if (
|
||||
bluetooth_mac_address := device_info.bluetooth_mac_address
|
||||
) and entry.data.get(CONF_BLUETOOTH_MAC_ADDRESS) != bluetooth_mac_address:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
data={**entry.data, CONF_BLUETOOTH_MAC_ADDRESS: bluetooth_mac_address},
|
||||
)
|
||||
#
|
||||
# Migrate config entry to new unique ID if the current
|
||||
# unique id is not a mac address.
|
||||
@@ -498,7 +506,9 @@ class ESPHomeManager:
|
||||
)
|
||||
)
|
||||
else:
|
||||
bluetooth.async_remove_scanner(hass, device_info.mac_address)
|
||||
bluetooth.async_remove_scanner(
|
||||
hass, device_info.bluetooth_mac_address or device_info.mac_address
|
||||
)
|
||||
|
||||
if device_info.voice_assistant_feature_flags_compat(api_version) and (
|
||||
Platform.ASSIST_SATELLITE not in entry_data.loaded_platforms
|
||||
@@ -617,11 +627,22 @@ class ESPHomeManager:
|
||||
)
|
||||
_setup_services(hass, entry_data, services)
|
||||
|
||||
if entry_data.device_info is not None and entry_data.device_info.name:
|
||||
reconnect_logic.name = entry_data.device_info.name
|
||||
if (device_info := entry_data.device_info) is not None:
|
||||
if device_info.name:
|
||||
reconnect_logic.name = device_info.name
|
||||
if (
|
||||
bluetooth_mac_address := device_info.bluetooth_mac_address
|
||||
) and entry.data.get(CONF_BLUETOOTH_MAC_ADDRESS) != bluetooth_mac_address:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
data={
|
||||
**entry.data,
|
||||
CONF_BLUETOOTH_MAC_ADDRESS: bluetooth_mac_address,
|
||||
},
|
||||
)
|
||||
if entry.unique_id is None:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry, unique_id=format_mac(entry_data.device_info.mac_address)
|
||||
entry, unique_id=format_mac(device_info.mac_address)
|
||||
)
|
||||
|
||||
await reconnect_logic.start()
|
||||
|
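Both the connect callback and the setup path above write `bluetooth_mac_address` back into the entry data, but only when the device reports a value that differs from what is already stored, so entries are not rewritten needlessly. A dictionary-level sketch of that guard; the helper name is invented and merely stands in for `async_update_entry`.

```python
CONF_BLUETOOTH_MAC_ADDRESS = "bluetooth_mac_address"


def maybe_store(entry_data: dict, reported: str | None) -> dict:
    """Return updated entry data, untouched unless the reported MAC changed."""
    if reported and entry_data.get(CONF_BLUETOOTH_MAC_ADDRESS) != reported:
        return {**entry_data, CONF_BLUETOOTH_MAC_ADDRESS: reported}
    return entry_data


data = {"host": "10.0.0.5"}
data = maybe_store(data, "AA:BB:CC:DD:EE:FF")   # stored on first sight
data = maybe_store(data, "AA:BB:CC:DD:EE:FF")   # unchanged, no rewrite
print(data)
```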
||||
@@ -16,9 +16,9 @@
|
||||
"loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"],
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"requirements": [
|
||||
"aioesphomeapi==29.2.0",
|
||||
"aioesphomeapi==29.4.0",
|
||||
"esphome-dashboard-api==1.2.3",
|
||||
"bleak-esphome==2.8.0"
|
||||
"bleak-esphome==2.11.0"
|
||||
],
|
||||
"zeroconf": ["_esphomelib._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ from typing import Any
|
||||
import evohomeasync as ec1
|
||||
import evohomeasync2 as ec2
|
||||
from evohomeasync2.const import (
|
||||
SZ_DHW,
|
||||
SZ_GATEWAY_ID,
|
||||
SZ_GATEWAY_INFO,
|
||||
SZ_GATEWAYS,
|
||||
@@ -19,8 +20,9 @@ from evohomeasync2.const import (
|
||||
SZ_TEMPERATURE_CONTROL_SYSTEMS,
|
||||
SZ_TIME_ZONE,
|
||||
SZ_USE_DAYLIGHT_SAVE_SWITCHING,
|
||||
SZ_ZONES,
|
||||
)
|
||||
from evohomeasync2.schemas.typedefs import EvoLocStatusResponseT
|
||||
from evohomeasync2.schemas.typedefs import EvoLocStatusResponseT, EvoTcsConfigResponseT
|
||||
|
||||
from homeassistant.const import CONF_SCAN_INTERVAL
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -113,17 +115,19 @@ class EvoDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
SZ_USE_DAYLIGHT_SAVE_SWITCHING
|
||||
],
|
||||
}
|
||||
tcs_info: EvoTcsConfigResponseT = self.tcs.config # type: ignore[assignment]
|
||||
tcs_info[SZ_ZONES] = [zone.config for zone in self.tcs.zones]
|
||||
if self.tcs.hotwater:
|
||||
tcs_info[SZ_DHW] = self.tcs.hotwater.config
|
||||
gwy_info = {
|
||||
SZ_GATEWAY_ID: self.loc.gateways[0].id,
|
||||
SZ_TEMPERATURE_CONTROL_SYSTEMS: [
|
||||
self.loc.gateways[0].systems[0].config
|
||||
],
|
||||
SZ_TEMPERATURE_CONTROL_SYSTEMS: [tcs_info],
|
||||
}
|
||||
config = {
|
||||
SZ_LOCATION_INFO: loc_info,
|
||||
SZ_GATEWAYS: [{SZ_GATEWAY_INFO: gwy_info}],
|
||||
}
|
||||
self.logger.debug("Config = %s", config)
|
||||
self.logger.debug("Config = %s", [config])
|
||||
|
||||
async def call_client_api(
|
||||
self,
|
||||
@@ -203,10 +207,18 @@ class EvoDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
|
||||
async def _update_v2_schedules(self) -> None:
|
||||
for zone in self.tcs.zones:
|
||||
await zone.get_schedule()
|
||||
try:
|
||||
await zone.get_schedule()
|
||||
except ec2.InvalidScheduleError as err:
|
||||
self.logger.warning(
|
||||
"Zone '%s' has an invalid/missing schedule: %r", zone.name, err
|
||||
)
|
||||
|
||||
if dhw := self.tcs.hotwater:
|
||||
await dhw.get_schedule()
|
||||
try:
|
||||
await dhw.get_schedule()
|
||||
except ec2.InvalidScheduleError as err:
|
||||
self.logger.warning("DHW has an invalid/missing schedule: %r", err)
|
||||
|
||||
async def _async_update_data(self) -> EvoLocStatusResponseT: # type: ignore[override]
|
||||
"""Fetch the latest state of an entire TCC Location.
|
||||
|
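Schedule fetching is now fault-tolerant per zone: a single zone with a broken schedule logs a warning instead of failing the whole coordinator refresh. Roughly, the pattern is this (plain asyncio, with an invented `InvalidScheduleError` standing in for the evohome client's exception):

```python
import asyncio
import logging

logging.basicConfig(level=logging.WARNING)
_LOGGER = logging.getLogger("evohome-sketch")


class InvalidScheduleError(Exception):
    pass


async def get_schedule(zone: str) -> str:
    if zone == "attic":
        raise InvalidScheduleError("empty schedule")
    return f"schedule for {zone}"


async def refresh(zones: list[str]) -> dict[str, str]:
    schedules: dict[str, str] = {}
    for zone in zones:
        try:
            schedules[zone] = await get_schedule(zone)
        except InvalidScheduleError as err:
            _LOGGER.warning("Zone '%s' has an invalid/missing schedule: %r", zone, err)
    return schedules


print(asyncio.run(refresh(["kitchen", "attic"])))  # attic is skipped, not fatal
```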
||||
@@ -6,6 +6,7 @@ import logging
from typing import Any

import evohomeasync2 as evo
from evohomeasync2.schemas.typedefs import DayOfWeekDhwT

from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
@@ -102,7 +103,7 @@ class EvoChild(EvoEntity):

self._evo_tcs = evo_device.tcs

self._schedule: dict[str, Any] | None = None
self._schedule: list[DayOfWeekDhwT] | None = None
self._setpoints: dict[str, Any] = {}

@property
@@ -123,6 +124,9 @@ class EvoChild(EvoEntity):
Only Zones & DHW controllers (but not the TCS) can have schedules.
"""

if not self._schedule:
return self._setpoints

this_sp_dtm, this_sp_val = self._evo_device.this_switchpoint
next_sp_dtm, next_sp_val = self._evo_device.next_switchpoint

@@ -152,10 +156,10 @@ class EvoChild(EvoEntity):
self._evo_device,
err,
)
self._schedule = {}
self._schedule = []
return
else:
self._schedule = schedule or {}  # mypy hint
self._schedule = schedule  # type: ignore[assignment]

_LOGGER.debug("Schedule['%s'] = %s", self.name, schedule)

@@ -7,21 +7,21 @@ from collections.abc import Callable, Mapping
import logging
from typing import Any

from pyfibaro.fibaro_client import FibaroClient
from pyfibaro.fibaro_client import (
FibaroAuthenticationFailed,
FibaroClient,
FibaroConnectFailed,
)
from pyfibaro.fibaro_data_helper import read_rooms
from pyfibaro.fibaro_device import DeviceModel
from pyfibaro.fibaro_room import RoomModel
from pyfibaro.fibaro_info import InfoModel
from pyfibaro.fibaro_scene import SceneModel
from pyfibaro.fibaro_state_resolver import FibaroEvent, FibaroStateResolver
from requests.exceptions import HTTPError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryNotReady,
HomeAssistantError,
)
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceEntry, DeviceInfo
from homeassistant.util import slugify
@@ -74,63 +74,31 @@ FIBARO_TYPEMAP = {
class FibaroController:
"""Initiate Fibaro Controller Class."""

def __init__(self, config: Mapping[str, Any]) -> None:
def __init__(
self, fibaro_client: FibaroClient, info: InfoModel, import_plugins: bool
) -> None:
"""Initialize the Fibaro controller."""

# The FibaroClient uses the correct API version automatically
self._client = FibaroClient(config[CONF_URL])
self._client.set_authentication(config[CONF_USERNAME], config[CONF_PASSWORD])
self._client = fibaro_client
self._fibaro_info = info

# Whether to import devices from plugins
self._import_plugins = config[CONF_IMPORT_PLUGINS]
self._room_map: dict[int, RoomModel]  # Mapping roomId to room object
self._import_plugins = import_plugins
# Mapping roomId to room object
self._room_map = read_rooms(fibaro_client)
self._device_map: dict[int, DeviceModel]  # Mapping deviceId to device object
self.fibaro_devices: dict[Platform, list[DeviceModel]] = defaultdict(
list
)  # List of devices by entity platform
# All scenes
self._scenes: list[SceneModel] = []
self._scenes = self._client.read_scenes()
self._callbacks: dict[int, list[Any]] = {}  # Update value callbacks by deviceId
# Event callbacks by device id
self._event_callbacks: dict[int, list[Callable[[FibaroEvent], None]]] = {}
self.hub_serial: str  # Unique serial number of the hub
self.hub_name: str  # The friendly name of the hub
self.hub_model: str
self.hub_software_version: str
self.hub_api_url: str = config[CONF_URL]
# Unique serial number of the hub
self.hub_serial = info.serial_number
# Device infos by fibaro device id
self._device_infos: dict[int, DeviceInfo] = {}

def connect(self) -> None:
"""Start the communication with the Fibaro controller."""

# Return value doesn't need to be checked,
# it is only relevant when connecting without credentials
self._client.connect()
info = self._client.read_info()
self.hub_serial = info.serial_number
self.hub_name = info.hc_name
self.hub_model = info.platform
self.hub_software_version = info.current_version

self._room_map = {room.fibaro_id: room for room in self._client.read_rooms()}
self._read_devices()
self._scenes = self._client.read_scenes()

def connect_with_error_handling(self) -> None:
"""Translate connect errors to easily differentiate auth and connect failures.

When there is a better error handling in the used library this can be improved.
"""
try:
self.connect()
except HTTPError as http_ex:
if http_ex.response.status_code == 403:
raise FibaroAuthFailed from http_ex

raise FibaroConnectFailed from http_ex
except Exception as ex:
raise FibaroConnectFailed from ex

def enable_state_handler(self) -> None:
"""Start StateHandler thread for monitoring updates."""
@@ -302,14 +270,20 @@ class FibaroController:

def get_room_name(self, room_id: int) -> str | None:
"""Get the room name by room id."""
assert self._room_map
room = self._room_map.get(room_id)
return room.name if room else None
return self._room_map.get(room_id)

def read_scenes(self) -> list[SceneModel]:
"""Return list of scenes."""
return self._scenes

def read_fibaro_info(self) -> InfoModel:
"""Return the general info about the hub."""
return self._fibaro_info

def get_frontend_url(self) -> str:
"""Return the url to the Fibaro hub web UI."""
return self._client.frontend_url()

def _read_devices(self) -> None:
"""Read and process the device list."""
devices = self._client.read_devices()
@@ -319,20 +293,17 @@ class FibaroController:
for device in devices:
try:
device.fibaro_controller = self
if device.room_id == 0:
room_name = self.get_room_name(device.room_id)
if not room_name:
room_name = "Unknown"
else:
room_name = self._room_map[device.room_id].name
device.room_name = room_name
device.friendly_name = f"{room_name} {device.name}"
device.ha_id = (
f"{slugify(room_name)}_{slugify(device.name)}_{device.fibaro_id}"
)
if device.enabled and (not device.is_plugin or self._import_plugins):
device.mapped_platform = self._map_device_to_platform(device)
else:
device.mapped_platform = None
if (platform := device.mapped_platform) is None:
platform = self._map_device_to_platform(device)
if platform is None:
continue
device.unique_id_str = f"{slugify(self.hub_serial)}.{device.fibaro_id}"
self._create_device_info(device, devices)
@@ -375,11 +346,17 @@ class FibaroController:
pass

def connect_fibaro_client(data: Mapping[str, Any]) -> tuple[InfoModel, FibaroClient]:
"""Connect to the fibaro hub and read some basic data."""
client = FibaroClient(data[CONF_URL])
info = client.connect_with_credentials(data[CONF_USERNAME], data[CONF_PASSWORD])
return (info, client)

def init_controller(data: Mapping[str, Any]) -> FibaroController:
"""Validate the user input allows us to connect to fibaro."""
controller = FibaroController(data)
controller.connect_with_error_handling()
return controller
"""Connect to the fibaro hub and init the controller."""
info, client = connect_fibaro_client(data)
return FibaroController(client, info, data[CONF_IMPORT_PLUGINS])

async def async_setup_entry(hass: HomeAssistant, entry: FibaroConfigEntry) -> bool:
@@ -393,22 +370,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: FibaroConfigEntry) -> bo
raise ConfigEntryNotReady(
f"Could not connect to controller at {entry.data[CONF_URL]}"
) from connect_ex
except FibaroAuthFailed as auth_ex:
except FibaroAuthenticationFailed as auth_ex:
raise ConfigEntryAuthFailed from auth_ex

entry.runtime_data = controller

# register the hub device info separately as the hub has sometimes no entities
fibaro_info = controller.read_fibaro_info()
device_registry = dr.async_get(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, controller.hub_serial)},
serial_number=controller.hub_serial,
manufacturer="Fibaro",
name=controller.hub_name,
model=controller.hub_model,
sw_version=controller.hub_software_version,
configuration_url=controller.hub_api_url.removesuffix("/api/"),
manufacturer=fibaro_info.manufacturer_name,
name=fibaro_info.hc_name,
model=fibaro_info.model_name,
sw_version=fibaro_info.current_version,
configuration_url=controller.get_frontend_url(),
connections={(dr.CONNECTION_NETWORK_MAC, fibaro_info.mac_address)},
)

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -443,11 +422,3 @@ async def async_remove_config_entry_device(
return False

return True

class FibaroConnectFailed(HomeAssistantError):
"""Error to indicate we cannot connect to fibaro home center."""

class FibaroAuthFailed(HomeAssistantError):
"""Error to indicate that authentication failed on fibaro home center."""

@@ -129,13 +129,13 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
def __init__(self, fibaro_device: DeviceModel) -> None:
"""Initialize the Fibaro device."""
super().__init__(fibaro_device)
self._temp_sensor_device: FibaroEntity | None = None
self._target_temp_device: FibaroEntity | None = None
self._op_mode_device: FibaroEntity | None = None
self._fan_mode_device: FibaroEntity | None = None
self._temp_sensor_device: DeviceModel | None = None
self._target_temp_device: DeviceModel | None = None
self._op_mode_device: DeviceModel | None = None
self._fan_mode_device: DeviceModel | None = None
self.entity_id = ENTITY_ID_FORMAT.format(self.ha_id)

siblings = fibaro_device.fibaro_controller.get_siblings(fibaro_device)
siblings = self.controller.get_siblings(fibaro_device)
_LOGGER.debug("%s siblings: %s", fibaro_device.ha_id, siblings)
tempunit = "C"
for device in siblings:
@@ -147,23 +147,23 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
and (device.value.has_value or device.has_heating_thermostat_setpoint)
and device.unit in ("C", "F")
):
self._temp_sensor_device = FibaroEntity(device)
self._temp_sensor_device = device
tempunit = device.unit

if any(
action for action in TARGET_TEMP_ACTIONS if action in device.actions
):
self._target_temp_device = FibaroEntity(device)
self._target_temp_device = device
self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE
if device.has_unit:
tempunit = device.unit

if any(action for action in OP_MODE_ACTIONS if action in device.actions):
self._op_mode_device = FibaroEntity(device)
self._op_mode_device = device
self._attr_supported_features |= ClimateEntityFeature.PRESET_MODE

if "setFanMode" in device.actions:
self._fan_mode_device = FibaroEntity(device)
self._fan_mode_device = device
self._attr_supported_features |= ClimateEntityFeature.FAN_MODE

if tempunit == "F":
@@ -172,7 +172,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
self._attr_temperature_unit = UnitOfTemperature.CELSIUS

if self._fan_mode_device:
fan_modes = self._fan_mode_device.fibaro_device.supported_modes
fan_modes = self._fan_mode_device.supported_modes
self._attr_fan_modes = []
for mode in fan_modes:
if mode not in FANMODES:
@@ -184,7 +184,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
if self._op_mode_device:
self._attr_preset_modes = []
self._attr_hvac_modes: list[HVACMode] = []
device = self._op_mode_device.fibaro_device
device = self._op_mode_device
if device.has_supported_thermostat_modes:
for mode in device.supported_thermostat_modes:
try:
@@ -222,15 +222,15 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
"- _fan_mode_device %s"
),
self.ha_id,
self._temp_sensor_device.ha_id if self._temp_sensor_device else "None",
self._target_temp_device.ha_id if self._target_temp_device else "None",
self._op_mode_device.ha_id if self._op_mode_device else "None",
self._fan_mode_device.ha_id if self._fan_mode_device else "None",
self._temp_sensor_device.fibaro_id if self._temp_sensor_device else "None",
self._target_temp_device.fibaro_id if self._target_temp_device else "None",
self._op_mode_device.fibaro_id if self._op_mode_device else "None",
self._fan_mode_device.fibaro_id if self._fan_mode_device else "None",
)
await super().async_added_to_hass()

# Register update callback for child devices
siblings = self.fibaro_device.fibaro_controller.get_siblings(self.fibaro_device)
siblings = self.controller.get_siblings(self.fibaro_device)
for device in siblings:
if device != self.fibaro_device:
self.controller.register(device.fibaro_id, self._update_callback)
@@ -240,14 +240,14 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
"""Return the fan setting."""
if not self._fan_mode_device:
return None
mode = self._fan_mode_device.fibaro_device.mode
mode = self._fan_mode_device.mode
return FANMODES[mode]

def set_fan_mode(self, fan_mode: str) -> None:
"""Set new target fan mode."""
if not self._fan_mode_device:
return
self._fan_mode_device.action("setFanMode", HA_FANMODES[fan_mode])
self._fan_mode_device.execute_action("setFanMode", [HA_FANMODES[fan_mode]])

@property
def fibaro_op_mode(self) -> str | int:
@@ -255,7 +255,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
if not self._op_mode_device:
return HA_OPMODES_HVAC[HVACMode.AUTO]

device = self._op_mode_device.fibaro_device
device = self._op_mode_device

if device.has_operating_mode:
return device.operating_mode
@@ -281,17 +281,17 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
if not self._op_mode_device:
return

if "setOperatingMode" in self._op_mode_device.fibaro_device.actions:
self._op_mode_device.action("setOperatingMode", HA_OPMODES_HVAC[hvac_mode])
elif "setThermostatMode" in self._op_mode_device.fibaro_device.actions:
device = self._op_mode_device.fibaro_device
device = self._op_mode_device
if "setOperatingMode" in device.actions:
device.execute_action("setOperatingMode", [HA_OPMODES_HVAC[hvac_mode]])
elif "setThermostatMode" in device.actions:
if device.has_supported_thermostat_modes:
for mode in device.supported_thermostat_modes:
if mode.lower() == hvac_mode:
self._op_mode_device.action("setThermostatMode", mode)
device.execute_action("setThermostatMode", [mode])
break
elif "setMode" in self._op_mode_device.fibaro_device.actions:
self._op_mode_device.action("setMode", HA_OPMODES_HVAC[hvac_mode])
elif "setMode" in device.actions:
device.execute_action("setMode", [HA_OPMODES_HVAC[hvac_mode]])

@property
def hvac_action(self) -> HVACAction | None:
@@ -299,7 +299,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
if not self._op_mode_device:
return None

device = self._op_mode_device.fibaro_device
device = self._op_mode_device
if device.has_thermostat_operating_state:
with suppress(ValueError):
return HVACAction(device.thermostat_operating_state.lower())
@@ -315,15 +315,15 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
if not self._op_mode_device:
return None

if self._op_mode_device.fibaro_device.has_thermostat_mode:
mode = self._op_mode_device.fibaro_device.thermostat_mode
if self._op_mode_device.has_thermostat_mode:
mode = self._op_mode_device.thermostat_mode
if self.preset_modes is not None and mode in self.preset_modes:
return mode
return None
if self._op_mode_device.fibaro_device.has_operating_mode:
mode = self._op_mode_device.fibaro_device.operating_mode
if self._op_mode_device.has_operating_mode:
mode = self._op_mode_device.operating_mode
else:
mode = self._op_mode_device.fibaro_device.mode
mode = self._op_mode_device.mode

if mode not in OPMODES_PRESET:
return None
@@ -334,20 +334,22 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
if self._op_mode_device is None:
return

if "setThermostatMode" in self._op_mode_device.fibaro_device.actions:
self._op_mode_device.action("setThermostatMode", preset_mode)
elif "setOperatingMode" in self._op_mode_device.fibaro_device.actions:
self._op_mode_device.action(
"setOperatingMode", HA_OPMODES_PRESET[preset_mode]
if "setThermostatMode" in self._op_mode_device.actions:
self._op_mode_device.execute_action("setThermostatMode", [preset_mode])
elif "setOperatingMode" in self._op_mode_device.actions:
self._op_mode_device.execute_action(
"setOperatingMode", [HA_OPMODES_PRESET[preset_mode]]
)
elif "setMode" in self._op_mode_device.actions:
self._op_mode_device.execute_action(
"setMode", [HA_OPMODES_PRESET[preset_mode]]
)
elif "setMode" in self._op_mode_device.fibaro_device.actions:
self._op_mode_device.action("setMode", HA_OPMODES_PRESET[preset_mode])

@property
def current_temperature(self) -> float | None:
"""Return the current temperature."""
if self._temp_sensor_device:
device = self._temp_sensor_device.fibaro_device
device = self._temp_sensor_device
if device.has_heating_thermostat_setpoint:
return device.heating_thermostat_setpoint
return device.value.float_value()
@@ -357,7 +359,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
def target_temperature(self) -> float | None:
"""Return the temperature we try to reach."""
if self._target_temp_device:
device = self._target_temp_device.fibaro_device
device = self._target_temp_device
if device.has_heating_thermostat_setpoint_future:
return device.heating_thermostat_setpoint_future
return device.target_level
@@ -368,9 +370,11 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
temperature = kwargs.get(ATTR_TEMPERATURE)
target = self._target_temp_device
if target is not None and temperature is not None:
if "setThermostatSetpoint" in target.fibaro_device.actions:
target.action("setThermostatSetpoint", self.fibaro_op_mode, temperature)
elif "setHeatingThermostatSetpoint" in target.fibaro_device.actions:
target.action("setHeatingThermostatSetpoint", temperature)
if "setThermostatSetpoint" in target.actions:
target.execute_action(
"setThermostatSetpoint", [self.fibaro_op_mode, temperature]
)
elif "setHeatingThermostatSetpoint" in target.actions:
target.execute_action("setHeatingThermostatSetpoint", [temperature])
else:
target.action("setTargetLevel", temperature)
target.execute_action("setTargetLevel", [temperature])

@@ -6,6 +6,7 @@ from collections.abc import Mapping
import logging
from typing import Any

from pyfibaro.fibaro_client import FibaroAuthenticationFailed, FibaroConnectFailed
from slugify import slugify
import voluptuous as vol

@@ -13,7 +14,7 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME
from homeassistant.core import HomeAssistant

from . import FibaroAuthFailed, FibaroConnectFailed, init_controller
from . import connect_fibaro_client
from .const import CONF_IMPORT_PLUGINS, DOMAIN

_LOGGER = logging.getLogger(__name__)
@@ -33,16 +34,16 @@ async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str

Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
"""
controller = await hass.async_add_executor_job(init_controller, data)
info, _ = await hass.async_add_executor_job(connect_fibaro_client, data)

_LOGGER.debug(
"Successfully connected to fibaro home center %s with name %s",
controller.hub_serial,
controller.hub_name,
info.serial_number,
info.hc_name,
)
return {
"serial_number": slugify(controller.hub_serial),
"name": controller.hub_name,
"serial_number": slugify(info.serial_number),
"name": info.hc_name,
}

@@ -75,7 +76,7 @@ class FibaroConfigFlow(ConfigFlow, domain=DOMAIN):
info = await _validate_input(self.hass, user_input)
except FibaroConnectFailed:
errors["base"] = "cannot_connect"
except FibaroAuthFailed:
except FibaroAuthenticationFailed:
errors["base"] = "invalid_auth"
else:
await self.async_set_unique_id(info["serial_number"])
@@ -106,7 +107,7 @@ class FibaroConfigFlow(ConfigFlow, domain=DOMAIN):
await _validate_input(self.hass, new_data)
except FibaroConnectFailed:
errors["base"] = "cannot_connect"
except FibaroAuthFailed:
except FibaroAuthenticationFailed:
errors["base"] = "invalid_auth"
else:
return self.async_update_reload_and_abort(

@@ -11,6 +11,8 @@ from pyfibaro.fibaro_device import DeviceModel
from homeassistant.const import ATTR_ARMED, ATTR_BATTERY_LEVEL
from homeassistant.helpers.entity import Entity

from . import FibaroController

_LOGGER = logging.getLogger(__name__)

@@ -22,7 +24,7 @@ class FibaroEntity(Entity):
def __init__(self, fibaro_device: DeviceModel) -> None:
"""Initialize the device."""
self.fibaro_device = fibaro_device
self.controller = fibaro_device.fibaro_controller
self.controller: FibaroController = fibaro_device.fibaro_controller
self.ha_id = fibaro_device.ha_id
self._attr_name = fibaro_device.friendly_name
self._attr_unique_id = fibaro_device.unique_id_str
@@ -54,15 +56,6 @@ class FibaroEntity(Entity):
return self.fibaro_device.value_2.int_value()
return None

def dont_know_message(self, cmd: str) -> None:
"""Make a warning in case we don't know how to perform an action."""
_LOGGER.warning(
"Not sure how to %s: %s (available actions: %s)",
cmd,
str(self.ha_id),
str(self.fibaro_device.actions),
)

def set_level(self, level: int) -> None:
"""Set the level of Fibaro device."""
self.action("setValue", level)
@@ -97,11 +90,7 @@ class FibaroEntity(Entity):

def action(self, cmd: str, *args: Any) -> None:
"""Perform an action on the Fibaro HC."""
if cmd in self.fibaro_device.actions:
self.fibaro_device.execute_action(cmd, args)
_LOGGER.debug("-> %s.%s%s called", str(self.ha_id), str(cmd), str(args))
else:
self.dont_know_message(cmd)
self.fibaro_device.execute_action(cmd, args)

@property
def current_binary_state(self) -> bool:

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["pyfibaro"],
"requirements": ["pyfibaro==0.8.0"]
"requirements": ["pyfibaro==0.8.2"]
}

@@ -1,29 +1,36 @@
"""The forked_daapd component."""

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from pyforked_daapd import ForkedDaapdAPI

from .const import DOMAIN, HASS_DATA_UPDATER_KEY
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .coordinator import ForkedDaapdConfigEntry, ForkedDaapdUpdater

PLATFORMS = [Platform.MEDIA_PLAYER]

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ForkedDaapdConfigEntry) -> bool:
"""Set up forked-daapd from a config entry by forwarding to platform."""
host: str = entry.data[CONF_HOST]
port: int = entry.data[CONF_PORT]
password: str = entry.data[CONF_PASSWORD]
forked_daapd_api = ForkedDaapdAPI(
async_get_clientsession(hass), host, port, password
)
forked_daapd_updater = ForkedDaapdUpdater(hass, forked_daapd_api, entry.entry_id)
entry.runtime_data = forked_daapd_updater
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(
hass: HomeAssistant, entry: ForkedDaapdConfigEntry
) -> bool:
"""Remove forked-daapd component."""
status = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if status and hass.data.get(DOMAIN) and hass.data[DOMAIN].get(entry.entry_id):
if websocket_handler := hass.data[DOMAIN][entry.entry_id][
HASS_DATA_UPDATER_KEY
].websocket_handler:
if status:
if websocket_handler := entry.runtime_data.websocket_handler:
websocket_handler.cancel()
del hass.data[DOMAIN][entry.entry_id]
if not hass.data[DOMAIN]:
del hass.data[DOMAIN]
return status

@@ -7,12 +7,7 @@ from typing import Any
from pyforked_daapd import ForkedDaapdAPI
import voluptuous as vol

from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
OptionsFlow,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
@@ -28,6 +23,7 @@ from .const import (
DEFAULT_TTS_VOLUME,
DOMAIN,
)
from .coordinator import ForkedDaapdConfigEntry

_LOGGER = logging.getLogger(__name__)

@@ -115,7 +111,7 @@ class ForkedDaapdFlowHandler(ConfigFlow, domain=DOMAIN):
@staticmethod
@callback
def async_get_options_flow(
config_entry: ConfigEntry,
config_entry: ForkedDaapdConfigEntry,
) -> ForkedDaapdOptionsFlowHandler:
"""Return options flow handler."""
return ForkedDaapdOptionsFlowHandler()

@@ -30,9 +30,8 @@ DEFAULT_SERVER_NAME = "My Server"
|
||||
DEFAULT_TTS_PAUSE_TIME = 1.2
|
||||
DEFAULT_TTS_VOLUME = 0.8
|
||||
DEFAULT_UNMUTE_VOLUME = 0.6
|
||||
DOMAIN = "forked_daapd" # key for hass.data
|
||||
DOMAIN = "forked_daapd"
|
||||
FD_NAME = "OwnTone"
|
||||
HASS_DATA_UPDATER_KEY = "UPDATER"
|
||||
KNOWN_PIPES = {"librespot-java"}
|
||||
PIPE_FUNCTION_MAP = {
|
||||
"librespot-java": {
|
||||
|
||||
@@ -9,6 +9,7 @@ from typing import Any

from pyforked_daapd import ForkedDaapdAPI

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.dispatcher import async_dispatcher_send
@@ -22,6 +23,8 @@ from .const import (
SIGNAL_UPDATE_QUEUE,
)

type ForkedDaapdConfigEntry = ConfigEntry[ForkedDaapdUpdater]

_LOGGER = logging.getLogger(__name__)

WS_NOTIFY_EVENT_TYPES = ["player", "outputs", "volume", "options", "queue", "database"]
@@ -39,6 +42,11 @@ class ForkedDaapdUpdater:
self._all_output_ids: set[str] = set()
self._entry_id = entry_id

@property
def api(self) -> ForkedDaapdAPI:
"""Return the API object."""
return self._api

async def async_init(self) -> None:
"""Perform async portion of class initialization."""
if not (server_config := await self._api.get_request("config")):

@@ -7,7 +7,6 @@ from collections import defaultdict
import logging
from typing import Any

from pyforked_daapd import ForkedDaapdAPI
from pylibrespot_java import LibrespotJavaAPI

from homeassistant.components import media_source
@@ -28,8 +27,7 @@ from homeassistant.components.spotify import (
resolve_spotify_media_type,
spotify_uri_from_media_browser_url,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import (
@@ -55,9 +53,7 @@ from .const import (
DEFAULT_TTS_PAUSE_TIME,
DEFAULT_TTS_VOLUME,
DEFAULT_UNMUTE_VOLUME,
DOMAIN,
FD_NAME,
HASS_DATA_UPDATER_KEY,
KNOWN_PIPES,
PIPE_FUNCTION_MAP,
SIGNAL_ADD_ZONES,
@@ -74,23 +70,21 @@ from .const import (
SUPPORTED_FEATURES_ZONE,
TTS_TIMEOUT,
)
from .coordinator import ForkedDaapdUpdater
from .coordinator import ForkedDaapdConfigEntry

_LOGGER = logging.getLogger(__name__)

async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: ForkedDaapdConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up forked-daapd from a config entry."""
forked_daapd_updater = config_entry.runtime_data

host: str = config_entry.data[CONF_HOST]
port: int = config_entry.data[CONF_PORT]
password: str = config_entry.data[CONF_PASSWORD]
forked_daapd_api = ForkedDaapdAPI(
async_get_clientsession(hass), host, port, password
)
forked_daapd_api = forked_daapd_updater.api
forked_daapd_master = ForkedDaapdMaster(
clientsession=async_get_clientsession(hass),
api=forked_daapd_api,
@@ -111,20 +105,12 @@ async def async_setup_entry(
)
config_entry.async_on_unload(config_entry.add_update_listener(update_listener))

if not hass.data.get(DOMAIN):
hass.data[DOMAIN] = {config_entry.entry_id: {}}

async_add_entities([forked_daapd_master], False)
forked_daapd_updater = ForkedDaapdUpdater(
hass, forked_daapd_api, config_entry.entry_id
)
hass.data[DOMAIN][config_entry.entry_id][HASS_DATA_UPDATER_KEY] = (
forked_daapd_updater
)

await forked_daapd_updater.async_init()

async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
async def update_listener(hass: HomeAssistant, entry: ForkedDaapdConfigEntry) -> None:
"""Handle options update."""
async_dispatcher_send(
hass, SIGNAL_CONFIG_OPTIONS_UPDATE.format(entry.entry_id), entry.options

@@ -0,0 +1 @@
"""FrankEver virtual integration."""
@@ -0,0 +1,6 @@
{
"domain": "frankever",
"name": "FrankEver",
"integration_type": "virtual",
"supported_by": "shelly"
}
@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250228.0"]
"requirements": ["home-assistant-frontend==20250306.0"]
}

@@ -1,6 +1,6 @@
{
"common": {
"data_description_password": "The Remote Admin Password from the Fully Kiosk Browser app settings.",
"data_description_password": "The Remote Admin password from the Fully Kiosk Browser app settings.",
"data_description_ssl": "Is the Fully Kiosk app configured to require SSL for the connection?",
"data_description_verify_ssl": "Should SSL certificartes be verified? This should be off for self-signed certificates."
|
||||
},
@@ -151,7 +151,7 @@
}
},
"set_config": {
"name": "Set Configuration",
"name": "Set configuration",
"description": "Sets a configuration parameter on Fully Kiosk Browser.",
"fields": {
"key": {
@@ -165,7 +165,7 @@
}
},
"start_application": {
"name": "Start Application",
"name": "Start application",
"description": "Starts an application on the device running Fully Kiosk Browser.",
"fields": {
"application": {

@@ -45,7 +45,7 @@
},
"mode": {
"name": "[%key:common::config_flow::data::mode%]",
"description": "One of: off, timer or footprint."
"description": "The zone's operating mode."
}
}
},

@@ -6,5 +6,5 @@
"dependencies": ["application_credentials"],
"documentation": "https://www.home-assistant.io/integrations/geocaching",
"iot_class": "cloud_polling",
"requirements": ["geocachingapi==0.2.1"]
"requirements": ["geocachingapi==0.3.0"]
}

@@ -4,9 +4,14 @@ from __future__ import annotations

import logging

from aiohttp.client_exceptions import ClientConnectorError
from gios import Gios
from gios.exceptions import GiosError

from homeassistant.components.air_quality import DOMAIN as AIR_QUALITY_PLATFORM
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession

@@ -36,8 +41,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: GiosConfigEntry) -> bool
device_registry.async_update_device(device_entry.id, new_identifiers={new_ids})

websession = async_get_clientsession(hass)
try:
gios = await Gios.create(websession, station_id)
except (GiosError, ConnectionError, ClientConnectorError) as err:
raise ConfigEntryNotReady from err

coordinator = GiosDataUpdateCoordinator(hass, entry, websession, station_id)
coordinator = GiosDataUpdateCoordinator(hass, entry, gios)
await coordinator.async_config_entry_first_refresh()

entry.runtime_data = GiosData(coordinator)

@@ -37,7 +37,7 @@ class GiosFlowHandler(ConfigFlow, domain=DOMAIN):
websession = async_get_clientsession(self.hass)

async with asyncio.timeout(API_TIMEOUT):
gios = Gios(user_input[CONF_STATION_ID], websession)
gios = await Gios.create(websession, user_input[CONF_STATION_ID])
await gios.async_update()

assert gios.station_name is not None

@@ -13,7 +13,7 @@ SCAN_INTERVAL: Final = timedelta(minutes=30)
DOMAIN: Final = "gios"
MANUFACTURER: Final = "Główny Inspektorat Ochrony Środowiska"

URL = "http://powietrze.gios.gov.pl/pjp/current/station_details/info/{station_id}"
URL = "https://powietrze.gios.gov.pl/pjp/current/station_details/info/{station_id}"

API_TIMEOUT: Final = 30

@@ -6,7 +6,6 @@ import asyncio
from dataclasses import dataclass
import logging

from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientConnectorError
from gios import Gios
from gios.exceptions import GiosError
@@ -39,11 +38,10 @@ class GiosDataUpdateCoordinator(DataUpdateCoordinator[GiosSensors]):
self,
hass: HomeAssistant,
config_entry: GiosConfigEntry,
session: ClientSession,
station_id: int,
gios: Gios,
) -> None:
"""Class to manage fetching GIOS data API."""
self.gios = Gios(station_id, session)
self.gios = gios

super().__init__(
hass,

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["dacite", "gios"],
"requirements": ["gios==5.0.0"]
"requirements": ["gios==6.0.0"]
}

@@ -8,7 +8,12 @@ from typing import Any

from google_drive_api.exceptions import GoogleDriveApiError

from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
from homeassistant.components.backup import (
AgentBackup,
BackupAgent,
BackupAgentError,
BackupNotFound,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import ChunkAsyncStreamIterator
@@ -93,13 +98,13 @@ class GoogleDriveBackupAgent(BackupAgent):
self,
backup_id: str,
**kwargs: Any,
) -> AgentBackup | None:
) -> AgentBackup:
"""Return a backup."""
backups = await self.async_list_backups()
for backup in backups:
if backup.backup_id == backup_id:
return backup
return None
raise BackupNotFound(f"Backup {backup_id} not found")

async def async_download_backup(
self,
@@ -120,7 +125,7 @@ class GoogleDriveBackupAgent(BackupAgent):
return ChunkAsyncStreamIterator(stream)
except (GoogleDriveApiError, HomeAssistantError, TimeoutError) as err:
raise BackupAgentError(f"Failed to download backup: {err}") from err
raise BackupAgentError("Backup not found")
raise BackupNotFound(f"Backup {backup_id} not found")

async def async_delete_backup(
self,
@@ -138,5 +143,7 @@ class GoogleDriveBackupAgent(BackupAgent):
_LOGGER.debug("Deleting file_id: %s", file_id)
await self._client.async_delete(file_id)
_LOGGER.debug("Deleted backup_id: %s", backup_id)
return
except (GoogleDriveApiError, HomeAssistantError, TimeoutError) as err:
raise BackupAgentError(f"Failed to delete backup: {err}") from err
raise BackupNotFound(f"Backup {backup_id} not found")

@@ -65,9 +65,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

prompt_parts = [call.data[CONF_PROMPT]]

config_entry: GoogleGenerativeAIConfigEntry = hass.config_entries.async_entries(
DOMAIN
)[0]
config_entry: GoogleGenerativeAIConfigEntry = (
hass.config_entries.async_loaded_entries(DOMAIN)[0]
)

client = config_entry.runtime_data

@@ -64,28 +64,18 @@ async def async_setup_entry(

SUPPORTED_SCHEMA_KEYS = {
"min_items",
"example",
"property_ordering",
"pattern",
"minimum",
"default",
"any_of",
"max_length",
"title",
"min_properties",
"min_length",
"max_items",
"maximum",
"nullable",
"max_properties",
# Gemini API does not support all of the OpenAPI schema
# SoT: https://ai.google.dev/api/caching#Schema
"type",
"description",
"enum",
"format",
"items",
"description",
"nullable",
"enum",
"max_items",
"min_items",
"properties",
"required",
"items",
}

@@ -109,9 +99,7 @@ def _format_schema(schema: dict[str, Any]) -> Schema:
key = _camel_to_snake(key)
if key not in SUPPORTED_SCHEMA_KEYS:
continue
if key == "any_of":
val = [_format_schema(subschema) for subschema in val]
elif key == "type":
if key == "type":
val = val.upper()
elif key == "format":
# Gemini API does not support all formats, see: https://ai.google.dev/api/caching#Schema
@@ -471,7 +459,9 @@ class GoogleGenerativeAIConversationEntity(
" ".join([part.text.strip() for part in response_parts if part.text])
)
return conversation.ConversationResult(
response=response, conversation_id=chat_log.conversation_id
response=response,
conversation_id=chat_log.conversation_id,
continue_conversation=chat_log.continue_conversation,
)

async def _async_entry_update_listener(

@@ -40,6 +40,10 @@ ATTR_ALIAS = "alias"
|
||||
ATTR_PRIORITY = "priority"
|
||||
ATTR_COST = "cost"
|
||||
ATTR_NOTES = "notes"
|
||||
ATTR_UP_DOWN = "up_down"
|
||||
ATTR_FREQUENCY = "frequency"
|
||||
ATTR_COUNTER_UP = "counter_up"
|
||||
ATTR_COUNTER_DOWN = "counter_down"
|
||||
|
||||
SERVICE_CAST_SKILL = "cast_skill"
|
||||
SERVICE_START_QUEST = "start_quest"
|
||||
@@ -56,6 +60,9 @@ SERVICE_SCORE_REWARD = "score_reward"
|
||||
SERVICE_TRANSFORMATION = "transformation"
|
||||
|
||||
SERVICE_UPDATE_REWARD = "update_reward"
|
||||
SERVICE_CREATE_REWARD = "create_reward"
|
||||
SERVICE_UPDATE_HABIT = "update_habit"
|
||||
SERVICE_CREATE_HABIT = "create_habit"
|
||||
|
||||
DEVELOPER_ID = "4c4ca53f-c059-4ffa-966e-9d29dd405daf"
|
||||
X_CLIENT = f"{DEVELOPER_ID} - {APPLICATION_NAME} {__version__}"
|
||||
|
||||
@@ -224,6 +224,25 @@
|
||||
"tag_options": "mdi:tag",
|
||||
"developer_options": "mdi:test-tube"
|
||||
}
|
||||
},
|
||||
"create_reward": {
|
||||
"service": "mdi:treasure-chest-outline",
|
||||
"sections": {
|
||||
"developer_options": "mdi:test-tube"
|
||||
}
|
||||
},
|
||||
"update_habit": {
|
||||
"service": "mdi:contrast-box",
|
||||
"sections": {
|
||||
"tag_options": "mdi:tag",
|
||||
"developer_options": "mdi:test-tube"
|
||||
}
|
||||
},
|
||||
"create_habit": {
|
||||
"service": "mdi:contrast-box",
|
||||
"sections": {
|
||||
"developer_options": "mdi:test-tube"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ from uuid import UUID
from aiohttp import ClientError
from habiticalib import (
Direction,
Frequency,
HabiticaException,
NotAuthorizedError,
NotFoundError,
@@ -41,8 +42,11 @@ from .const import (
ATTR_ARGS,
ATTR_CONFIG_ENTRY,
ATTR_COST,
ATTR_COUNTER_DOWN,
ATTR_COUNTER_UP,
ATTR_DATA,
ATTR_DIRECTION,
ATTR_FREQUENCY,
ATTR_ITEM,
ATTR_KEYWORD,
ATTR_NOTES,
@@ -54,6 +58,7 @@ from .const import (
ATTR_TARGET,
ATTR_TASK,
ATTR_TYPE,
ATTR_UP_DOWN,
DOMAIN,
EVENT_API_CALL_SUCCESS,
SERVICE_ABORT_QUEST,
@@ -61,6 +66,8 @@ from .const import (
SERVICE_API_CALL,
SERVICE_CANCEL_QUEST,
SERVICE_CAST_SKILL,
SERVICE_CREATE_HABIT,
SERVICE_CREATE_REWARD,
SERVICE_GET_TASKS,
SERVICE_LEAVE_QUEST,
SERVICE_REJECT_QUEST,
@@ -68,6 +75,7 @@ from .const import (
SERVICE_SCORE_REWARD,
SERVICE_START_QUEST,
SERVICE_TRANSFORMATION,
SERVICE_UPDATE_HABIT,
SERVICE_UPDATE_REWARD,
)
from .coordinator import HabiticaConfigEntry
@@ -112,18 +120,36 @@ SERVICE_TRANSFORMATION_SCHEMA = vol.Schema(
}
)

SERVICE_UPDATE_TASK_SCHEMA = vol.Schema(
BASE_TASK_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
vol.Required(ATTR_TASK): cv.string,
vol.Optional(ATTR_RENAME): cv.string,
vol.Optional(ATTR_NOTES): cv.string,
vol.Optional(ATTR_TAG): vol.All(cv.ensure_list, [str]),
vol.Optional(ATTR_REMOVE_TAG): vol.All(cv.ensure_list, [str]),
vol.Optional(ATTR_ALIAS): vol.All(
cv.string, cv.matches_regex("^[a-zA-Z0-9-_]*$")
),
vol.Optional(ATTR_COST): vol.Coerce(float),
vol.Optional(ATTR_COST): vol.All(vol.Coerce(float), vol.Range(0)),
vol.Optional(ATTR_PRIORITY): vol.All(
vol.Upper, vol.In(TaskPriority._member_names_)
),
vol.Optional(ATTR_UP_DOWN): vol.All(cv.ensure_list, [str]),
vol.Optional(ATTR_COUNTER_UP): vol.All(int, vol.Range(0)),
vol.Optional(ATTR_COUNTER_DOWN): vol.All(int, vol.Range(0)),
vol.Optional(ATTR_FREQUENCY): vol.Coerce(Frequency),
}
)

SERVICE_UPDATE_TASK_SCHEMA = BASE_TASK_SCHEMA.extend(
{
vol.Required(ATTR_TASK): cv.string,
vol.Optional(ATTR_REMOVE_TAG): vol.All(cv.ensure_list, [str]),
}
)

SERVICE_CREATE_TASK_SCHEMA = BASE_TASK_SCHEMA.extend(
{
vol.Required(ATTR_NAME): cv.string,
}
)

@@ -161,6 +187,13 @@ ITEMID_MAP = {
"shiny_seed": Skill.SHINY_SEED,
}

SERVICE_TASK_TYPE_MAP = {
SERVICE_UPDATE_REWARD: TaskType.REWARD,
SERVICE_CREATE_REWARD: TaskType.REWARD,
SERVICE_UPDATE_HABIT: TaskType.HABIT,
SERVICE_CREATE_HABIT: TaskType.HABIT,
}

def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry:
"""Return config entry or raise if not found or not loaded."""
@@ -539,33 +572,36 @@ def async_setup_services(hass: HomeAssistant) -> None:  # noqa: C901

return result

async def update_task(call: ServiceCall) -> ServiceResponse:
"""Update task action."""
async def create_or_update_task(call: ServiceCall) -> ServiceResponse:  # noqa: C901
"""Create or update task action."""
entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY])
coordinator = entry.runtime_data
await coordinator.async_refresh()
is_update = call.service in (SERVICE_UPDATE_REWARD, SERVICE_UPDATE_HABIT)
current_task = None

try:
current_task = next(
task
for task in coordinator.data.tasks
if call.data[ATTR_TASK] in (str(task.id), task.alias, task.text)
and task.Type is TaskType.REWARD
)
except StopIteration as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="task_not_found",
translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"},
) from e
if is_update:
try:
current_task = next(
task
for task in coordinator.data.tasks
if call.data[ATTR_TASK] in (str(task.id), task.alias, task.text)
and task.Type is SERVICE_TASK_TYPE_MAP[call.service]
)
except StopIteration as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="task_not_found",
translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"},
) from e

task_id = current_task.id
if TYPE_CHECKING:
assert task_id
data = Task()

if rename := call.data.get(ATTR_RENAME):
data["text"] = rename
if not is_update:
data["type"] = SERVICE_TASK_TYPE_MAP[call.service]

if (text := call.data.get(ATTR_RENAME)) or (text := call.data.get(ATTR_NAME)):
data["text"] = text

if (notes := call.data.get(ATTR_NOTES)) is not None:
data["notes"] = notes
@@ -574,7 +610,7 @@ def async_setup_services(hass: HomeAssistant) -> None:  # noqa: C901
remove_tags = cast(list[str], call.data.get(ATTR_REMOVE_TAG))

if tags or remove_tags:
update_tags = set(current_task.tags)
update_tags = set(current_task.tags) if current_task else set()
user_tags = {
tag.name.lower(): tag.id
for tag in coordinator.data.user.tags
@@ -633,8 +669,30 @@ def async_setup_services(hass: HomeAssistant) -> None:  # noqa: C901
if (cost := call.data.get(ATTR_COST)) is not None:
data["value"] = cost

if priority := call.data.get(ATTR_PRIORITY):
data["priority"] = TaskPriority[priority]

if frequency := call.data.get(ATTR_FREQUENCY):
data["frequency"] = frequency

if up_down := call.data.get(ATTR_UP_DOWN):
data["up"] = "up" in up_down
data["down"] = "down" in up_down

if counter_up := call.data.get(ATTR_COUNTER_UP):
data["counterUp"] = counter_up

if counter_down := call.data.get(ATTR_COUNTER_DOWN):
data["counterDown"] = counter_down

try:
response = await coordinator.habitica.update_task(task_id, data)
if is_update:
if TYPE_CHECKING:
assert current_task
assert current_task.id
response = await coordinator.habitica.update_task(current_task.id, data)
else:
response = await coordinator.habitica.create_task(data)
except TooManyRequestsError as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
@@ -659,10 +717,31 @@ def async_setup_services(hass: HomeAssistant) -> None:  # noqa: C901
hass.services.async_register(
DOMAIN,
SERVICE_UPDATE_REWARD,
update_task,
create_or_update_task,
schema=SERVICE_UPDATE_TASK_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN,
SERVICE_UPDATE_HABIT,
create_or_update_task,
schema=SERVICE_UPDATE_TASK_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN,
SERVICE_CREATE_REWARD,
create_or_update_task,
schema=SERVICE_CREATE_TASK_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN,
SERVICE_CREATE_HABIT,
create_or_update_task,
schema=SERVICE_CREATE_TASK_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN,
SERVICE_API_CALL,

@@ -144,26 +144,26 @@ update_reward:
fields:
config_entry: *config_entry
task: *task
rename:
rename: &rename
selector:
text:
notes:
notes: &notes
required: false
selector:
text:
multiline: true
cost:
required: false
selector:
selector: &cost_selector
number:
min: 0
step: 0.01
unit_of_measurement: "🪙"
mode: box
tag_options:
tag_options: &tag_options
collapsed: true
fields:
tag:
tag: &tag
required: false
selector:
text:
@@ -173,10 +173,92 @@ update_reward:
selector:
text:
multiple: true
developer_options:
developer_options: &developer_options
collapsed: true
fields:
alias:
alias: &alias
required: false
selector:
text:
create_reward:
fields:
config_entry: *config_entry
name: &name
required: true
selector:
text:
notes: *notes
cost:
required: true
selector: *cost_selector
tag: *tag
developer_options: *developer_options
update_habit:
fields:
config_entry: *config_entry
task: *task
rename: *rename
notes: *notes
up_down: &up_down
required: false
selector:
select:
options:
- value: up
label: "➕"
- value: down
label: "➖"
multiple: true
mode: list
priority: &priority
required: false
selector:
select:
options:
- "trivial"
- "easy"
- "medium"
- "hard"
mode: dropdown
translation_key: "priority"
frequency: &frequency
required: false
selector:
select:
options:
- "daily"
- "weekly"
- "monthly"
translation_key: "frequency"
mode: dropdown
tag_options: *tag_options
developer_options:
collapsed: true
fields:
counter_up:
required: false
selector:
number:
min: 0
step: 1
unit_of_measurement: "➕"
mode: box
counter_down:
required: false
selector:
number:
min: 0
step: 1
unit_of_measurement: "➖"
mode: box
alias: *alias
create_habit:
fields:
config_entry: *config_entry
name: *name
notes: *notes
up_down: *up_down
priority: *priority
frequency: *frequency
tag: *tag
developer_options: *developer_options

@@ -11,9 +11,9 @@
|
||||
"config_entry_description": "Select the Habitica account to update a task.",
|
||||
"task_description": "The name (or task ID) of the task you want to update.",
|
||||
"rename_name": "Rename",
|
||||
"rename_description": "The new title for the Habitica task.",
|
||||
"notes_name": "Update notes",
|
||||
"notes_description": "The new notes for the Habitica task.",
|
||||
"rename_description": "The title for the Habitica task.",
|
||||
"notes_name": "Notes",
|
||||
"notes_description": "The notes for the Habitica task.",
|
||||
"tag_name": "Add tags",
|
||||
"tag_description": "Add tags to the Habitica task. If a tag does not already exist, a new one will be created.",
|
||||
"remove_tag_name": "Remove tags",
|
||||
@@ -23,7 +23,15 @@
|
||||
"developer_options_name": "Advanced settings",
|
||||
"developer_options_description": "Additional features available in developer mode.",
|
||||
"tag_options_name": "Tags",
|
||||
"tag_options_description": "Add or remove tags from a task."
|
||||
"tag_options_description": "Add or remove tags from a task.",
|
||||
"name_description": "The title for the Habitica task.",
|
||||
"cost_name": "Cost",
|
||||
"difficulty_name": "Difficulty",
|
||||
"difficulty_description": "The difficulty of the task.",
|
||||
"frequency_name": "Counter reset",
|
||||
"frequency_description": "The frequency at which the habit's counter resets: daily at the start of a new day, weekly after Sunday night, or monthly at the beginning of a new month.",
|
||||
"up_down_name": "Rewards or losses",
|
||||
"up_down_description": "Whether the habit is good and rewarding (positive), bad and penalizing (negative), or both."
|
||||
},
|
||||
"config": {
|
||||
"abort": {
|
||||
@@ -707,7 +715,7 @@
|
||||
"description": "[%key:component::habitica::common::alias_description%]"
|
||||
},
|
||||
"cost": {
|
||||
"name": "Cost",
|
||||
"name": "[%key:component::habitica::common::cost_name%]",
|
||||
"description": "Update the cost of a reward."
|
||||
}
|
||||
},
|
||||
@@ -721,6 +729,150 @@
           "description": "[%key:component::habitica::common::developer_options_description%]"
         }
       }
+    },
+    "create_reward": {
+      "name": "Create reward",
+      "description": "Adds a new custom reward.",
+      "fields": {
+        "config_entry": {
+          "name": "[%key:component::habitica::common::config_entry_name%]",
+          "description": "Select the Habitica account to create a reward."
+        },
+        "name": {
+          "name": "[%key:component::habitica::common::task_name%]",
+          "description": "[%key:component::habitica::common::name_description%]"
+        },
+        "notes": {
+          "name": "[%key:component::habitica::common::notes_name%]",
+          "description": "[%key:component::habitica::common::notes_description%]"
+        },
+        "tag": {
+          "name": "[%key:component::habitica::common::tag_name%]",
+          "description": "[%key:component::habitica::common::tag_description%]"
+        },
+        "alias": {
+          "name": "[%key:component::habitica::common::alias_name%]",
+          "description": "[%key:component::habitica::common::alias_description%]"
+        },
+        "cost": {
+          "name": "[%key:component::habitica::common::cost_name%]",
+          "description": "The cost of the reward."
+        }
+      },
+      "sections": {
+        "developer_options": {
+          "name": "[%key:component::habitica::common::developer_options_name%]",
+          "description": "[%key:component::habitica::common::developer_options_description%]"
+        }
+      }
+    },
+    "update_habit": {
+      "name": "Update a habit",
+      "description": "Updates a specific habit for the selected Habitica character",
+      "fields": {
+        "config_entry": {
+          "name": "[%key:component::habitica::common::config_entry_name%]",
+          "description": "Select the Habitica account to update a habit."
+        },
+        "task": {
+          "name": "[%key:component::habitica::common::task_name%]",
+          "description": "[%key:component::habitica::common::task_description%]"
+        },
+        "rename": {
+          "name": "[%key:component::habitica::common::rename_name%]",
+          "description": "[%key:component::habitica::common::rename_description%]"
+        },
+        "notes": {
+          "name": "[%key:component::habitica::common::notes_name%]",
+          "description": "[%key:component::habitica::common::notes_description%]"
+        },
+        "tag": {
+          "name": "[%key:component::habitica::common::tag_name%]",
+          "description": "[%key:component::habitica::common::tag_description%]"
+        },
+        "remove_tag": {
+          "name": "[%key:component::habitica::common::remove_tag_name%]",
+          "description": "[%key:component::habitica::common::remove_tag_description%]"
+        },
+        "alias": {
+          "name": "[%key:component::habitica::common::alias_name%]",
+          "description": "[%key:component::habitica::common::alias_description%]"
+        },
+        "priority": {
+          "name": "[%key:component::habitica::common::difficulty_name%]",
+          "description": "[%key:component::habitica::common::difficulty_description%]"
+        },
+        "frequency": {
+          "name": "[%key:component::habitica::common::frequency_name%]",
+          "description": "[%key:component::habitica::common::frequency_description%]"
+        },
+        "up_down": {
+          "name": "[%key:component::habitica::common::up_down_name%]",
+          "description": "[%key:component::habitica::common::up_down_description%]"
+        },
+        "counter_up": {
+          "name": "Adjust positive counter",
+          "description": "Update the up counter of a positive habit."
+        },
+        "counter_down": {
+          "name": "Adjust negative counter",
+          "description": "Update the down counter of a negative habit."
+        }
+      },
+      "sections": {
+        "tag_options": {
+          "name": "[%key:component::habitica::common::tag_options_name%]",
+          "description": "[%key:component::habitica::common::tag_options_description%]"
+        },
+        "developer_options": {
+          "name": "[%key:component::habitica::common::developer_options_name%]",
+          "description": "[%key:component::habitica::common::developer_options_description%]"
+        }
+      }
+    },
+    "create_habit": {
+      "name": "Create habit",
+      "description": "Adds a new habit.",
+      "fields": {
+        "config_entry": {
+          "name": "[%key:component::habitica::common::config_entry_name%]",
+          "description": "Select the Habitica account to create a habit."
+        },
+        "name": {
+          "name": "[%key:component::habitica::common::task_name%]",
+          "description": "[%key:component::habitica::common::name_description%]"
+        },
+        "notes": {
+          "name": "[%key:component::habitica::common::notes_name%]",
+          "description": "[%key:component::habitica::common::notes_description%]"
+        },
+        "tag": {
+          "name": "[%key:component::habitica::common::tag_name%]",
+          "description": "[%key:component::habitica::common::tag_description%]"
+        },
+        "alias": {
+          "name": "[%key:component::habitica::common::alias_name%]",
+          "description": "[%key:component::habitica::common::alias_description%]"
+        },
+        "priority": {
+          "name": "[%key:component::habitica::common::difficulty_name%]",
+          "description": "[%key:component::habitica::common::difficulty_description%]"
+        },
+        "frequency": {
+          "name": "[%key:component::habitica::common::frequency_name%]",
+          "description": "[%key:component::habitica::common::frequency_description%]"
+        },
+        "up_down": {
+          "name": "[%key:component::habitica::common::up_down_name%]",
+          "description": "[%key:component::habitica::common::up_down_description%]"
+        }
+      },
+      "sections": {
+        "developer_options": {
+          "name": "[%key:component::habitica::common::developer_options_name%]",
+          "description": "[%key:component::habitica::common::developer_options_description%]"
+        }
+      }
     }
   },
   "selector": {
@@ -755,6 +907,14 @@
         "medium": "Medium",
         "hard": "Hard"
       }
+    },
+    "frequency": {
+      "options": {
+        "daily": "Daily",
+        "weekly": "Weekly",
+        "monthly": "Monthly",
+        "yearly": "Yearly"
+      }
     }
   }
 }

@@ -4,6 +4,7 @@ from __future__ import annotations

 import asyncio
 from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
+from contextlib import suppress
 import logging
 import os
 from pathlib import Path, PurePath
@@ -173,7 +174,7 @@ class SupervisorBackupAgent(BackupAgent):
                 ),
             )
         except SupervisorNotFoundError as err:
-            raise BackupNotFound from err
+            raise BackupNotFound(f"Backup {backup_id} not found") from err

     async def async_upload_backup(
         self,
@@ -186,13 +187,14 @@ class SupervisorBackupAgent(BackupAgent):

         The upload will be skipped if the backup already exists in the agent's location.
         """
-        if await self.async_get_backup(backup.backup_id):
-            _LOGGER.debug(
-                "Backup %s already exists in location %s",
-                backup.backup_id,
-                self.location,
-            )
-            return
+        with suppress(BackupNotFound):
+            if await self.async_get_backup(backup.backup_id):
+                _LOGGER.debug(
+                    "Backup %s already exists in location %s",
+                    backup.backup_id,
+                    self.location,
+                )
+                return
         stream = await open_stream()
         upload_options = supervisor_backups.UploadBackupOptions(
             location={self.location},
@@ -218,14 +220,14 @@ class SupervisorBackupAgent(BackupAgent):
         self,
         backup_id: str,
         **kwargs: Any,
-    ) -> AgentBackup | None:
+    ) -> AgentBackup:
         """Return a backup."""
         try:
             details = await self._client.backups.backup_info(backup_id)
-        except SupervisorNotFoundError:
-            return None
+        except SupervisorNotFoundError as err:
+            raise BackupNotFound(f"Backup {backup_id} not found") from err
         if self.location not in details.location_attributes:
-            return None
+            raise BackupNotFound(f"Backup {backup_id} not found")
         return _backup_details_to_agent_backup(details, self.location)

     async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None:
@@ -237,8 +239,8 @@ class SupervisorBackupAgent(BackupAgent):
                     location={self.location}
                 ),
             )
-        except SupervisorNotFoundError:
-            _LOGGER.debug("Backup %s does not exist", backup_id)
+        except SupervisorNotFoundError as err:
+            raise BackupNotFound(f"Backup {backup_id} not found") from err


 class SupervisorBackupReaderWriter(BackupReaderWriter):
@@ -492,10 +494,12 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
     ) -> None:
         """Restore a backup."""
         manager = self._hass.data[DATA_MANAGER]
-        # The backup manager has already checked that the backup exists so we don't need to
-        # check that here.
+        # The backup manager has already checked that the backup exists so we don't
+        # need to catch BackupNotFound here.
         backup = await manager.backup_agents[agent_id].async_get_backup(backup_id)
         if (
             # Check for None to be backwards compatible with the old BackupAgent API,
             # this can be removed in HA Core 2025.10
             backup
             and restore_homeassistant
             and restore_database != backup.database_included

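The hunks above flip the SupervisorBackupAgent error contract: async_get_backup and async_delete_backup now raise BackupNotFound with a message instead of returning None or just logging. A minimal caller-side sketch under that assumption; the helper name is illustrative and the import path is an assumption, not shown in this diff.

from homeassistant.components.backup import AgentBackup, BackupNotFound  # import path assumed


async def get_backup_or_none(agent, backup_id: str) -> AgentBackup | None:
    """Illustrative shim reproducing the old None-returning behaviour on top of the new API."""
    try:
        return await agent.async_get_backup(backup_id)
    except BackupNotFound:
        # New contract: a missing backup raises instead of returning None.
        return None
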
@@ -266,7 +266,7 @@ class HomeConnectCoordinator(
             _LOGGER.debug(
                 "Non-breaking error (%s) while listening for events,"
                 " continuing in %s seconds",
-                type(error).__name__,
+                error,
                 retry_time,
             )
             await asyncio.sleep(retry_time)
@@ -343,9 +343,7 @@ class HomeConnectCoordinator(
             _LOGGER.debug(
                 "Error fetching settings for %s: %s",
                 appliance.ha_id,
-                error
-                if isinstance(error, HomeConnectApiError)
-                else type(error).__name__,
+                error,
             )
             settings = {}
         try:
@@ -357,9 +355,7 @@ class HomeConnectCoordinator(
             _LOGGER.debug(
                 "Error fetching status for %s: %s",
                 appliance.ha_id,
-                error
-                if isinstance(error, HomeConnectApiError)
-                else type(error).__name__,
+                error,
             )
             status = {}

@@ -373,9 +369,7 @@ class HomeConnectCoordinator(
             _LOGGER.debug(
                 "Error fetching programs for %s: %s",
                 appliance.ha_id,
-                error
-                if isinstance(error, HomeConnectApiError)
-                else type(error).__name__,
+                error,
             )
         else:
             programs.extend(all_programs.programs)
@@ -465,9 +459,7 @@ class HomeConnectCoordinator(
             _LOGGER.debug(
                 "Error fetching options for %s: %s",
                 ha_id,
-                error
-                if isinstance(error, HomeConnectApiError)
-                else type(error).__name__,
+                error,
             )
             return {}

@@ -49,6 +49,23 @@
         "default": "mdi:map-marker-remove-variant"
       }
     },
+    "button": {
+      "open_door": {
+        "default": "mdi:door-open"
+      },
+      "partly_open_door": {
+        "default": "mdi:door-open"
+      },
+      "pause_program": {
+        "default": "mdi:pause"
+      },
+      "resume_program": {
+        "default": "mdi:play"
+      },
+      "stop_program": {
+        "default": "mdi:stop"
+      }
+    },
     "sensor": {
       "operation_state": {
         "default": "mdi:state-machine",

@@ -7,6 +7,6 @@
   "documentation": "https://www.home-assistant.io/integrations/home_connect",
   "iot_class": "cloud_push",
   "loggers": ["aiohomeconnect"],
-  "requirements": ["aiohomeconnect==0.16.2"],
+  "requirements": ["aiohomeconnect==0.16.3"],
   "single_config_entry": true
 }

@@ -386,6 +386,13 @@ class HomeConnectProgramSensor(HomeConnectSensor):

     def update_native_value(self) -> None:
         """Update the program sensor's status."""
+        self.program_running = (
+            status := self.appliance.status.get(StatusKey.BSH_COMMON_OPERATION_STATE)
+        ) is not None and status.value in [
+            BSH_OPERATION_STATE_RUN,
+            BSH_OPERATION_STATE_PAUSE,
+            BSH_OPERATION_STATE_FINISHED,
+        ]
         event = self.appliance.events.get(cast(EventKey, self.bsh_key))
         if event:
             self._update_native_value(event.value)

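The lines added to update_native_value above lean on an assignment expression; for readers less used to the walrus operator, here is an equivalent expanded form, purely for illustration and using the same names as the hunk above.

# Equivalent to the walrus expression added above, spelled out step by step.
status = self.appliance.status.get(StatusKey.BSH_COMMON_OPERATION_STATE)
self.program_running = status is not None and status.value in (
    BSH_OPERATION_STATE_RUN,
    BSH_OPERATION_STATE_PAUSE,
    BSH_OPERATION_STATE_FINISHED,
)
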
@@ -354,7 +354,7 @@
           "options": {
             "consumer_products_coffee_maker_enum_type_flow_rate_normal": "Normal",
             "consumer_products_coffee_maker_enum_type_flow_rate_intense": "Intense",
-            "consumer_products_coffee_maker_enum_type_flow_rate_intense_plus": "Intense plus"
+            "consumer_products_coffee_maker_enum_type_flow_rate_intense_plus": "Intense +"
           }
         },
         "coffee_milk_ratio": {
@@ -410,7 +410,7 @@
             "laundry_care_dryer_enum_type_drying_target_iron_dry": "Iron dry",
             "laundry_care_dryer_enum_type_drying_target_gentle_dry": "Gentle dry",
             "laundry_care_dryer_enum_type_drying_target_cupboard_dry": "Cupboard dry",
-            "laundry_care_dryer_enum_type_drying_target_cupboard_dry_plus": "Cupboard dry plus",
+            "laundry_care_dryer_enum_type_drying_target_cupboard_dry_plus": "Cupboard dry +",
             "laundry_care_dryer_enum_type_drying_target_extra_dry": "Extra dry"
           }
         },
@@ -592,7 +592,7 @@
           "description": "Defines if the program sequence is optimized with a special drying cycle to ensure more shine on glasses and plastic items."
         },
         "dishcare_dishwasher_option_vario_speed_plus": {
-          "name": "Vario speed plus",
+          "name": "Vario speed +",
           "description": "Defines if the program run time is reduced by up to 66% with the usual optimum cleaning and drying."
         },
         "dishcare_dishwasher_option_silence_on_demand": {
@@ -608,7 +608,7 @@
           "description": "Defines if improved drying for glasses and plasticware is enabled."
         },
         "dishcare_dishwasher_option_hygiene_plus": {
-          "name": "Hygiene plus",
+          "name": "Hygiene +",
           "description": "Defines if the cleaning is done with increased temperature. This ensures maximum hygienic cleanliness for regular use."
         },
         "dishcare_dishwasher_option_eco_dry": {
@@ -1462,7 +1462,7 @@
             "inactive": "Inactive",
             "ready": "Ready",
             "delayedstart": "Delayed start",
-            "run": "Run",
+            "run": "Running",
             "pause": "[%key:common::state::paused%]",
             "actionrequired": "Action required",
             "finished": "Finished",

@@ -2,7 +2,7 @@

 import re

-from aiohomeconnect.model.error import HomeConnectApiError, HomeConnectError
+from aiohomeconnect.model.error import HomeConnectError

 RE_CAMEL_CASE = re.compile(r"(?<!^)(?=[A-Z])|(?=\d)(?<=\D)")

@@ -11,11 +11,7 @@ def get_dict_from_home_connect_error(
     err: HomeConnectError,
 ) -> dict[str, str]:
     """Return a translation string from a Home Connect error."""
-    return {
-        "error": str(err)
-        if isinstance(err, HomeConnectApiError)
-        else type(err).__name__
-    }
+    return {"error": str(err)}


 def bsh_key_to_translation_key(bsh_key: str) -> str:

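With the simplification above, get_dict_from_home_connect_error returns {"error": str(err)} for every HomeConnectError, presumably because the bumped aiohomeconnect (0.16.3 in the manifest hunk earlier) gives these errors useful string representations. A hedged sketch of a typical call-site pattern; the wrapped request and the translation key are assumptions, not shown in this diff.

from homeassistant.exceptions import HomeAssistantError


async def _example_call() -> None:
    """Hypothetical wrapper showing how the helper feeds translation placeholders."""
    try:
        await some_home_connect_request()  # placeholder, not a real aiohomeconnect call
    except HomeConnectError as err:
        raise HomeAssistantError(
            translation_domain=DOMAIN,  # DOMAIN = "home_connect", assumed in scope
            translation_key="some_error",  # hypothetical translation key
            translation_placeholders=get_dict_from_home_connect_error(err),
        ) from err
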
Some files were not shown because too many files have changed in this diff.