mirror of
https://github.com/home-assistant/core.git
synced 2026-02-07 07:44:50 +01:00
Compare commits
296 Commits
edenhaus-g
...
python-3.1
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7a1e378300 | ||
|
|
f1e1792e64 | ||
|
|
5de171f714 | ||
|
|
6b07b2b8bc | ||
|
|
79e0a93e48 | ||
|
|
3867c1d7d1 | ||
|
|
b9b6b050cc | ||
|
|
d960736b3d | ||
|
|
afa0f572ce | ||
|
|
a6a1b9ddbd | ||
|
|
c1f5b4593f | ||
|
|
f1de4dc1cc | ||
|
|
4ae0d9a9c6 | ||
|
|
fcd0b579cf | ||
|
|
dee7a237ee | ||
|
|
3975eba12c | ||
|
|
ade91ebdab | ||
|
|
1bf194dd0f | ||
|
|
2eca8db8aa | ||
|
|
78415bc1ff | ||
|
|
e2469bcd0f | ||
|
|
54d64b7da2 | ||
|
|
d548f3d12f | ||
|
|
668995da73 | ||
|
|
9eeae8eac6 | ||
|
|
7e7056aa94 | ||
|
|
b633b8d271 | ||
|
|
45c7b9ccb8 | ||
|
|
3ad1a57dfc | ||
|
|
3cbe236a36 | ||
|
|
39816c1e8a | ||
|
|
5587dd43b9 | ||
|
|
715d1e4eb8 | ||
|
|
af172fb70d | ||
|
|
8c8bc104eb | ||
|
|
51b20fb5db | ||
|
|
445ba26667 | ||
|
|
886448f4ba | ||
|
|
ede4341ef3 | ||
|
|
fe363f32ec | ||
|
|
31562e7571 | ||
|
|
0bdb51e4ca | ||
|
|
67a5d7ac21 | ||
|
|
5e7f06c476 | ||
|
|
9a69852296 | ||
|
|
a722925b8e | ||
|
|
419c5de50e | ||
|
|
37faed565e | ||
|
|
622953e61f | ||
|
|
17926c3f6a | ||
|
|
48d85170c2 | ||
|
|
08d179c520 | ||
|
|
5752387da8 | ||
|
|
1ebde65f03 | ||
|
|
89f536e332 | ||
|
|
8784329333 | ||
|
|
d73538722d | ||
|
|
d49d3f0a2f | ||
|
|
8466dd4c2b | ||
|
|
6bb1e688c6 | ||
|
|
9bc1c4c4f3 | ||
|
|
a554cb8211 | ||
|
|
145d38403e | ||
|
|
10d4af5674 | ||
|
|
ed3b4d2de3 | ||
|
|
e66d324877 | ||
|
|
f7f18627a2 | ||
|
|
d18630020f | ||
|
|
a715ec318c | ||
|
|
0ef5a77dc9 | ||
|
|
b43abf83b8 | ||
|
|
84d28db3a7 | ||
|
|
74d99fa0be | ||
|
|
3ff0320ed8 | ||
|
|
16cb9e9785 | ||
|
|
d92279dfcb | ||
|
|
4b9d28d0e5 | ||
|
|
e6a60dfe50 | ||
|
|
d219056e9d | ||
|
|
6ff6b099b5 | ||
|
|
c5b9699098 | ||
|
|
6937bfdf67 | ||
|
|
39ee3fcfaa | ||
|
|
16cdfd05a0 | ||
|
|
f49d4787be | ||
|
|
2076700dc4 | ||
|
|
76c135913e | ||
|
|
c3534d5445 | ||
|
|
fc60b16d65 | ||
|
|
0443c93f77 | ||
|
|
f97cf0e446 | ||
|
|
bd4fa0d5c2 | ||
|
|
f60d367184 | ||
|
|
6e231f2ec5 | ||
|
|
13ba2d2e47 | ||
|
|
ba4a163e24 | ||
|
|
b7db8684db | ||
|
|
a7595dc468 | ||
|
|
d2c8c3565b | ||
|
|
422d1031f4 | ||
|
|
c9a79cf100 | ||
|
|
c42d47a619 | ||
|
|
a26f871d32 | ||
|
|
d481c1bcc5 | ||
|
|
379e3596b4 | ||
|
|
423a7cdbba | ||
|
|
841fa48186 | ||
|
|
61e35157e3 | ||
|
|
87f655f56d | ||
|
|
692b8d0722 | ||
|
|
5f9f623c3f | ||
|
|
e595b6cd90 | ||
|
|
a748eebf3e | ||
|
|
6bdd544867 | ||
|
|
705eadf8ce | ||
|
|
b7c6e4eafc | ||
|
|
f4aba286fe | ||
|
|
5fa4f6de11 | ||
|
|
db1f045c42 | ||
|
|
eaba4817bd | ||
|
|
96cb2247df | ||
|
|
99fa7a1f52 | ||
|
|
e0ba928296 | ||
|
|
16fd5e8f1f | ||
|
|
201e95a417 | ||
|
|
dc01592991 | ||
|
|
c5fb2bd566 | ||
|
|
d03d996155 | ||
|
|
9618412a44 | ||
|
|
967e97661f | ||
|
|
b757312fe0 | ||
|
|
2ed8ec0bdf | ||
|
|
97f6e3741a | ||
|
|
c2d3244d26 | ||
|
|
eafeba792d | ||
|
|
c9318b6fbf | ||
|
|
99be382abf | ||
|
|
7cfcfca210 | ||
|
|
f29daccb19 | ||
|
|
be869fce6c | ||
|
|
7bb0414a39 | ||
|
|
3f8807d063 | ||
|
|
67642e6246 | ||
|
|
0d215597f3 | ||
|
|
f41bd2b582 | ||
|
|
5c9ec1911b | ||
|
|
1a0b7fe984 | ||
|
|
26ee25d7bb | ||
|
|
aabf52d3cf | ||
|
|
99fcb46a7e | ||
|
|
6580c5e5bf | ||
|
|
63e7d4dc08 | ||
|
|
cc6900d846 | ||
|
|
ca2ad22884 | ||
|
|
40944f0f2d | ||
|
|
91a3e488b1 | ||
|
|
9a1f517e6e | ||
|
|
c82c614bb9 | ||
|
|
20914dce67 | ||
|
|
5fc407d2f3 | ||
|
|
c7444d38a1 | ||
|
|
81f6136bda | ||
|
|
862d0ea49e | ||
|
|
f2fdfed241 | ||
|
|
15640049cb | ||
|
|
5c163434f8 | ||
|
|
e54c2ea55e | ||
|
|
1ec42693ab | ||
|
|
672864ae4f | ||
|
|
e54d7e42cb | ||
|
|
5d63fce015 | ||
|
|
190fe10eed | ||
|
|
ef410c1e2a | ||
|
|
5a712398e7 | ||
|
|
b1be3fe0da | ||
|
|
97a7ab011b | ||
|
|
694a3050b9 | ||
|
|
8164e65188 | ||
|
|
9af0d1eed4 | ||
|
|
72e6ca55ba | ||
|
|
0fb62a7e97 | ||
|
|
930eb70a8b | ||
|
|
462104fa68 | ||
|
|
d0c77d8a7e | ||
|
|
606780b20f | ||
|
|
8f465cf2ca | ||
|
|
4e29476dd9 | ||
|
|
b4328083be | ||
|
|
72ba59f559 | ||
|
|
826168b601 | ||
|
|
66f181992c | ||
|
|
336ef4c37b | ||
|
|
72e7bf7f9c | ||
|
|
acbdbc9be7 | ||
|
|
3551382f8d | ||
|
|
95014d7e6d | ||
|
|
dfe1990484 | ||
|
|
15ff5d0f74 | ||
|
|
1407f61a9c | ||
|
|
6107b794d6 | ||
|
|
7ab8ceab7e | ||
|
|
a4db6a9ebc | ||
|
|
12a2650b6b | ||
|
|
23da7ecedd | ||
|
|
8d9e7b0b26 | ||
|
|
9664047345 | ||
|
|
804fbf9cef | ||
|
|
e10fe074c9 | ||
|
|
7b0e21da74 | ||
|
|
29e142cf1e | ||
|
|
6b765ebabb | ||
|
|
899aa62697 | ||
|
|
a11efba405 | ||
|
|
78280dfc5a | ||
|
|
4220bab08a | ||
|
|
f7dcf8de15 | ||
|
|
7e32b50fee | ||
|
|
c875b75272 | ||
|
|
7368b9ca1d | ||
|
|
493e8c1a22 | ||
|
|
1b16b24550 | ||
|
|
7637300632 | ||
|
|
bdbce57217 | ||
|
|
8536472fe9 | ||
|
|
ad4fda7bb4 | ||
|
|
36e1b86952 | ||
|
|
0c9834e4ca | ||
|
|
360af74519 | ||
|
|
d099ac457d | ||
|
|
fc330ce165 | ||
|
|
b52dd5fc05 | ||
|
|
b517ce132f | ||
|
|
acec35846c | ||
|
|
af661898c2 | ||
|
|
e2f5a4849c | ||
|
|
399b7f6223 | ||
|
|
782f7af332 | ||
|
|
66af6565bf | ||
|
|
8a00aa8550 | ||
|
|
b07adc03d2 | ||
|
|
a978e3c199 | ||
|
|
bb3c977448 | ||
|
|
8057de408e | ||
|
|
0be4ee71e7 | ||
|
|
7ff5f14748 | ||
|
|
d5e58c817d | ||
|
|
8a08016fb9 | ||
|
|
d45ddd3762 | ||
|
|
0e98e8c893 | ||
|
|
84a09bec0e | ||
|
|
6fd27ec7ec | ||
|
|
91e2a318a5 | ||
|
|
1221c5bcad | ||
|
|
8e3befc301 | ||
|
|
2df62385f1 | ||
|
|
9f3b13dfa1 | ||
|
|
9c27e1233e | ||
|
|
825da95550 | ||
|
|
18bda2dbbe | ||
|
|
630a9b4896 | ||
|
|
e6399d2bfe | ||
|
|
4bae0d15ec | ||
|
|
760a75d1f1 | ||
|
|
c08912fc78 | ||
|
|
316d804336 | ||
|
|
d3658a52dd | ||
|
|
b3e42a1f07 | ||
|
|
dee07b25a2 | ||
|
|
f460bf36fe | ||
|
|
020d122799 | ||
|
|
699b4b12da | ||
|
|
3ec96f21d1 | ||
|
|
c6c5970864 | ||
|
|
570146c4a6 | ||
|
|
75b7f80f6c | ||
|
|
1c1a99e5ae | ||
|
|
0203f6e6f1 | ||
|
|
66612f97ec | ||
|
|
6d215c284c | ||
|
|
8e9e406341 | ||
|
|
b6772c4104 | ||
|
|
d6a830da1a | ||
|
|
2f7a895e28 | ||
|
|
5cb5b0eb45 | ||
|
|
33ae951030 | ||
|
|
1cb56216ba | ||
|
|
6409574ecf | ||
|
|
a94d39e493 | ||
|
|
fec008c589 | ||
|
|
358e58ea85 | ||
|
|
e8bbc9598f | ||
|
|
49e0c8e0bd | ||
|
|
0623da8aa9 | ||
|
|
8356524cf2 | ||
|
|
420123f1ff | ||
|
|
3ea3d88889 |
21
.github/workflows/builder.yml
vendored
21
.github/workflows/builder.yml
vendored
@@ -10,12 +10,12 @@ on:
|
||||
|
||||
env:
|
||||
BUILD_TYPE: core
|
||||
DEFAULT_PYTHON: "3.13"
|
||||
DEFAULT_PYTHON: "3.14.3"
|
||||
PIP_TIMEOUT: 60
|
||||
UV_HTTP_TIMEOUT: 60
|
||||
UV_SYSTEM_PYTHON: "true"
|
||||
# Base image version from https://github.com/home-assistant/docker
|
||||
BASE_IMAGE_VERSION: "2025.12.0"
|
||||
BASE_IMAGE_VERSION: "2026.02.0"
|
||||
ARCHITECTURES: '["amd64", "aarch64"]'
|
||||
|
||||
jobs:
|
||||
@@ -100,7 +100,7 @@ jobs:
|
||||
|
||||
- name: Download nightly wheels of frontend
|
||||
if: needs.init.outputs.channel == 'dev'
|
||||
uses: dawidd6/action-download-artifact@0bd50d53a6d7fb5cb921e607957e9cc12b4ce392 # v12
|
||||
uses: dawidd6/action-download-artifact@5c98f0b039f36ef966fdb7dfa9779262785ecb05 # v14
|
||||
with:
|
||||
github_token: ${{secrets.GITHUB_TOKEN}}
|
||||
repo: home-assistant/frontend
|
||||
@@ -111,7 +111,7 @@ jobs:
|
||||
|
||||
- name: Download nightly wheels of intents
|
||||
if: needs.init.outputs.channel == 'dev'
|
||||
uses: dawidd6/action-download-artifact@0bd50d53a6d7fb5cb921e607957e9cc12b4ce392 # v12
|
||||
uses: dawidd6/action-download-artifact@5c98f0b039f36ef966fdb7dfa9779262785ecb05 # v14
|
||||
with:
|
||||
github_token: ${{secrets.GITHUB_TOKEN}}
|
||||
repo: OHF-Voice/intents-package
|
||||
@@ -184,7 +184,7 @@ jobs:
|
||||
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
@@ -235,6 +235,7 @@ jobs:
|
||||
build-args: |
|
||||
BUILD_FROM=${{ steps.vars.outputs.base_image }}
|
||||
tags: ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}
|
||||
outputs: type=image,push=true,compression=zstd,compression-level=9,force-compression=true,oci-mediatypes=true
|
||||
labels: |
|
||||
io.hass.arch=${{ matrix.arch }}
|
||||
io.hass.version=${{ needs.init.outputs.version }}
|
||||
@@ -287,7 +288,7 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
@@ -358,13 +359,13 @@ jobs:
|
||||
|
||||
- name: Login to DockerHub
|
||||
if: matrix.registry == 'docker.io/homeassistant'
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
@@ -522,7 +523,7 @@ jobs:
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
@@ -551,7 +552,7 @@ jobs:
|
||||
|
||||
- name: Generate artifact attestation
|
||||
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
|
||||
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0
|
||||
uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3.2.0
|
||||
with:
|
||||
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.push.outputs.digest }}
|
||||
|
||||
14
.github/workflows/ci.yaml
vendored
14
.github/workflows/ci.yaml
vendored
@@ -40,9 +40,9 @@ env:
|
||||
CACHE_VERSION: 2
|
||||
UV_CACHE_VERSION: 1
|
||||
MYPY_CACHE_VERSION: 1
|
||||
HA_SHORT_VERSION: "2026.2"
|
||||
DEFAULT_PYTHON: "3.13.11"
|
||||
ALL_PYTHON_VERSIONS: "['3.13.11', '3.14.2']"
|
||||
HA_SHORT_VERSION: "2026.3"
|
||||
DEFAULT_PYTHON: "3.14.3"
|
||||
ALL_PYTHON_VERSIONS: "['3.14.3']"
|
||||
# 10.3 is the oldest supported version
|
||||
# - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
|
||||
# 10.6 is the current long-term-support
|
||||
@@ -254,7 +254,7 @@ jobs:
|
||||
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/codespell.json"
|
||||
- name: Run prek
|
||||
uses: j178/prek-action@9d6a3097e0c1865ecce00cfb89fe80f2ee91b547 # v1.0.12
|
||||
uses: j178/prek-action@564dda4cfa5e96aafdc4a5696c4bf7b46baae5ac # v1.1.0
|
||||
env:
|
||||
PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config
|
||||
RUFF_OUTPUT_FORMAT: github
|
||||
@@ -310,7 +310,7 @@ jobs:
|
||||
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: &actions-cache actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
uses: &actions-cache actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
with:
|
||||
path: venv
|
||||
key: &key-python-venv >-
|
||||
@@ -374,7 +374,7 @@ jobs:
|
||||
fi
|
||||
- name: Save apt cache
|
||||
if: steps.cache-apt-check.outputs.cache-hit != 'true'
|
||||
uses: &actions-cache-save actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
uses: &actions-cache-save actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
with:
|
||||
path: *path-apt-cache
|
||||
key: *key-apt-cache
|
||||
@@ -425,7 +425,7 @@ jobs:
|
||||
steps:
|
||||
- &cache-restore-apt
|
||||
name: Restore apt cache
|
||||
uses: &actions-cache-restore actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
uses: &actions-cache-restore actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
with:
|
||||
path: *path-apt-cache
|
||||
fail-on-cache-miss: true
|
||||
|
||||
4
.github/workflows/codeql.yml
vendored
4
.github/workflows/codeql.yml
vendored
@@ -24,11 +24,11 @@ jobs:
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@b20883b0cd1f46c72ae0ba6d1090936928f9fa30 # v4.32.0
|
||||
uses: github/codeql-action/init@6bc82e05fd0ea64601dd4b465378bbcf57de0314 # v4.32.1
|
||||
with:
|
||||
languages: python
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@b20883b0cd1f46c72ae0ba6d1090936928f9fa30 # v4.32.0
|
||||
uses: github/codeql-action/analyze@6bc82e05fd0ea64601dd4b465378bbcf57de0314 # v4.32.1
|
||||
with:
|
||||
category: "/language:python"
|
||||
|
||||
2
.github/workflows/translations.yml
vendored
2
.github/workflows/translations.yml
vendored
@@ -10,7 +10,7 @@ on:
|
||||
- "**strings.json"
|
||||
|
||||
env:
|
||||
DEFAULT_PYTHON: "3.13"
|
||||
DEFAULT_PYTHON: "3.14.3"
|
||||
|
||||
jobs:
|
||||
upload:
|
||||
|
||||
2
.github/workflows/wheels.yml
vendored
2
.github/workflows/wheels.yml
vendored
@@ -17,7 +17,7 @@ on:
|
||||
- "script/gen_requirements_all.py"
|
||||
|
||||
env:
|
||||
DEFAULT_PYTHON: "3.13"
|
||||
DEFAULT_PYTHON: "3.14.3"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name}}
|
||||
|
||||
@@ -1 +1 @@
|
||||
3.13
|
||||
3.14
|
||||
|
||||
@@ -389,6 +389,7 @@ homeassistant.components.onkyo.*
|
||||
homeassistant.components.open_meteo.*
|
||||
homeassistant.components.open_router.*
|
||||
homeassistant.components.openai_conversation.*
|
||||
homeassistant.components.openevse.*
|
||||
homeassistant.components.openexchangerates.*
|
||||
homeassistant.components.opensky.*
|
||||
homeassistant.components.openuv.*
|
||||
|
||||
8
CODEOWNERS
generated
8
CODEOWNERS
generated
@@ -288,6 +288,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/cloud/ @home-assistant/cloud
|
||||
/homeassistant/components/cloudflare/ @ludeeus @ctalkington
|
||||
/tests/components/cloudflare/ @ludeeus @ctalkington
|
||||
/homeassistant/components/cloudflare_r2/ @corrreia
|
||||
/tests/components/cloudflare_r2/ @corrreia
|
||||
/homeassistant/components/co2signal/ @jpbede @VIKTORVAV99
|
||||
/tests/components/co2signal/ @jpbede @VIKTORVAV99
|
||||
/homeassistant/components/coinbase/ @tombrien
|
||||
@@ -919,6 +921,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/libre_hardware_monitor/ @Sab44
|
||||
/homeassistant/components/lidarr/ @tkdrob
|
||||
/tests/components/lidarr/ @tkdrob
|
||||
/homeassistant/components/liebherr/ @mettolen
|
||||
/tests/components/liebherr/ @mettolen
|
||||
/homeassistant/components/lifx/ @Djelibeybi
|
||||
/tests/components/lifx/ @Djelibeybi
|
||||
/homeassistant/components/light/ @home-assistant/core
|
||||
@@ -1263,6 +1267,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/powerfox/ @klaasnicolaas
|
||||
/homeassistant/components/powerwall/ @bdraco @jrester @daniel-simpson
|
||||
/tests/components/powerwall/ @bdraco @jrester @daniel-simpson
|
||||
/homeassistant/components/prana/ @prana-dev-official
|
||||
/tests/components/prana/ @prana-dev-official
|
||||
/homeassistant/components/private_ble_device/ @Jc2k
|
||||
/tests/components/private_ble_device/ @Jc2k
|
||||
/homeassistant/components/probe_plus/ @pantherale0
|
||||
@@ -1874,6 +1880,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/worldclock/ @fabaff
|
||||
/homeassistant/components/ws66i/ @ssaenger
|
||||
/tests/components/ws66i/ @ssaenger
|
||||
/homeassistant/components/wsdot/ @ucodery
|
||||
/tests/components/wsdot/ @ucodery
|
||||
/homeassistant/components/wyoming/ @synesthesiam
|
||||
/tests/components/wyoming/ @synesthesiam
|
||||
/homeassistant/components/xbox/ @hunterjm @tr4nt0r
|
||||
|
||||
@@ -52,6 +52,9 @@ RUN --mount=type=bind,source=requirements.txt,target=requirements.txt \
|
||||
--mount=type=bind,source=requirements_test_pre_commit.txt,target=requirements_test_pre_commit.txt \
|
||||
uv pip install -r requirements.txt -r requirements_test.txt
|
||||
|
||||
# Claude Code native install
|
||||
RUN curl -fsSL https://claude.ai/install.sh | bash
|
||||
|
||||
WORKDIR /workspaces
|
||||
|
||||
# Set the default shell to bash instead of sh
|
||||
|
||||
5
homeassistant/brands/cloudflare.json
Normal file
5
homeassistant/brands/cloudflare.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"domain": "cloudflare",
|
||||
"name": "Cloudflare",
|
||||
"integrations": ["cloudflare", "cloudflare_r2"]
|
||||
}
|
||||
5
homeassistant/brands/heatit.json
Normal file
5
homeassistant/brands/heatit.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"domain": "heatit",
|
||||
"name": "Heatit",
|
||||
"iot_standards": ["zwave"]
|
||||
}
|
||||
5
homeassistant/brands/heiman.json
Normal file
5
homeassistant/brands/heiman.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"domain": "heiman",
|
||||
"name": "Heiman",
|
||||
"iot_standards": ["matter", "zigbee"]
|
||||
}
|
||||
@@ -99,7 +99,7 @@ class AbodeLight(AbodeDevice, LightEntity):
|
||||
return _hs
|
||||
|
||||
@property
|
||||
def color_mode(self) -> str | None:
|
||||
def color_mode(self) -> ColorMode | None:
|
||||
"""Return the color mode of the light."""
|
||||
if self._device.is_dimmable and self._device.is_color_capable:
|
||||
if self.hs_color is not None:
|
||||
@@ -110,7 +110,7 @@ class AbodeLight(AbodeDevice, LightEntity):
|
||||
return ColorMode.ONOFF
|
||||
|
||||
@property
|
||||
def supported_color_modes(self) -> set[str] | None:
|
||||
def supported_color_modes(self) -> set[ColorMode] | None:
|
||||
"""Flag supported color modes."""
|
||||
if self._device.is_dimmable and self._device.is_color_capable:
|
||||
return {ColorMode.COLOR_TEMP, ColorMode.HS}
|
||||
|
||||
@@ -7,10 +7,12 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN, SERVER_URL
|
||||
from .services import async_setup_services
|
||||
|
||||
ATTRIBUTION = "ispyconnect.com"
|
||||
DEFAULT_BRAND = "Agent DVR by ispyconnect.com"
|
||||
@@ -19,6 +21,14 @@ PLATFORMS = [Platform.ALARM_CONTROL_PANEL, Platform.CAMERA]
|
||||
|
||||
AgentDVRConfigEntry = ConfigEntry[Agent]
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the component."""
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, config_entry: AgentDVRConfigEntry
|
||||
|
||||
@@ -9,10 +9,7 @@ from homeassistant.components.camera import CameraEntityFeature
|
||||
from homeassistant.components.mjpeg import MjpegCamera, filter_urllib3_logging
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
async_get_current_platform,
|
||||
)
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AgentDVRConfigEntry
|
||||
from .const import ATTRIBUTION, CAMERA_SCAN_INTERVAL_SECS, DOMAIN
|
||||
@@ -21,20 +18,6 @@ SCAN_INTERVAL = timedelta(seconds=CAMERA_SCAN_INTERVAL_SECS)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_DEV_EN_ALT = "enable_alerts"
|
||||
_DEV_DS_ALT = "disable_alerts"
|
||||
_DEV_EN_REC = "start_recording"
|
||||
_DEV_DS_REC = "stop_recording"
|
||||
_DEV_SNAP = "snapshot"
|
||||
|
||||
CAMERA_SERVICES = {
|
||||
_DEV_EN_ALT: "async_enable_alerts",
|
||||
_DEV_DS_ALT: "async_disable_alerts",
|
||||
_DEV_EN_REC: "async_start_recording",
|
||||
_DEV_DS_REC: "async_stop_recording",
|
||||
_DEV_SNAP: "async_snapshot",
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -57,10 +40,6 @@ async def async_setup_entry(
|
||||
|
||||
async_add_entities(cameras)
|
||||
|
||||
platform = async_get_current_platform()
|
||||
for service, method in CAMERA_SERVICES.items():
|
||||
platform.async_register_entity_service(service, None, method)
|
||||
|
||||
|
||||
class AgentCamera(MjpegCamera):
|
||||
"""Representation of an Agent Device Stream."""
|
||||
|
||||
38
homeassistant/components/agent_dvr/services.py
Normal file
38
homeassistant/components/agent_dvr/services.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""Services for Agent DVR."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import service
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_DEV_EN_ALT = "enable_alerts"
|
||||
_DEV_DS_ALT = "disable_alerts"
|
||||
_DEV_EN_REC = "start_recording"
|
||||
_DEV_DS_REC = "stop_recording"
|
||||
_DEV_SNAP = "snapshot"
|
||||
|
||||
CAMERA_SERVICES = {
|
||||
_DEV_EN_ALT: "async_enable_alerts",
|
||||
_DEV_DS_ALT: "async_disable_alerts",
|
||||
_DEV_EN_REC: "async_start_recording",
|
||||
_DEV_DS_REC: "async_stop_recording",
|
||||
_DEV_SNAP: "async_snapshot",
|
||||
}
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Home Assistant services."""
|
||||
|
||||
for service_name, method in CAMERA_SERVICES.items():
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
service_name,
|
||||
entity_domain=CAMERA_DOMAIN,
|
||||
schema=None,
|
||||
func=method,
|
||||
)
|
||||
@@ -166,7 +166,7 @@
|
||||
},
|
||||
"services": {
|
||||
"alarm_arm_away": {
|
||||
"description": "Arms the alarm in the away mode.",
|
||||
"description": "Arms an alarm in the away mode.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
|
||||
@@ -176,7 +176,7 @@
|
||||
"name": "Arm away"
|
||||
},
|
||||
"alarm_arm_custom_bypass": {
|
||||
"description": "Arms the alarm while allowing to bypass a custom area.",
|
||||
"description": "Arms an alarm while allowing to bypass a custom area.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "Code to arm the alarm.",
|
||||
@@ -186,7 +186,7 @@
|
||||
"name": "Arm with custom bypass"
|
||||
},
|
||||
"alarm_arm_home": {
|
||||
"description": "Arms the alarm in the home mode.",
|
||||
"description": "Arms an alarm in the home mode.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
|
||||
@@ -196,7 +196,7 @@
|
||||
"name": "Arm home"
|
||||
},
|
||||
"alarm_arm_night": {
|
||||
"description": "Arms the alarm in the night mode.",
|
||||
"description": "Arms an alarm in the night mode.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
|
||||
@@ -206,7 +206,7 @@
|
||||
"name": "Arm night"
|
||||
},
|
||||
"alarm_arm_vacation": {
|
||||
"description": "Arms the alarm in the vacation mode.",
|
||||
"description": "Arms an alarm in the vacation mode.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
|
||||
@@ -216,7 +216,7 @@
|
||||
"name": "Arm vacation"
|
||||
},
|
||||
"alarm_disarm": {
|
||||
"description": "Disarms the alarm.",
|
||||
"description": "Disarms an alarm.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "Code to disarm the alarm.",
|
||||
@@ -226,7 +226,7 @@
|
||||
"name": "Disarm"
|
||||
},
|
||||
"alarm_trigger": {
|
||||
"description": "Triggers the alarm manually.",
|
||||
"description": "Triggers an alarm manually.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
|
||||
|
||||
@@ -18,12 +18,15 @@ from homeassistant.const import (
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.dispatcher import dispatcher_send
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import (
|
||||
CONF_DEVICE_BAUD,
|
||||
CONF_DEVICE_PATH,
|
||||
DOMAIN,
|
||||
PROTOCOL_SERIAL,
|
||||
PROTOCOL_SOCKET,
|
||||
SIGNAL_PANEL_MESSAGE,
|
||||
@@ -32,9 +35,11 @@ from .const import (
|
||||
SIGNAL_ZONE_FAULT,
|
||||
SIGNAL_ZONE_RESTORE,
|
||||
)
|
||||
from .services import async_setup_services
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
PLATFORMS = [
|
||||
Platform.ALARM_CONTROL_PANEL,
|
||||
Platform.BINARY_SENSOR,
|
||||
@@ -54,6 +59,12 @@ class AlarmDecoderData:
|
||||
restart: bool
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the component."""
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: AlarmDecoderConfigEntry
|
||||
) -> bool:
|
||||
|
||||
@@ -2,17 +2,13 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.alarm_control_panel import (
|
||||
AlarmControlPanelEntity,
|
||||
AlarmControlPanelEntityFeature,
|
||||
AlarmControlPanelState,
|
||||
CodeFormat,
|
||||
)
|
||||
from homeassistant.const import ATTR_CODE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
@@ -27,11 +23,6 @@ from .const import (
|
||||
)
|
||||
from .entity import AlarmDecoderEntity
|
||||
|
||||
SERVICE_ALARM_TOGGLE_CHIME = "alarm_toggle_chime"
|
||||
|
||||
SERVICE_ALARM_KEYPRESS = "alarm_keypress"
|
||||
ATTR_KEYPRESS = "keypress"
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -50,23 +41,6 @@ async def async_setup_entry(
|
||||
)
|
||||
async_add_entities([entity])
|
||||
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_ALARM_TOGGLE_CHIME,
|
||||
{
|
||||
vol.Required(ATTR_CODE): cv.string,
|
||||
},
|
||||
"alarm_toggle_chime",
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_ALARM_KEYPRESS,
|
||||
{
|
||||
vol.Required(ATTR_KEYPRESS): cv.string,
|
||||
},
|
||||
"alarm_keypress",
|
||||
)
|
||||
|
||||
|
||||
class AlarmDecoderAlarmPanel(AlarmDecoderEntity, AlarmControlPanelEntity):
|
||||
"""Representation of an AlarmDecoder-based alarm panel."""
|
||||
|
||||
46
homeassistant/components/alarmdecoder/services.py
Normal file
46
homeassistant/components/alarmdecoder/services.py
Normal file
@@ -0,0 +1,46 @@
|
||||
"""Support for AlarmDecoder-based alarm control panels (Honeywell/DSC)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.alarm_control_panel import (
|
||||
DOMAIN as ALARM_CONTROL_PANEL_DOMAIN,
|
||||
)
|
||||
from homeassistant.const import ATTR_CODE
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
SERVICE_ALARM_TOGGLE_CHIME = "alarm_toggle_chime"
|
||||
|
||||
SERVICE_ALARM_KEYPRESS = "alarm_keypress"
|
||||
ATTR_KEYPRESS = "keypress"
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Home Assistant services."""
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_ALARM_TOGGLE_CHIME,
|
||||
entity_domain=ALARM_CONTROL_PANEL_DOMAIN,
|
||||
schema={
|
||||
vol.Required(ATTR_CODE): cv.string,
|
||||
},
|
||||
func="alarm_toggle_chime",
|
||||
)
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_ALARM_KEYPRESS,
|
||||
entity_domain=ALARM_CONTROL_PANEL_DOMAIN,
|
||||
schema={
|
||||
vol.Required(ATTR_KEYPRESS): cv.string,
|
||||
},
|
||||
func="alarm_keypress",
|
||||
)
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioamazondevices==11.0.2"]
|
||||
"requirements": ["aioamazondevices==11.1.1"]
|
||||
}
|
||||
|
||||
@@ -28,6 +28,7 @@ from homeassistant.helpers.typing import StateType
|
||||
from .const import CATEGORY_NOTIFICATIONS, CATEGORY_SENSORS
|
||||
from .coordinator import AmazonConfigEntry
|
||||
from .entity import AmazonEntity
|
||||
from .utils import async_remove_unsupported_notification_sensors
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@@ -105,6 +106,9 @@ async def async_setup_entry(
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
# Remove notification sensors from unsupported devices
|
||||
await async_remove_unsupported_notification_sensors(hass, coordinator)
|
||||
|
||||
known_devices: set[str] = set()
|
||||
|
||||
def _check_device() -> None:
|
||||
@@ -122,6 +126,7 @@ async def async_setup_entry(
|
||||
AmazonSensorEntity(coordinator, serial_num, notification_desc)
|
||||
for notification_desc in NOTIFICATIONS
|
||||
for serial_num in new_devices
|
||||
if coordinator.data[serial_num].notifications_supported
|
||||
]
|
||||
async_add_entities(sensors_list + notifications_list)
|
||||
|
||||
|
||||
@@ -59,13 +59,15 @@ async def async_setup_entry(
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
# Replace unique id for "DND" switch and remove from Speaker Group
|
||||
await async_update_unique_id(
|
||||
hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
|
||||
)
|
||||
# DND keys
|
||||
old_key = "do_not_disturb"
|
||||
new_key = "dnd"
|
||||
|
||||
# Remove DND switch from virtual groups
|
||||
await async_remove_dnd_from_virtual_group(hass, coordinator)
|
||||
# Remove old DND switch from virtual groups
|
||||
await async_remove_dnd_from_virtual_group(hass, coordinator, old_key)
|
||||
|
||||
# Replace unique id for DND switch
|
||||
await async_update_unique_id(hass, coordinator, SWITCH_DOMAIN, old_key, new_key)
|
||||
|
||||
known_devices: set[str] = set()
|
||||
|
||||
|
||||
@@ -5,8 +5,14 @@ from functools import wraps
|
||||
from typing import Any, Concatenate
|
||||
|
||||
from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
|
||||
from aioamazondevices.const.schedules import (
|
||||
NOTIFICATION_ALARM,
|
||||
NOTIFICATION_REMINDER,
|
||||
NOTIFICATION_TIMER,
|
||||
)
|
||||
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
|
||||
|
||||
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
|
||||
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
@@ -48,7 +54,7 @@ def alexa_api_call[_T: AmazonEntity, **_P](
|
||||
async def async_update_unique_id(
|
||||
hass: HomeAssistant,
|
||||
coordinator: AmazonDevicesCoordinator,
|
||||
domain: str,
|
||||
platform: str,
|
||||
old_key: str,
|
||||
new_key: str,
|
||||
) -> None:
|
||||
@@ -57,7 +63,9 @@ async def async_update_unique_id(
|
||||
|
||||
for serial_num in coordinator.data:
|
||||
unique_id = f"{serial_num}-{old_key}"
|
||||
if entity_id := entity_registry.async_get_entity_id(domain, DOMAIN, unique_id):
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
DOMAIN, platform, unique_id
|
||||
):
|
||||
_LOGGER.debug("Updating unique_id for %s", entity_id)
|
||||
new_unique_id = unique_id.replace(old_key, new_key)
|
||||
|
||||
@@ -68,12 +76,13 @@ async def async_update_unique_id(
|
||||
async def async_remove_dnd_from_virtual_group(
|
||||
hass: HomeAssistant,
|
||||
coordinator: AmazonDevicesCoordinator,
|
||||
key: str,
|
||||
) -> None:
|
||||
"""Remove entity DND from virtual group."""
|
||||
entity_registry = er.async_get(hass)
|
||||
|
||||
for serial_num in coordinator.data:
|
||||
unique_id = f"{serial_num}-do_not_disturb"
|
||||
unique_id = f"{serial_num}-{key}"
|
||||
entity_id = entity_registry.async_get_entity_id(
|
||||
DOMAIN, SWITCH_DOMAIN, unique_id
|
||||
)
|
||||
@@ -81,3 +90,27 @@ async def async_remove_dnd_from_virtual_group(
|
||||
if entity_id and is_group:
|
||||
entity_registry.async_remove(entity_id)
|
||||
_LOGGER.debug("Removed DND switch from virtual group %s", entity_id)
|
||||
|
||||
|
||||
async def async_remove_unsupported_notification_sensors(
|
||||
hass: HomeAssistant,
|
||||
coordinator: AmazonDevicesCoordinator,
|
||||
) -> None:
|
||||
"""Remove notification sensors from unsupported devices."""
|
||||
entity_registry = er.async_get(hass)
|
||||
|
||||
for serial_num in coordinator.data:
|
||||
for notification_key in (
|
||||
NOTIFICATION_ALARM,
|
||||
NOTIFICATION_REMINDER,
|
||||
NOTIFICATION_TIMER,
|
||||
):
|
||||
unique_id = f"{serial_num}-{notification_key}"
|
||||
entity_id = entity_registry.async_get_entity_id(
|
||||
DOMAIN, SENSOR_DOMAIN, unique_id=unique_id
|
||||
)
|
||||
is_unsupported = not coordinator.data[serial_num].notifications_supported
|
||||
|
||||
if entity_id and is_unsupported:
|
||||
entity_registry.async_remove(entity_id)
|
||||
_LOGGER.debug("Removed unsupported notification sensor %s", entity_id)
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import AsyncIterator, Callable
|
||||
from collections.abc import AsyncGenerator, Callable
|
||||
from contextlib import asynccontextmanager, suppress
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
@@ -202,7 +202,7 @@ class AmcrestChecker(ApiWrapper):
|
||||
@asynccontextmanager
|
||||
async def async_stream_command(
|
||||
self, *args: Any, **kwargs: Any
|
||||
) -> AsyncIterator[httpx.Response]:
|
||||
) -> AsyncGenerator[httpx.Response]:
|
||||
"""amcrest.ApiWrapper.command wrapper to catch errors."""
|
||||
async with (
|
||||
self._async_command_wrapper(),
|
||||
@@ -211,7 +211,7 @@ class AmcrestChecker(ApiWrapper):
|
||||
yield ret
|
||||
|
||||
@asynccontextmanager
|
||||
async def _async_command_wrapper(self) -> AsyncIterator[None]:
|
||||
async def _async_command_wrapper(self) -> AsyncGenerator[None]:
|
||||
try:
|
||||
yield
|
||||
except LoginError as ex:
|
||||
|
||||
@@ -4,7 +4,7 @@ from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.components import labs, websocket_api
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
@@ -18,7 +18,13 @@ from .analytics import (
|
||||
EntityAnalyticsModifications,
|
||||
async_devices_payload,
|
||||
)
|
||||
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, PREFERENCE_SCHEMA
|
||||
from .const import (
|
||||
ATTR_ONBOARDED,
|
||||
ATTR_PREFERENCES,
|
||||
ATTR_SNAPSHOTS,
|
||||
DOMAIN,
|
||||
PREFERENCE_SCHEMA,
|
||||
)
|
||||
from .http import AnalyticsDevicesView
|
||||
|
||||
__all__ = [
|
||||
@@ -44,29 +50,55 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
|
||||
DATA_COMPONENT: HassKey[Analytics] = HassKey(DOMAIN)
|
||||
|
||||
LABS_SNAPSHOT_FEATURE = "snapshots"
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the analytics integration."""
|
||||
analytics_config = config.get(DOMAIN, {})
|
||||
|
||||
# For now we want to enable device analytics only if the url option
|
||||
# is explicitly listed in YAML.
|
||||
if CONF_SNAPSHOTS_URL in analytics_config:
|
||||
disable_snapshots = False
|
||||
await labs.async_update_preview_feature(
|
||||
hass, DOMAIN, LABS_SNAPSHOT_FEATURE, enabled=True
|
||||
)
|
||||
snapshots_url = analytics_config[CONF_SNAPSHOTS_URL]
|
||||
else:
|
||||
disable_snapshots = True
|
||||
snapshots_url = None
|
||||
|
||||
analytics = Analytics(hass, snapshots_url, disable_snapshots)
|
||||
analytics = Analytics(hass, snapshots_url)
|
||||
|
||||
# Load stored data
|
||||
await analytics.load()
|
||||
|
||||
started = False
|
||||
|
||||
async def _async_handle_labs_update(
|
||||
event: Event[labs.EventLabsUpdatedData],
|
||||
) -> None:
|
||||
"""Handle labs feature toggle."""
|
||||
await analytics.save_preferences({ATTR_SNAPSHOTS: event.data["enabled"]})
|
||||
if started:
|
||||
await analytics.async_schedule()
|
||||
|
||||
@callback
|
||||
def _async_labs_event_filter(event_data: labs.EventLabsUpdatedData) -> bool:
|
||||
"""Filter labs events for this integration's snapshot feature."""
|
||||
return (
|
||||
event_data["domain"] == DOMAIN
|
||||
and event_data["preview_feature"] == LABS_SNAPSHOT_FEATURE
|
||||
)
|
||||
|
||||
async def start_schedule(_event: Event) -> None:
|
||||
"""Start the send schedule after the started event."""
|
||||
nonlocal started
|
||||
started = True
|
||||
await analytics.async_schedule()
|
||||
|
||||
hass.bus.async_listen(
|
||||
labs.EVENT_LABS_UPDATED,
|
||||
_async_handle_labs_update,
|
||||
event_filter=_async_labs_event_filter,
|
||||
)
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_schedule)
|
||||
|
||||
websocket_api.async_register_command(hass, websocket_analytics)
|
||||
|
||||
@@ -22,6 +22,7 @@ from homeassistant.components.energy import (
|
||||
DOMAIN as ENERGY_DOMAIN,
|
||||
is_configured as energy_is_configured,
|
||||
)
|
||||
from homeassistant.components.labs import async_is_preview_feature_enabled
|
||||
from homeassistant.components.recorder import (
|
||||
DOMAIN as RECORDER_DOMAIN,
|
||||
get_instance as get_recorder_instance,
|
||||
@@ -241,12 +242,10 @@ class Analytics:
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
snapshots_url: str | None = None,
|
||||
disable_snapshots: bool = False,
|
||||
) -> None:
|
||||
"""Initialize the Analytics class."""
|
||||
self._hass: HomeAssistant = hass
|
||||
self._snapshots_url = snapshots_url
|
||||
self._disable_snapshots = disable_snapshots
|
||||
|
||||
self._session = async_get_clientsession(hass)
|
||||
self._data = AnalyticsData(False, {})
|
||||
@@ -258,15 +257,13 @@ class Analytics:
|
||||
def preferences(self) -> dict:
|
||||
"""Return the current active preferences."""
|
||||
preferences = self._data.preferences
|
||||
result = {
|
||||
return {
|
||||
ATTR_BASE: preferences.get(ATTR_BASE, False),
|
||||
ATTR_DIAGNOSTICS: preferences.get(ATTR_DIAGNOSTICS, False),
|
||||
ATTR_USAGE: preferences.get(ATTR_USAGE, False),
|
||||
ATTR_STATISTICS: preferences.get(ATTR_STATISTICS, False),
|
||||
ATTR_SNAPSHOTS: preferences.get(ATTR_SNAPSHOTS, False),
|
||||
}
|
||||
if not self._disable_snapshots:
|
||||
result[ATTR_SNAPSHOTS] = preferences.get(ATTR_SNAPSHOTS, False)
|
||||
return result
|
||||
|
||||
@property
|
||||
def onboarded(self) -> bool:
|
||||
@@ -291,6 +288,11 @@ class Analytics:
|
||||
"""Return bool if a supervisor is present."""
|
||||
return is_hassio(self._hass)
|
||||
|
||||
@property
|
||||
def _snapshots_enabled(self) -> bool:
|
||||
"""Check if snapshots feature is enabled via labs."""
|
||||
return async_is_preview_feature_enabled(self._hass, DOMAIN, "snapshots")
|
||||
|
||||
async def load(self) -> None:
|
||||
"""Load preferences."""
|
||||
stored = await self._store.async_load()
|
||||
@@ -645,7 +647,10 @@ class Analytics:
|
||||
),
|
||||
)
|
||||
|
||||
if not self.preferences.get(ATTR_SNAPSHOTS, False) or self._disable_snapshots:
|
||||
if (
|
||||
not self.preferences.get(ATTR_SNAPSHOTS, False)
|
||||
or not self._snapshots_enabled
|
||||
):
|
||||
LOGGER.debug("Snapshot analytics not scheduled")
|
||||
if self._snapshot_scheduled:
|
||||
self._snapshot_scheduled()
|
||||
|
||||
@@ -7,5 +7,12 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/analytics",
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"preview_features": {
|
||||
"snapshots": {
|
||||
"feedback_url": "https://forms.gle/GqvRmgmghSDco8M46",
|
||||
"learn_more_url": "https://www.home-assistant.io/blog/2026/02/02/about-device-database/",
|
||||
"report_issue_url": "https://github.com/OHF-Device-Database/device-database/issues/new"
|
||||
}
|
||||
},
|
||||
"quality_scale": "internal"
|
||||
}
|
||||
|
||||
10
homeassistant/components/analytics/strings.json
Normal file
10
homeassistant/components/analytics/strings.json
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"preview_features": {
|
||||
"snapshots": {
|
||||
"description": "We're creating the [Open Home Foundation Device Database](https://www.home-assistant.io/blog/2026/02/02/about-device-database/): a free, open source community-powered resource to help users find practical information about how smart home devices perform in real installations.\n\nYou can help us build it by opting in to share anonymized data about your devices. This data will only ever include device-specific details (like model or manufacturer) – never personally identifying information (like the names you assign).\n\nFind out how we process your data (should you choose to contribute) in our [Data Use Statement](https://www.openhomefoundation.org/device-database-data-use-statement).",
|
||||
"disable_confirmation": "Your data will no longer be shared with the Open Home Foundation's device database.",
|
||||
"enable_confirmation": "This feature is still in development and may change. The device database is being refined based on user feedback and is not yet complete.",
|
||||
"name": "Device database"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -13,9 +13,10 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import CONF_TRACKED_INTEGRATIONS
|
||||
from .const import CONF_TRACKED_APPS, CONF_TRACKED_INTEGRATIONS
|
||||
from .coordinator import HomeassistantAnalyticsDataUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
@@ -59,6 +60,30 @@ async def async_setup_entry(
|
||||
return True
|
||||
|
||||
|
||||
async def async_migrate_entry(
|
||||
hass: HomeAssistant, entry: AnalyticsInsightsConfigEntry
|
||||
) -> bool:
|
||||
"""Migrate to a new version."""
|
||||
# Migration for switching add-ons to apps
|
||||
if entry.version < 2:
|
||||
ent_reg = er.async_get(hass)
|
||||
for entity_entry in er.async_entries_for_config_entry(ent_reg, entry.entry_id):
|
||||
if not entity_entry.unique_id.startswith("addon_"):
|
||||
continue
|
||||
|
||||
ent_reg.async_update_entity(
|
||||
entity_entry.entity_id,
|
||||
new_unique_id=entity_entry.unique_id.replace("addon_", "app_"),
|
||||
)
|
||||
|
||||
options = dict(entry.options)
|
||||
options[CONF_TRACKED_APPS] = options.pop("tracked_addons", [])
|
||||
|
||||
hass.config_entries.async_update_entry(entry, version=2, options=options)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, entry: AnalyticsInsightsConfigEntry
|
||||
) -> bool:
|
||||
|
||||
@@ -26,7 +26,7 @@ from homeassistant.helpers.selector import (
|
||||
|
||||
from . import AnalyticsInsightsConfigEntry
|
||||
from .const import (
|
||||
CONF_TRACKED_ADDONS,
|
||||
CONF_TRACKED_APPS,
|
||||
CONF_TRACKED_CUSTOM_INTEGRATIONS,
|
||||
CONF_TRACKED_INTEGRATIONS,
|
||||
DOMAIN,
|
||||
@@ -43,6 +43,8 @@ INTEGRATION_TYPES_WITHOUT_ANALYTICS = (
|
||||
class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Homeassistant Analytics."""
|
||||
|
||||
VERSION = 2
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(
|
||||
@@ -59,7 +61,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if user_input is not None:
|
||||
if all(
|
||||
[
|
||||
not user_input.get(CONF_TRACKED_ADDONS),
|
||||
not user_input.get(CONF_TRACKED_APPS),
|
||||
not user_input.get(CONF_TRACKED_INTEGRATIONS),
|
||||
not user_input.get(CONF_TRACKED_CUSTOM_INTEGRATIONS),
|
||||
]
|
||||
@@ -70,7 +72,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
title="Home Assistant Analytics Insights",
|
||||
data={},
|
||||
options={
|
||||
CONF_TRACKED_ADDONS: user_input.get(CONF_TRACKED_ADDONS, []),
|
||||
CONF_TRACKED_APPS: user_input.get(CONF_TRACKED_APPS, []),
|
||||
CONF_TRACKED_INTEGRATIONS: user_input.get(
|
||||
CONF_TRACKED_INTEGRATIONS, []
|
||||
),
|
||||
@@ -84,7 +86,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
session=async_get_clientsession(self.hass)
|
||||
)
|
||||
try:
|
||||
addons = await client.get_addons()
|
||||
apps = await client.get_addons()
|
||||
integrations = await client.get_integrations(Environment.NEXT)
|
||||
custom_integrations = await client.get_custom_integrations()
|
||||
except HomeassistantAnalyticsConnectionError:
|
||||
@@ -107,9 +109,9 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors=errors,
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_TRACKED_ADDONS): SelectSelector(
|
||||
vol.Optional(CONF_TRACKED_APPS): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=list(addons),
|
||||
options=list(apps),
|
||||
multiple=True,
|
||||
sort=True,
|
||||
)
|
||||
@@ -144,7 +146,7 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithReload):
|
||||
if user_input is not None:
|
||||
if all(
|
||||
[
|
||||
not user_input.get(CONF_TRACKED_ADDONS),
|
||||
not user_input.get(CONF_TRACKED_APPS),
|
||||
not user_input.get(CONF_TRACKED_INTEGRATIONS),
|
||||
not user_input.get(CONF_TRACKED_CUSTOM_INTEGRATIONS),
|
||||
]
|
||||
@@ -154,7 +156,7 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithReload):
|
||||
return self.async_create_entry(
|
||||
title="",
|
||||
data={
|
||||
CONF_TRACKED_ADDONS: user_input.get(CONF_TRACKED_ADDONS, []),
|
||||
CONF_TRACKED_APPS: user_input.get(CONF_TRACKED_APPS, []),
|
||||
CONF_TRACKED_INTEGRATIONS: user_input.get(
|
||||
CONF_TRACKED_INTEGRATIONS, []
|
||||
),
|
||||
@@ -168,7 +170,7 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithReload):
|
||||
session=async_get_clientsession(self.hass)
|
||||
)
|
||||
try:
|
||||
addons = await client.get_addons()
|
||||
apps = await client.get_addons()
|
||||
integrations = await client.get_integrations(Environment.NEXT)
|
||||
custom_integrations = await client.get_custom_integrations()
|
||||
except HomeassistantAnalyticsConnectionError:
|
||||
@@ -189,9 +191,9 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithReload):
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_TRACKED_ADDONS): SelectSelector(
|
||||
vol.Optional(CONF_TRACKED_APPS): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=list(addons),
|
||||
options=list(apps),
|
||||
multiple=True,
|
||||
sort=True,
|
||||
)
|
||||
|
||||
@@ -4,7 +4,7 @@ import logging
|
||||
|
||||
DOMAIN = "analytics_insights"
|
||||
|
||||
CONF_TRACKED_ADDONS = "tracked_addons"
|
||||
CONF_TRACKED_APPS = "tracked_apps"
|
||||
CONF_TRACKED_INTEGRATIONS = "tracked_integrations"
|
||||
CONF_TRACKED_CUSTOM_INTEGRATIONS = "tracked_custom_integrations"
|
||||
|
||||
|
||||
@@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import (
|
||||
CONF_TRACKED_ADDONS,
|
||||
CONF_TRACKED_APPS,
|
||||
CONF_TRACKED_CUSTOM_INTEGRATIONS,
|
||||
CONF_TRACKED_INTEGRATIONS,
|
||||
DOMAIN,
|
||||
@@ -35,7 +35,7 @@ class AnalyticsData:
|
||||
|
||||
active_installations: int
|
||||
reports_integrations: int
|
||||
addons: dict[str, int]
|
||||
apps: dict[str, int]
|
||||
core_integrations: dict[str, int]
|
||||
custom_integrations: dict[str, int]
|
||||
|
||||
@@ -60,7 +60,7 @@ class HomeassistantAnalyticsDataUpdateCoordinator(DataUpdateCoordinator[Analytic
|
||||
update_interval=timedelta(hours=12),
|
||||
)
|
||||
self._client = client
|
||||
self._tracked_addons = self.config_entry.options.get(CONF_TRACKED_ADDONS, [])
|
||||
self._tracked_apps = self.config_entry.options.get(CONF_TRACKED_APPS, [])
|
||||
self._tracked_integrations = self.config_entry.options[
|
||||
CONF_TRACKED_INTEGRATIONS
|
||||
]
|
||||
@@ -70,7 +70,9 @@ class HomeassistantAnalyticsDataUpdateCoordinator(DataUpdateCoordinator[Analytic
|
||||
|
||||
async def _async_update_data(self) -> AnalyticsData:
|
||||
try:
|
||||
addons_data = await self._client.get_addons()
|
||||
apps_data = (
|
||||
await self._client.get_addons()
|
||||
) # Still add method name. Needs library update
|
||||
data = await self._client.get_current_analytics()
|
||||
custom_data = await self._client.get_custom_integrations()
|
||||
except HomeassistantAnalyticsConnectionError as err:
|
||||
@@ -79,9 +81,7 @@ class HomeassistantAnalyticsDataUpdateCoordinator(DataUpdateCoordinator[Analytic
|
||||
) from err
|
||||
except HomeassistantAnalyticsNotModifiedError:
|
||||
return self.data
|
||||
addons = {
|
||||
addon: get_addon_value(addons_data, addon) for addon in self._tracked_addons
|
||||
}
|
||||
apps = {app: get_app_value(apps_data, app) for app in self._tracked_apps}
|
||||
core_integrations = {
|
||||
integration: data.integrations.get(integration, 0)
|
||||
for integration in self._tracked_integrations
|
||||
@@ -93,14 +93,14 @@ class HomeassistantAnalyticsDataUpdateCoordinator(DataUpdateCoordinator[Analytic
|
||||
return AnalyticsData(
|
||||
data.active_installations,
|
||||
data.reports_integrations,
|
||||
addons,
|
||||
apps,
|
||||
core_integrations,
|
||||
custom_integrations,
|
||||
)
|
||||
|
||||
|
||||
def get_addon_value(data: dict[str, Addon], name_slug: str) -> int:
|
||||
"""Get addon value."""
|
||||
def get_app_value(data: dict[str, Addon], name_slug: str) -> int:
|
||||
"""Get app value."""
|
||||
if name_slug in data:
|
||||
return data[name_slug].total
|
||||
return 0
|
||||
|
||||
@@ -29,17 +29,17 @@ class AnalyticsSensorEntityDescription(SensorEntityDescription):
|
||||
value_fn: Callable[[AnalyticsData], StateType]
|
||||
|
||||
|
||||
def get_addon_entity_description(
|
||||
def get_app_entity_description(
|
||||
name_slug: str,
|
||||
) -> AnalyticsSensorEntityDescription:
|
||||
"""Get addon entity description."""
|
||||
"""Get app entity description."""
|
||||
return AnalyticsSensorEntityDescription(
|
||||
key=f"addon_{name_slug}_active_installations",
|
||||
translation_key="addons",
|
||||
key=f"app_{name_slug}_active_installations",
|
||||
translation_key="apps",
|
||||
name=name_slug,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
native_unit_of_measurement="active installations",
|
||||
value_fn=lambda data: data.addons.get(name_slug),
|
||||
value_fn=lambda data: data.apps.get(name_slug),
|
||||
)
|
||||
|
||||
|
||||
@@ -106,9 +106,9 @@ async def async_setup_entry(
|
||||
entities.extend(
|
||||
HomeassistantAnalyticsSensor(
|
||||
coordinator,
|
||||
get_addon_entity_description(addon_name_slug),
|
||||
get_app_entity_description(app_name_slug),
|
||||
)
|
||||
for addon_name_slug in coordinator.data.addons
|
||||
for app_name_slug in coordinator.data.apps
|
||||
)
|
||||
entities.extend(
|
||||
HomeassistantAnalyticsSensor(
|
||||
|
||||
@@ -10,12 +10,12 @@
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"tracked_addons": "Add-ons",
|
||||
"tracked_apps": "Apps",
|
||||
"tracked_custom_integrations": "Custom integrations",
|
||||
"tracked_integrations": "Integrations"
|
||||
},
|
||||
"data_description": {
|
||||
"tracked_addons": "Select the add-ons you want to track",
|
||||
"tracked_apps": "Select the apps you want to track",
|
||||
"tracked_custom_integrations": "Select the custom integrations you want to track",
|
||||
"tracked_integrations": "Select the integrations you want to track"
|
||||
}
|
||||
@@ -45,12 +45,12 @@
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"tracked_addons": "[%key:component::analytics_insights::config::step::user::data::tracked_addons%]",
|
||||
"tracked_apps": "[%key:component::analytics_insights::config::step::user::data::tracked_apps%]",
|
||||
"tracked_custom_integrations": "[%key:component::analytics_insights::config::step::user::data::tracked_custom_integrations%]",
|
||||
"tracked_integrations": "[%key:component::analytics_insights::config::step::user::data::tracked_integrations%]"
|
||||
},
|
||||
"data_description": {
|
||||
"tracked_addons": "[%key:component::analytics_insights::config::step::user::data_description::tracked_addons%]",
|
||||
"tracked_apps": "[%key:component::analytics_insights::config::step::user::data_description::tracked_apps%]",
|
||||
"tracked_custom_integrations": "[%key:component::analytics_insights::config::step::user::data_description::tracked_custom_integrations%]",
|
||||
"tracked_integrations": "[%key:component::analytics_insights::config::step::user::data_description::tracked_integrations%]"
|
||||
}
|
||||
|
||||
@@ -14,10 +14,18 @@ from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DEFAULT_CONVERSATION_NAME, DOMAIN, LOGGER
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
DATA_REPAIR_DEFER_RELOAD,
|
||||
DEFAULT_CONVERSATION_NAME,
|
||||
DEPRECATED_MODELS,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
)
|
||||
|
||||
PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
@@ -27,6 +35,7 @@ type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up Anthropic."""
|
||||
hass.data.setdefault(DOMAIN, {}).setdefault(DATA_REPAIR_DEFER_RELOAD, set())
|
||||
await async_migrate_integration(hass)
|
||||
return True
|
||||
|
||||
@@ -50,6 +59,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
|
||||
|
||||
entry.async_on_unload(entry.add_update_listener(async_update_options))
|
||||
|
||||
for subentry in entry.subentries.values():
|
||||
if (model := subentry.data.get(CONF_CHAT_MODEL)) and model.startswith(
|
||||
tuple(DEPRECATED_MODELS)
|
||||
):
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"model_deprecated",
|
||||
is_fixable=True,
|
||||
is_persistent=False,
|
||||
learn_more_url="https://platform.claude.com/docs/en/about-claude/model-deprecations",
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="model_deprecated",
|
||||
)
|
||||
break
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -62,6 +87,11 @@ async def async_update_options(
|
||||
hass: HomeAssistant, entry: AnthropicConfigEntry
|
||||
) -> None:
|
||||
"""Update options."""
|
||||
defer_reload_entries: set[str] = hass.data.setdefault(DOMAIN, {}).setdefault(
|
||||
DATA_REPAIR_DEFER_RELOAD, set()
|
||||
)
|
||||
if entry.entry_id in defer_reload_entries:
|
||||
return
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
|
||||
@@ -92,6 +92,40 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
|
||||
await client.models.list(timeout=10.0)
|
||||
|
||||
|
||||
async def get_model_list(client: anthropic.AsyncAnthropic) -> list[SelectOptionDict]:
|
||||
"""Get list of available models."""
|
||||
try:
|
||||
models = (await client.models.list()).data
|
||||
except anthropic.AnthropicError:
|
||||
models = []
|
||||
_LOGGER.debug("Available models: %s", models)
|
||||
model_options: list[SelectOptionDict] = []
|
||||
short_form = re.compile(r"[^\d]-\d$")
|
||||
for model_info in models:
|
||||
# Resolve alias from versioned model name:
|
||||
model_alias = (
|
||||
model_info.id[:-9]
|
||||
if model_info.id
|
||||
not in (
|
||||
"claude-3-haiku-20240307",
|
||||
"claude-3-5-haiku-20241022",
|
||||
"claude-3-opus-20240229",
|
||||
)
|
||||
else model_info.id
|
||||
)
|
||||
if short_form.search(model_alias):
|
||||
model_alias += "-0"
|
||||
if model_alias.endswith(("haiku", "opus", "sonnet")):
|
||||
model_alias += "-latest"
|
||||
model_options.append(
|
||||
SelectOptionDict(
|
||||
label=model_info.display_name,
|
||||
value=model_alias,
|
||||
)
|
||||
)
|
||||
return model_options
|
||||
|
||||
|
||||
class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Anthropic."""
|
||||
|
||||
@@ -401,38 +435,13 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
|
||||
async def _get_model_list(self) -> list[SelectOptionDict]:
|
||||
"""Get list of available models."""
|
||||
try:
|
||||
client = await self.hass.async_add_executor_job(
|
||||
partial(
|
||||
anthropic.AsyncAnthropic,
|
||||
api_key=self._get_entry().data[CONF_API_KEY],
|
||||
)
|
||||
client = await self.hass.async_add_executor_job(
|
||||
partial(
|
||||
anthropic.AsyncAnthropic,
|
||||
api_key=self._get_entry().data[CONF_API_KEY],
|
||||
)
|
||||
models = (await client.models.list()).data
|
||||
except anthropic.AnthropicError:
|
||||
models = []
|
||||
_LOGGER.debug("Available models: %s", models)
|
||||
model_options: list[SelectOptionDict] = []
|
||||
short_form = re.compile(r"[^\d]-\d$")
|
||||
for model_info in models:
|
||||
# Resolve alias from versioned model name:
|
||||
model_alias = (
|
||||
model_info.id[:-9]
|
||||
if model_info.id
|
||||
not in ("claude-3-haiku-20240307", "claude-3-opus-20240229")
|
||||
else model_info.id
|
||||
)
|
||||
if short_form.search(model_alias):
|
||||
model_alias += "-0"
|
||||
if model_alias.endswith(("haiku", "opus", "sonnet")):
|
||||
model_alias += "-latest"
|
||||
model_options.append(
|
||||
SelectOptionDict(
|
||||
label=model_info.display_name,
|
||||
value=model_alias,
|
||||
)
|
||||
)
|
||||
return model_options
|
||||
)
|
||||
return await get_model_list(client)
|
||||
|
||||
async def _get_location_data(self) -> dict[str, str]:
|
||||
"""Get approximate location data of the user."""
|
||||
|
||||
@@ -22,8 +22,10 @@ CONF_WEB_SEARCH_REGION = "region"
|
||||
CONF_WEB_SEARCH_COUNTRY = "country"
|
||||
CONF_WEB_SEARCH_TIMEZONE = "timezone"
|
||||
|
||||
DATA_REPAIR_DEFER_RELOAD = "repair_defer_reload"
|
||||
|
||||
DEFAULT = {
|
||||
CONF_CHAT_MODEL: "claude-3-5-haiku-latest",
|
||||
CONF_CHAT_MODEL: "claude-haiku-4-5",
|
||||
CONF_MAX_TOKENS: 3000,
|
||||
CONF_TEMPERATURE: 1.0,
|
||||
CONF_THINKING_BUDGET: 0,
|
||||
@@ -46,3 +48,10 @@ WEB_SEARCH_UNSUPPORTED_MODELS = [
|
||||
"claude-3-5-sonnet-20240620",
|
||||
"claude-3-5-sonnet-20241022",
|
||||
]
|
||||
|
||||
DEPRECATED_MODELS = [
|
||||
"claude-3-5-haiku",
|
||||
"claude-3-7-sonnet",
|
||||
"claude-3-5-sonnet",
|
||||
"claude-3-opus",
|
||||
]
|
||||
|
||||
@@ -600,6 +600,16 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
system = chat_log.content[0]
|
||||
if not isinstance(system, conversation.SystemContent):
|
||||
raise TypeError("First message must be a system message")
|
||||
|
||||
# System prompt with caching enabled
|
||||
system_prompt: list[TextBlockParam] = [
|
||||
TextBlockParam(
|
||||
type="text",
|
||||
text=system.content,
|
||||
cache_control={"type": "ephemeral"},
|
||||
)
|
||||
]
|
||||
|
||||
messages = _convert_content(chat_log.content[1:])
|
||||
|
||||
model = options.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
|
||||
@@ -608,7 +618,7 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
model=model,
|
||||
messages=messages,
|
||||
max_tokens=options.get(CONF_MAX_TOKENS, DEFAULT[CONF_MAX_TOKENS]),
|
||||
system=system.content,
|
||||
system=system_prompt,
|
||||
stream=True,
|
||||
)
|
||||
|
||||
@@ -695,10 +705,6 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
type="auto",
|
||||
)
|
||||
|
||||
if isinstance(model_args["system"], str):
|
||||
model_args["system"] = [
|
||||
TextBlockParam(type="text", text=model_args["system"])
|
||||
]
|
||||
model_args["system"].append( # type: ignore[union-attr]
|
||||
TextBlockParam(
|
||||
type="text",
|
||||
|
||||
275
homeassistant/components/anthropic/repairs.py
Normal file
275
homeassistant/components/anthropic/repairs.py
Normal file
@@ -0,0 +1,275 @@
|
||||
"""Issue repair flow for Anthropic."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Iterator
|
||||
from typing import cast
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import data_entry_flow
|
||||
from homeassistant.components.repairs import RepairsFlow
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigSubentry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
|
||||
|
||||
from .config_flow import get_model_list
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
DATA_REPAIR_DEFER_RELOAD,
|
||||
DEFAULT,
|
||||
DEPRECATED_MODELS,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
|
||||
class ModelDeprecatedRepairFlow(RepairsFlow):
    """Repair flow that walks every subentry using a deprecated Anthropic model.

    The flow shows one form per affected subentry (across all loaded config
    entries), collects the replacement model the user picks, and applies the
    updates in batches: all updates for one config entry are committed only
    once the flow has moved on to the next entry (or finished), so the entry
    is updated and reloaded at most once per repair session.
    """

    # Lazily created iterator over (entry_id, subentry_id) pairs still
    # using a deprecated model; created on first _async_next_target() call.
    _subentry_iter: Iterator[tuple[str, str]] | None
    # The entry/subentry currently shown in the form, if any.
    _current_entry_id: str | None
    _current_subentry_id: str | None
    # Entry ids whose reload is deferred until their repairs are applied.
    _reload_pending: set[str]
    # entry_id -> {subentry_id -> new model}; staged, not yet committed.
    _pending_updates: dict[str, dict[str, str]]

    def __init__(self) -> None:
        """Initialize the flow with no target selected and nothing staged."""
        super().__init__()
        self._subentry_iter = None
        self._current_entry_id = None
        self._current_subentry_id = None
        self._reload_pending = set()
        self._pending_updates = {}

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> data_entry_flow.FlowResult:
        """Handle the (repeated) form step of the fix flow.

        Called once without ``user_input`` to show the first form, then once
        per submission. Each submission stages the chosen model for the
        current subentry and advances to the next deprecated subentry.
        """
        previous_entry_id: str | None = None
        if user_input is not None:
            # Stage the user's choice for the subentry currently shown.
            previous_entry_id = self._async_update_current_subentry(user_input)
            self._clear_current_target()

        target = await self._async_next_target()
        next_entry_id = target[0].entry_id if target else None
        # Crossing a config-entry boundary: commit everything staged for the
        # entry we just left so it is updated/reloaded exactly once.
        if previous_entry_id and previous_entry_id != next_entry_id:
            await self._async_apply_pending_updates(previous_entry_id)
        if target is None:
            # No deprecated subentries remain; flush any leftovers and finish.
            await self._async_apply_all_pending_updates()
            return self.async_create_entry(data={})

        entry, subentry, model = target
        client = entry.runtime_data
        # Offer only models that are not themselves deprecated.
        model_list = [
            model_option
            for model_option in await get_model_list(client)
            if not model_option["value"].startswith(tuple(DEPRECATED_MODELS))
        ]

        # Suggest the current-generation model from the same family as the
        # deprecated one, falling back to the integration default.
        if "opus" in model:
            suggested_model = "claude-opus-4-5"
        elif "haiku" in model:
            suggested_model = "claude-haiku-4-5"
        elif "sonnet" in model:
            suggested_model = "claude-sonnet-4-5"
        else:
            suggested_model = cast(str, DEFAULT[CONF_CHAT_MODEL])

        schema = vol.Schema(
            {
                vol.Required(
                    CONF_CHAT_MODEL,
                    default=suggested_model,
                ): SelectSelector(
                    # custom_value lets the user type a model id not listed.
                    SelectSelectorConfig(options=model_list, custom_value=True)
                ),
            }
        )

        return self.async_show_form(
            step_id="init",
            data_schema=schema,
            description_placeholders={
                "entry_name": entry.title,
                "model": model,
                "subentry_name": subentry.title,
                "subentry_type": self._format_subentry_type(subentry.subentry_type),
            },
        )

    def _iter_deprecated_subentries(self) -> Iterator[tuple[str, str]]:
        """Yield (entry_id, subentry_id) pairs that use deprecated models.

        Only loaded entries are considered; unloaded ones cannot provide the
        runtime client needed to fetch the replacement model list.
        """
        for entry in self.hass.config_entries.async_entries(DOMAIN):
            if entry.state is not ConfigEntryState.LOADED:
                continue
            for subentry in entry.subentries.values():
                model = subentry.data.get(CONF_CHAT_MODEL)
                if model and model.startswith(tuple(DEPRECATED_MODELS)):
                    yield entry.entry_id, subentry.subentry_id

    async def _async_next_target(
        self,
    ) -> tuple[ConfigEntry, ConfigSubentry, str] | None:
        """Return the next deprecated subentry target, or None when done.

        Re-resolves entry/subentry by id on each step because they may have
        been removed while the flow was open. Subentries whose staged update
        already points at a non-deprecated model are skipped.
        """
        if self._subentry_iter is None:
            self._subentry_iter = self._iter_deprecated_subentries()

        while True:
            try:
                entry_id, subentry_id = next(self._subentry_iter)
            except StopIteration:
                return None

            entry = self.hass.config_entries.async_get_entry(entry_id)
            if entry is None:
                continue

            subentry = entry.subentries.get(subentry_id)
            if subentry is None:
                continue

            # Prefer a staged (not yet committed) model over stored data.
            model = self._pending_model(entry_id, subentry_id)
            if model is None:
                model = subentry.data.get(CONF_CHAT_MODEL)
            if not model or not model.startswith(tuple(DEPRECATED_MODELS)):
                continue

            self._current_entry_id = entry_id
            self._current_subentry_id = subentry_id
            return entry, subentry, model

    def _async_update_current_subentry(self, user_input: dict[str, str]) -> str | None:
        """Stage the submitted model for the current subentry.

        Returns the owning entry id (so the caller can detect entry-boundary
        crossings), or None when the target no longer exists. A no-op choice
        (same data) is not staged but still returns the entry id.
        """
        if not self._current_entry_id or not self._current_subentry_id:
            return None

        entry = self.hass.config_entries.async_get_entry(self._current_entry_id)
        if entry is None:
            return None

        subentry = entry.subentries.get(self._current_subentry_id)
        if subentry is None:
            return None

        updated_data = {
            **subentry.data,
            CONF_CHAT_MODEL: user_input[CONF_CHAT_MODEL],
        }
        if updated_data == subentry.data:
            return entry.entry_id
        self._queue_pending_update(
            entry.entry_id,
            subentry.subentry_id,
            updated_data[CONF_CHAT_MODEL],
        )
        return entry.entry_id

    def _clear_current_target(self) -> None:
        """Clear current target tracking."""
        self._current_entry_id = None
        self._current_subentry_id = None

    def _format_subentry_type(self, subentry_type: str) -> str:
        """Return a user-friendly label for a subentry type.

        Unknown types are passed through unchanged.
        """
        if subentry_type == "conversation":
            return "Conversation agent"
        if subentry_type in ("ai_task", "ai_task_data"):
            return "AI task"
        return subentry_type

    def _queue_pending_update(
        self, entry_id: str, subentry_id: str, model: str
    ) -> None:
        """Store a pending model update for a subentry."""
        self._pending_updates.setdefault(entry_id, {})[subentry_id] = model

    def _pending_model(self, entry_id: str, subentry_id: str) -> str | None:
        """Return a pending (staged) model update if one exists."""
        return self._pending_updates.get(entry_id, {}).get(subentry_id)

    def _mark_entry_for_reload(self, entry_id: str) -> None:
        """Flag an entry for a single deferred reload.

        Also records the entry id in hass.data under
        DATA_REPAIR_DEFER_RELOAD — presumably checked by the integration's
        update listener to suppress per-update reloads while the repair is
        in progress (TODO confirm against the integration setup code).
        """
        self._reload_pending.add(entry_id)
        defer_reload_entries: set[str] = self.hass.data.setdefault(
            DOMAIN, {}
        ).setdefault(DATA_REPAIR_DEFER_RELOAD, set())
        defer_reload_entries.add(entry_id)

    async def _async_reload_entry(self, entry_id: str) -> None:
        """Reload an entry once all of its repairs are committed.

        No-op unless the entry was previously marked; entries that are no
        longer loaded are unmarked without reloading.
        """
        if entry_id not in self._reload_pending:
            return

        entry = self.hass.config_entries.async_get_entry(entry_id)
        if entry is not None and entry.state is not ConfigEntryState.LOADED:
            # Entry went away from the LOADED state meanwhile; just clean up.
            self._clear_defer_reload(entry_id)
            self._reload_pending.discard(entry_id)
            return

        if entry is not None:
            await self.hass.config_entries.async_reload(entry_id)

        self._clear_defer_reload(entry_id)
        self._reload_pending.discard(entry_id)

    def _clear_defer_reload(self, entry_id: str) -> None:
        """Remove the entry from the deferred-reload set in hass.data."""
        defer_reload_entries: set[str] = self.hass.data.setdefault(
            DOMAIN, {}
        ).setdefault(DATA_REPAIR_DEFER_RELOAD, set())
        defer_reload_entries.discard(entry_id)

    async def _async_apply_pending_updates(self, entry_id: str) -> None:
        """Commit all staged subentry updates for a single entry.

        Marks the entry for reload before the first actual change (so the
        deferred-reload flag is set while updates are written) and reloads
        it once afterwards. Entries that vanished or unloaded are skipped.
        """
        updates = self._pending_updates.pop(entry_id, None)
        if not updates:
            return

        entry = self.hass.config_entries.async_get_entry(entry_id)
        if entry is None or entry.state is not ConfigEntryState.LOADED:
            return

        changed = False
        for subentry_id, model in updates.items():
            subentry = entry.subentries.get(subentry_id)
            if subentry is None:
                continue

            updated_data = {
                **subentry.data,
                CONF_CHAT_MODEL: model,
            }
            if updated_data == subentry.data:
                continue

            if not changed:
                # Defer the reload before the first write triggers listeners.
                self._mark_entry_for_reload(entry_id)
                changed = True

            self.hass.config_entries.async_update_subentry(
                entry,
                subentry,
                data=updated_data,
            )

        if not changed:
            return

        await self._async_reload_entry(entry_id)

    async def _async_apply_all_pending_updates(self) -> None:
        """Apply all staged updates across every remaining entry."""
        # Copy the keys: _async_apply_pending_updates pops from the dict.
        for entry_id in list(self._pending_updates):
            await self._async_apply_pending_updates(entry_id)
||||
|
||||
async def async_create_fix_flow(
    hass: HomeAssistant,
    issue_id: str,
    data: dict[str, str | int | float | None] | None,
) -> RepairsFlow:
    """Create flow."""
    # Reject anything other than the single issue this module repairs.
    if issue_id != "model_deprecated":
        raise HomeAssistantError("Unknown issue ID")
    return ModelDeprecatedRepairFlow()
|
||||
@@ -109,5 +109,21 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"model_deprecated": {
|
||||
"fix_flow": {
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"chat_model": "[%key:common::generic::model%]"
|
||||
},
|
||||
"description": "You are updating {subentry_name} ({subentry_type}) in {entry_name}. The current model {model} is deprecated. Select a supported model to continue.",
|
||||
"title": "Update model"
|
||||
}
|
||||
}
|
||||
},
|
||||
"title": "Model deprecated"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/aosmith",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["py-aosmith==1.0.15"]
|
||||
"requirements": ["py-aosmith==1.0.16"]
|
||||
}
|
||||
|
||||
@@ -540,7 +540,17 @@ class APCUPSdSensor(APCUPSdEntity, SensorEntity):
|
||||
data = self.coordinator.data[key]
|
||||
|
||||
if self.entity_description.device_class == SensorDeviceClass.TIMESTAMP:
|
||||
self._attr_native_value = dateutil.parser.parse(data)
|
||||
# The date could be "N/A" for certain fields (e.g., XOFFBATT), indicating there is no value yet.
|
||||
if data == "N/A":
|
||||
self._attr_native_value = None
|
||||
return
|
||||
|
||||
try:
|
||||
self._attr_native_value = dateutil.parser.parse(data)
|
||||
except (dateutil.parser.ParserError, OverflowError):
|
||||
# If parsing fails we should mark it as unknown, with a log for further debugging.
|
||||
_LOGGER.warning('Failed to parse date for %s: "%s"', key, data)
|
||||
self._attr_native_value = None
|
||||
return
|
||||
|
||||
self._attr_native_value, inferred_unit = infer_unit(data)
|
||||
|
||||
@@ -2,15 +2,16 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pyatv.const import KeyboardFocusState
|
||||
from pyatv.const import FeatureName, FeatureState, KeyboardFocusState
|
||||
from pyatv.interface import AppleTV, KeyboardListener
|
||||
|
||||
from homeassistant.components.binary_sensor import BinarySensorEntity
|
||||
from homeassistant.const import CONF_NAME
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AppleTvConfigEntry
|
||||
from . import SIGNAL_CONNECTED, AppleTvConfigEntry
|
||||
from .entity import AppleTVEntity
|
||||
|
||||
|
||||
@@ -21,10 +22,22 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Load Apple TV binary sensor based on a config entry."""
|
||||
# apple_tv config entries always have a unique id
|
||||
assert config_entry.unique_id is not None
|
||||
name: str = config_entry.data[CONF_NAME]
|
||||
manager = config_entry.runtime_data
|
||||
async_add_entities([AppleTVKeyboardFocused(name, config_entry.unique_id, manager)])
|
||||
cb: CALLBACK_TYPE
|
||||
|
||||
def setup_entities(atv: AppleTV) -> None:
|
||||
if atv.features.in_state(FeatureState.Available, FeatureName.TextFocusState):
|
||||
assert config_entry.unique_id is not None
|
||||
name: str = config_entry.data[CONF_NAME]
|
||||
async_add_entities(
|
||||
[AppleTVKeyboardFocused(name, config_entry.unique_id, manager)]
|
||||
)
|
||||
cb()
|
||||
|
||||
cb = async_dispatcher_connect(
|
||||
hass, f"{SIGNAL_CONNECTED}_{config_entry.unique_id}", setup_entities
|
||||
)
|
||||
config_entry.async_on_unload(cb)
|
||||
|
||||
|
||||
class AppleTVKeyboardFocused(AppleTVEntity, BinarySensorEntity, KeyboardListener):
|
||||
|
||||
@@ -7,7 +7,7 @@ import asyncio
|
||||
from collections.abc import Callable, Mapping
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any, Literal, Protocol, cast
|
||||
from typing import Any, Protocol, cast
|
||||
|
||||
from propcache.api import cached_property
|
||||
import voluptuous as vol
|
||||
@@ -25,18 +25,11 @@ from homeassistant.const import (
|
||||
CONF_ACTIONS,
|
||||
CONF_ALIAS,
|
||||
CONF_CONDITIONS,
|
||||
CONF_DEVICE_ID,
|
||||
CONF_ENTITY_ID,
|
||||
CONF_EVENT_DATA,
|
||||
CONF_ID,
|
||||
CONF_MODE,
|
||||
CONF_OPTIONS,
|
||||
CONF_PATH,
|
||||
CONF_PLATFORM,
|
||||
CONF_TARGET,
|
||||
CONF_TRIGGERS,
|
||||
CONF_VARIABLES,
|
||||
CONF_ZONE,
|
||||
EVENT_HOMEASSISTANT_STARTED,
|
||||
SERVICE_RELOAD,
|
||||
SERVICE_TOGGLE,
|
||||
@@ -53,10 +46,13 @@ from homeassistant.core import (
|
||||
ServiceCall,
|
||||
callback,
|
||||
split_entity_id,
|
||||
valid_entity_id,
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceNotFound, TemplateError
|
||||
from homeassistant.helpers import condition as condition_helper, config_validation as cv
|
||||
from homeassistant.helpers import (
|
||||
condition as condition_helper,
|
||||
config_validation as cv,
|
||||
trigger as trigger_helper,
|
||||
)
|
||||
from homeassistant.helpers.entity import ToggleEntity
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.issue_registry import (
|
||||
@@ -86,7 +82,6 @@ from homeassistant.helpers.trace import (
|
||||
trace_get,
|
||||
trace_path,
|
||||
)
|
||||
from homeassistant.helpers.trigger import async_initialize_triggers
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import bind_hass
|
||||
from homeassistant.util.dt import parse_datetime
|
||||
@@ -618,7 +613,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
)
|
||||
|
||||
for conf in self._trigger_config:
|
||||
referenced |= set(_get_targets_from_trigger_config(conf, ATTR_LABEL_ID))
|
||||
referenced |= set(trigger_helper.async_extract_targets(conf, ATTR_LABEL_ID))
|
||||
return referenced
|
||||
|
||||
@cached_property
|
||||
@@ -633,7 +628,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
)
|
||||
|
||||
for conf in self._trigger_config:
|
||||
referenced |= set(_get_targets_from_trigger_config(conf, ATTR_FLOOR_ID))
|
||||
referenced |= set(trigger_helper.async_extract_targets(conf, ATTR_FLOOR_ID))
|
||||
return referenced
|
||||
|
||||
@cached_property
|
||||
@@ -646,7 +641,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
referenced |= condition_helper.async_extract_targets(conf, ATTR_AREA_ID)
|
||||
|
||||
for conf in self._trigger_config:
|
||||
referenced |= set(_get_targets_from_trigger_config(conf, ATTR_AREA_ID))
|
||||
referenced |= set(trigger_helper.async_extract_targets(conf, ATTR_AREA_ID))
|
||||
return referenced
|
||||
|
||||
@property
|
||||
@@ -666,7 +661,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
referenced |= condition_helper.async_extract_devices(conf)
|
||||
|
||||
for conf in self._trigger_config:
|
||||
referenced |= set(_trigger_extract_devices(conf))
|
||||
referenced |= set(trigger_helper.async_extract_devices(conf))
|
||||
|
||||
return referenced
|
||||
|
||||
@@ -680,7 +675,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
referenced |= condition_helper.async_extract_entities(conf)
|
||||
|
||||
for conf in self._trigger_config:
|
||||
for entity_id in _trigger_extract_entities(conf):
|
||||
for entity_id in trigger_helper.async_extract_entities(conf):
|
||||
referenced.add(entity_id)
|
||||
|
||||
return referenced
|
||||
@@ -954,7 +949,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
self._logger.error("Error rendering trigger variables: %s", err)
|
||||
return None
|
||||
|
||||
return await async_initialize_triggers(
|
||||
return await trigger_helper.async_initialize_triggers(
|
||||
self.hass,
|
||||
self._trigger_config,
|
||||
self._async_trigger_if_enabled,
|
||||
@@ -1238,78 +1233,6 @@ async def _async_process_if(
|
||||
return result
|
||||
|
||||
|
||||
@callback
|
||||
def _trigger_extract_devices(trigger_conf: dict) -> list[str]:
|
||||
"""Extract devices from a trigger config."""
|
||||
if trigger_conf[CONF_PLATFORM] == "device":
|
||||
return [trigger_conf[CONF_DEVICE_ID]]
|
||||
|
||||
if (
|
||||
trigger_conf[CONF_PLATFORM] == "event"
|
||||
and CONF_EVENT_DATA in trigger_conf
|
||||
and CONF_DEVICE_ID in trigger_conf[CONF_EVENT_DATA]
|
||||
and isinstance(trigger_conf[CONF_EVENT_DATA][CONF_DEVICE_ID], str)
|
||||
):
|
||||
return [trigger_conf[CONF_EVENT_DATA][CONF_DEVICE_ID]]
|
||||
|
||||
if trigger_conf[CONF_PLATFORM] == "tag" and CONF_DEVICE_ID in trigger_conf:
|
||||
return trigger_conf[CONF_DEVICE_ID] # type: ignore[no-any-return]
|
||||
|
||||
if target_devices := _get_targets_from_trigger_config(trigger_conf, CONF_DEVICE_ID):
|
||||
return target_devices
|
||||
|
||||
return []
|
||||
|
||||
|
||||
@callback
|
||||
def _trigger_extract_entities(trigger_conf: dict) -> list[str]:
|
||||
"""Extract entities from a trigger config."""
|
||||
if trigger_conf[CONF_PLATFORM] in ("state", "numeric_state"):
|
||||
return trigger_conf[CONF_ENTITY_ID] # type: ignore[no-any-return]
|
||||
|
||||
if trigger_conf[CONF_PLATFORM] == "calendar":
|
||||
return [trigger_conf[CONF_OPTIONS][CONF_ENTITY_ID]]
|
||||
|
||||
if trigger_conf[CONF_PLATFORM] == "zone":
|
||||
return trigger_conf[CONF_ENTITY_ID] + [trigger_conf[CONF_ZONE]] # type: ignore[no-any-return]
|
||||
|
||||
if trigger_conf[CONF_PLATFORM] == "geo_location":
|
||||
return [trigger_conf[CONF_ZONE]]
|
||||
|
||||
if trigger_conf[CONF_PLATFORM] == "sun":
|
||||
return ["sun.sun"]
|
||||
|
||||
if (
|
||||
trigger_conf[CONF_PLATFORM] == "event"
|
||||
and CONF_EVENT_DATA in trigger_conf
|
||||
and CONF_ENTITY_ID in trigger_conf[CONF_EVENT_DATA]
|
||||
and isinstance(trigger_conf[CONF_EVENT_DATA][CONF_ENTITY_ID], str)
|
||||
and valid_entity_id(trigger_conf[CONF_EVENT_DATA][CONF_ENTITY_ID])
|
||||
):
|
||||
return [trigger_conf[CONF_EVENT_DATA][CONF_ENTITY_ID]]
|
||||
|
||||
if target_entities := _get_targets_from_trigger_config(
|
||||
trigger_conf, CONF_ENTITY_ID
|
||||
):
|
||||
return target_entities
|
||||
|
||||
return []
|
||||
|
||||
|
||||
@callback
|
||||
def _get_targets_from_trigger_config(
|
||||
config: dict,
|
||||
target: Literal["entity_id", "device_id", "area_id", "floor_id", "label_id"],
|
||||
) -> list[str]:
|
||||
"""Extract targets from a target config."""
|
||||
if not (target_conf := config.get(CONF_TARGET)):
|
||||
return []
|
||||
if not (targets := target_conf.get(target)):
|
||||
return []
|
||||
|
||||
return [targets] if isinstance(targets, str) else targets
|
||||
|
||||
|
||||
@websocket_api.websocket_command({"type": "automation/config", "entity_id": str})
|
||||
def websocket_config(
|
||||
hass: HomeAssistant,
|
||||
|
||||
@@ -52,7 +52,7 @@ from .const import (
|
||||
from .errors import AuthenticationRequired, CannotConnect
|
||||
from .hub import AxisHub, get_axis_api
|
||||
|
||||
AXIS_OUI = {"00:40:8c", "ac:cc:8e", "b8:a4:4f"}
|
||||
AXIS_OUI = {"00:40:8c", "ac:cc:8e", "b8:a4:4f", "e8:27:25"}
|
||||
DEFAULT_PORT = 443
|
||||
DEFAULT_PROTOCOL = "https"
|
||||
PROTOCOL_CHOICES = ["https", "http"]
|
||||
|
||||
@@ -26,6 +26,7 @@ EXCLUDE_FROM_BACKUP = [
|
||||
"tmp_backups/*.tar",
|
||||
"OZW_Log.txt",
|
||||
"tts/*",
|
||||
".cache/*",
|
||||
]
|
||||
|
||||
EXCLUDE_DATABASE_FROM_BACKUP = [
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Support for Baidu speech service."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aip import AipSpeech
|
||||
import voluptuous as vol
|
||||
@@ -9,6 +10,7 @@ from homeassistant.components.tts import (
|
||||
CONF_LANG,
|
||||
PLATFORM_SCHEMA as TTS_PLATFORM_SCHEMA,
|
||||
Provider,
|
||||
TtsAudioType,
|
||||
)
|
||||
from homeassistant.const import CONF_API_KEY
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
@@ -85,17 +87,17 @@ class BaiduTTSProvider(Provider):
|
||||
}
|
||||
|
||||
@property
|
||||
def default_language(self):
|
||||
def default_language(self) -> str:
|
||||
"""Return the default language."""
|
||||
return self._lang
|
||||
|
||||
@property
|
||||
def supported_languages(self):
|
||||
def supported_languages(self) -> list[str]:
|
||||
"""Return a list of supported languages."""
|
||||
return SUPPORTED_LANGUAGES
|
||||
|
||||
@property
|
||||
def default_options(self):
|
||||
def default_options(self) -> dict[str, Any]:
|
||||
"""Return a dict including default options."""
|
||||
return {
|
||||
CONF_PERSON: self._speech_conf_data[_OPTIONS[CONF_PERSON]],
|
||||
@@ -105,11 +107,16 @@ class BaiduTTSProvider(Provider):
|
||||
}
|
||||
|
||||
@property
|
||||
def supported_options(self):
|
||||
def supported_options(self) -> list[str]:
|
||||
"""Return a list of supported options."""
|
||||
return SUPPORTED_OPTIONS
|
||||
|
||||
def get_tts_audio(self, message, language, options):
|
||||
def get_tts_audio(
|
||||
self,
|
||||
message: str,
|
||||
language: str,
|
||||
options: dict[str, Any],
|
||||
) -> TtsAudioType:
|
||||
"""Load TTS from BaiduTTS."""
|
||||
|
||||
aip_speech = AipSpeech(
|
||||
|
||||
@@ -6,6 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/bang_olufsen",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["mozart-api==5.3.1.108.0"],
|
||||
"requirements": ["mozart-api==5.3.1.108.2"],
|
||||
"zeroconf": ["_bangolufsen._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ from datetime import timedelta
|
||||
import json
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
from uuid import UUID
|
||||
|
||||
from aiohttp import ClientConnectorError
|
||||
from mozart_api import __version__ as MOZART_API_VERSION
|
||||
@@ -735,7 +736,7 @@ class BeoMediaPlayer(BeoEntity, MediaPlayerEntity):
|
||||
await self._client.set_active_source(source_id=key)
|
||||
else:
|
||||
# Video
|
||||
await self._client.post_remote_trigger(id=key)
|
||||
await self._client.post_remote_trigger(id=UUID(key))
|
||||
|
||||
async def async_select_sound_mode(self, sound_mode: str) -> None:
|
||||
"""Select a sound mode."""
|
||||
@@ -894,7 +895,7 @@ class BeoMediaPlayer(BeoEntity, MediaPlayerEntity):
|
||||
translation_key="play_media_error",
|
||||
translation_placeholders={
|
||||
"media_type": media_type,
|
||||
"error_message": json.loads(error.body)["message"],
|
||||
"error_message": json.loads(cast(str, error.body))["message"],
|
||||
},
|
||||
) from error
|
||||
|
||||
|
||||
@@ -6,16 +6,9 @@ from typing import Any
|
||||
|
||||
from blinkpy.auth import Auth
|
||||
from blinkpy.blinkpy import Blink
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import persistent_notification
|
||||
from homeassistant.const import (
|
||||
CONF_FILE_PATH,
|
||||
CONF_FILENAME,
|
||||
CONF_NAME,
|
||||
CONF_PIN,
|
||||
CONF_SCAN_INTERVAL,
|
||||
)
|
||||
from homeassistant.const import CONF_SCAN_INTERVAL
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
@@ -27,13 +20,6 @@ from .services import async_setup_services
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SERVICE_SAVE_VIDEO_SCHEMA = vol.Schema(
|
||||
{vol.Required(CONF_NAME): cv.string, vol.Required(CONF_FILENAME): cv.string}
|
||||
)
|
||||
SERVICE_SEND_PIN_SCHEMA = vol.Schema({vol.Optional(CONF_PIN): cv.string})
|
||||
SERVICE_SAVE_RECENT_CLIPS_SCHEMA = vol.Schema(
|
||||
{vol.Required(CONF_NAME): cv.string, vol.Required(CONF_FILE_PATH): cv.string}
|
||||
)
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
@@ -9,35 +9,23 @@ from typing import Any
|
||||
from blinkpy.auth import UnauthorizedError
|
||||
from blinkpy.camera import BlinkCamera as BlinkCameraAPI
|
||||
from requests.exceptions import ChunkedEncodingError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.camera import Camera
|
||||
from homeassistant.const import CONF_FILE_PATH, CONF_FILENAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import (
|
||||
ConfigEntryAuthFailed,
|
||||
HomeAssistantError,
|
||||
ServiceValidationError,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import (
|
||||
DEFAULT_BRAND,
|
||||
DOMAIN,
|
||||
SERVICE_RECORD,
|
||||
SERVICE_SAVE_RECENT_CLIPS,
|
||||
SERVICE_SAVE_VIDEO,
|
||||
SERVICE_TRIGGER,
|
||||
)
|
||||
from .const import DEFAULT_BRAND, DOMAIN
|
||||
from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_VIDEO_CLIP = "video"
|
||||
ATTR_IMAGE = "image"
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
|
||||
@@ -56,20 +44,6 @@ async def async_setup_entry(
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
platform.async_register_entity_service(SERVICE_RECORD, None, "record")
|
||||
platform.async_register_entity_service(SERVICE_TRIGGER, None, "trigger_camera")
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_SAVE_RECENT_CLIPS,
|
||||
{vol.Required(CONF_FILE_PATH): cv.string},
|
||||
"save_recent_clips",
|
||||
)
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_SAVE_VIDEO,
|
||||
{vol.Required(CONF_FILENAME): cv.string},
|
||||
"save_video",
|
||||
)
|
||||
|
||||
|
||||
class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera):
|
||||
"""An implementation of a Blink Camera."""
|
||||
|
||||
@@ -20,11 +20,6 @@ TYPE_TEMPERATURE = "temperature"
|
||||
TYPE_BATTERY = "battery"
|
||||
TYPE_WIFI_STRENGTH = "wifi_strength"
|
||||
|
||||
SERVICE_RECORD = "record"
|
||||
SERVICE_TRIGGER = "trigger_camera"
|
||||
SERVICE_SAVE_VIDEO = "save_video"
|
||||
SERVICE_SAVE_RECENT_CLIPS = "save_recent_clips"
|
||||
SERVICE_SEND_PIN = "send_pin"
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.ALARM_CONTROL_PANEL,
|
||||
|
||||
@@ -4,13 +4,27 @@ from __future__ import annotations
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_CONFIG_ENTRY_ID, CONF_PIN
|
||||
from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN
|
||||
from homeassistant.const import (
|
||||
ATTR_CONFIG_ENTRY_ID,
|
||||
CONF_FILE_PATH,
|
||||
CONF_FILENAME,
|
||||
CONF_PIN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, issue_registry as ir
|
||||
from homeassistant.helpers import config_validation as cv, issue_registry as ir, service
|
||||
|
||||
from .const import DOMAIN, SERVICE_SEND_PIN
|
||||
from .const import DOMAIN
|
||||
|
||||
SERVICE_RECORD = "record"
|
||||
SERVICE_TRIGGER = "trigger_camera"
|
||||
SERVICE_SAVE_VIDEO = "save_video"
|
||||
SERVICE_SAVE_RECENT_CLIPS = "save_recent_clips"
|
||||
|
||||
|
||||
# Deprecated
|
||||
SERVICE_SEND_PIN = "send_pin"
|
||||
SERVICE_SEND_PIN_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY_ID): vol.All(cv.ensure_list, [cv.string]),
|
||||
@@ -52,3 +66,36 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
_send_pin,
|
||||
schema=SERVICE_SEND_PIN_SCHEMA,
|
||||
)
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_RECORD,
|
||||
entity_domain=CAMERA_DOMAIN,
|
||||
schema=None,
|
||||
func="record",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_TRIGGER,
|
||||
entity_domain=CAMERA_DOMAIN,
|
||||
schema=None,
|
||||
func="trigger_camera",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_SAVE_RECENT_CLIPS,
|
||||
entity_domain=CAMERA_DOMAIN,
|
||||
schema={vol.Required(CONF_FILE_PATH): cv.string},
|
||||
func="save_recent_clips",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_SAVE_VIDEO,
|
||||
entity_domain=CAMERA_DOMAIN,
|
||||
schema={vol.Required(CONF_FILENAME): cv.string},
|
||||
func="save_video",
|
||||
)
|
||||
|
||||
@@ -13,14 +13,7 @@ from homeassistant.helpers import config_validation as cv, service
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import (
|
||||
ATTR_MASTER,
|
||||
DOMAIN,
|
||||
SERVICE_CLEAR_TIMER,
|
||||
SERVICE_JOIN,
|
||||
SERVICE_SET_TIMER,
|
||||
SERVICE_UNJOIN,
|
||||
)
|
||||
from .const import ATTR_MASTER, DOMAIN, SERVICE_JOIN, SERVICE_UNJOIN
|
||||
from .coordinator import (
|
||||
BluesoundConfigEntry,
|
||||
BluesoundCoordinator,
|
||||
@@ -37,22 +30,6 @@ PLATFORMS = [
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Bluesound."""
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_SET_TIMER,
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema=None,
|
||||
func="async_increase_timer",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_CLEAR_TIMER,
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema=None,
|
||||
func="async_clear_timer",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
|
||||
@@ -5,7 +5,5 @@ INTEGRATION_TITLE = "Bluesound"
|
||||
ATTR_BLUESOUND_GROUP = "bluesound_group"
|
||||
ATTR_MASTER = "master"
|
||||
|
||||
SERVICE_CLEAR_TIMER = "clear_sleep_timer"
|
||||
SERVICE_JOIN = "join"
|
||||
SERVICE_SET_TIMER = "set_sleep_timer"
|
||||
SERVICE_UNJOIN = "unjoin"
|
||||
|
||||
@@ -1,14 +1,8 @@
|
||||
{
|
||||
"services": {
|
||||
"clear_sleep_timer": {
|
||||
"service": "mdi:sleep-off"
|
||||
},
|
||||
"join": {
|
||||
"service": "mdi:link-variant"
|
||||
},
|
||||
"set_sleep_timer": {
|
||||
"service": "mdi:sleep"
|
||||
},
|
||||
"unjoin": {
|
||||
"service": "mdi:link-variant-off"
|
||||
}
|
||||
|
||||
@@ -39,9 +39,7 @@ from .const import (
|
||||
ATTR_BLUESOUND_GROUP,
|
||||
ATTR_MASTER,
|
||||
DOMAIN,
|
||||
SERVICE_CLEAR_TIMER,
|
||||
SERVICE_JOIN,
|
||||
SERVICE_SET_TIMER,
|
||||
SERVICE_UNJOIN,
|
||||
)
|
||||
from .coordinator import BluesoundCoordinator
|
||||
@@ -603,42 +601,6 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
"""Remove follower to leader."""
|
||||
await self._player.remove_follower(host, port)
|
||||
|
||||
async def async_increase_timer(self) -> int:
|
||||
"""Increase sleep time on player."""
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"deprecated_service_{SERVICE_SET_TIMER}",
|
||||
is_fixable=False,
|
||||
breaks_in_ha_version="2025.12.0",
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_service_set_sleep_timer",
|
||||
translation_placeholders={
|
||||
"name": slugify(self.sync_status.name),
|
||||
},
|
||||
)
|
||||
return await self._player.sleep_timer()
|
||||
|
||||
async def async_clear_timer(self) -> None:
|
||||
"""Clear sleep timer on player."""
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"deprecated_service_{SERVICE_CLEAR_TIMER}",
|
||||
is_fixable=False,
|
||||
breaks_in_ha_version="2025.12.0",
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_service_clear_sleep_timer",
|
||||
translation_placeholders={
|
||||
"name": slugify(self.sync_status.name),
|
||||
},
|
||||
)
|
||||
sleep = 1
|
||||
while sleep > 0:
|
||||
sleep = await self._player.sleep_timer()
|
||||
|
||||
async def async_set_shuffle(self, shuffle: bool) -> None:
|
||||
"""Enable or disable shuffle mode."""
|
||||
await self._player.shuffle(shuffle)
|
||||
|
||||
@@ -19,19 +19,3 @@ unjoin:
|
||||
entity:
|
||||
integration: bluesound
|
||||
domain: media_player
|
||||
|
||||
set_sleep_timer:
|
||||
fields:
|
||||
entity_id:
|
||||
selector:
|
||||
entity:
|
||||
integration: bluesound
|
||||
domain: media_player
|
||||
|
||||
clear_sleep_timer:
|
||||
fields:
|
||||
entity_id:
|
||||
selector:
|
||||
entity:
|
||||
integration: bluesound
|
||||
domain: media_player
|
||||
|
||||
@@ -37,34 +37,16 @@
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_service_clear_sleep_timer": {
|
||||
"description": "Use `button.{name}_clear_sleep_timer` instead.\n\nPlease replace this action and adjust your automations and scripts.",
|
||||
"title": "Detected use of deprecated action bluesound.clear_sleep_timer"
|
||||
},
|
||||
"deprecated_service_join": {
|
||||
"description": "Use the `media_player.join` action instead.\n\nPlease replace this action and adjust your automations and scripts.",
|
||||
"title": "Detected use of deprecated action bluesound.join"
|
||||
},
|
||||
"deprecated_service_set_sleep_timer": {
|
||||
"description": "Use `button.{name}_set_sleep_timer` instead.\n\nPlease replace this action and adjust your automations and scripts.",
|
||||
"title": "Detected use of deprecated action bluesound.set_sleep_timer"
|
||||
},
|
||||
"deprecated_service_unjoin": {
|
||||
"description": "Use the `media_player.unjoin` action instead.\n\nPlease replace this action and adjust your automations and scripts.",
|
||||
"title": "Detected use of deprecated action bluesound.unjoin"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"clear_sleep_timer": {
|
||||
"description": "Clears a Bluesound timer.",
|
||||
"fields": {
|
||||
"entity_id": {
|
||||
"description": "Name(s) of entities that will have the timer cleared.",
|
||||
"name": "Entity"
|
||||
}
|
||||
},
|
||||
"name": "Clear sleep timer"
|
||||
},
|
||||
"join": {
|
||||
"description": "Groups players together under a single master speaker.",
|
||||
"fields": {
|
||||
@@ -79,16 +61,6 @@
|
||||
},
|
||||
"name": "Join"
|
||||
},
|
||||
"set_sleep_timer": {
|
||||
"description": "Sets a Bluesound timer that will turn off the speaker. It will increase in steps: 15, 30, 45, 60, 90, 0.",
|
||||
"fields": {
|
||||
"entity_id": {
|
||||
"description": "Name(s) of entities that will have a timer set.",
|
||||
"name": "Entity"
|
||||
}
|
||||
},
|
||||
"name": "Set sleep timer"
|
||||
},
|
||||
"unjoin": {
|
||||
"description": "Separates a player from a group.",
|
||||
"fields": {
|
||||
|
||||
@@ -16,14 +16,17 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.entity import SLOW_UPDATE_WARNING
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import BRIDGE_MAKE, DOMAIN
|
||||
from .models import BondData
|
||||
from .services import async_setup_services
|
||||
from .utils import BondHub
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
PLATFORMS = [
|
||||
Platform.BUTTON,
|
||||
Platform.COVER,
|
||||
@@ -38,6 +41,12 @@ _LOGGER = logging.getLogger(__name__)
|
||||
type BondConfigEntry = ConfigEntry[BondData]
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the component."""
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: BondConfigEntry) -> bool:
|
||||
"""Set up Bond from a config entry."""
|
||||
host = entry.data[CONF_HOST]
|
||||
|
||||
@@ -5,10 +5,3 @@ BRIDGE_MAKE = "Olibra"
|
||||
DOMAIN = "bond"
|
||||
|
||||
CONF_BOND_ID: str = "bond_id"
|
||||
|
||||
|
||||
SERVICE_SET_FAN_SPEED_TRACKED_STATE = "set_fan_speed_tracked_state"
|
||||
SERVICE_SET_POWER_TRACKED_STATE = "set_switch_power_tracked_state"
|
||||
SERVICE_SET_LIGHT_POWER_TRACKED_STATE = "set_light_power_tracked_state"
|
||||
SERVICE_SET_LIGHT_BRIGHTNESS_TRACKED_STATE = "set_light_brightness_tracked_state"
|
||||
ATTR_POWER_STATE = "power_state"
|
||||
|
||||
@@ -8,7 +8,6 @@ from typing import Any
|
||||
|
||||
from aiohttp.client_exceptions import ClientResponseError
|
||||
from bond_async import Action, DeviceType, Direction
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.fan import (
|
||||
DIRECTION_FORWARD,
|
||||
@@ -18,7 +17,6 @@ from homeassistant.components.fan import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_platform
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util.percentage import (
|
||||
percentage_to_ranged_value,
|
||||
@@ -27,7 +25,6 @@ from homeassistant.util.percentage import (
|
||||
from homeassistant.util.scaling import int_states_in_range
|
||||
|
||||
from . import BondConfigEntry
|
||||
from .const import SERVICE_SET_FAN_SPEED_TRACKED_STATE
|
||||
from .entity import BondEntity
|
||||
from .models import BondData
|
||||
from .utils import BondDevice
|
||||
@@ -44,12 +41,6 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up Bond fan devices."""
|
||||
data = entry.runtime_data
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_SET_FAN_SPEED_TRACKED_STATE,
|
||||
{vol.Required("speed"): vol.All(vol.Number(scale=0), vol.Range(0, 100))},
|
||||
"async_set_speed_belief",
|
||||
)
|
||||
|
||||
async_add_entities(
|
||||
BondFan(data, device)
|
||||
|
||||
@@ -7,37 +7,20 @@ from typing import Any
|
||||
|
||||
from aiohttp.client_exceptions import ClientResponseError
|
||||
from bond_async import Action, DeviceType
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import BondConfigEntry
|
||||
from .const import (
|
||||
ATTR_POWER_STATE,
|
||||
SERVICE_SET_LIGHT_BRIGHTNESS_TRACKED_STATE,
|
||||
SERVICE_SET_LIGHT_POWER_TRACKED_STATE,
|
||||
)
|
||||
from .entity import BondEntity
|
||||
from .models import BondData
|
||||
from .utils import BondDevice
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SERVICE_START_INCREASING_BRIGHTNESS = "start_increasing_brightness"
|
||||
SERVICE_START_DECREASING_BRIGHTNESS = "start_decreasing_brightness"
|
||||
SERVICE_STOP = "stop"
|
||||
|
||||
ENTITY_SERVICES = [
|
||||
SERVICE_START_INCREASING_BRIGHTNESS,
|
||||
SERVICE_START_DECREASING_BRIGHTNESS,
|
||||
SERVICE_STOP,
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -48,14 +31,6 @@ async def async_setup_entry(
|
||||
data = entry.runtime_data
|
||||
hub = data.hub
|
||||
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
for service in ENTITY_SERVICES:
|
||||
platform.async_register_entity_service(
|
||||
service,
|
||||
None,
|
||||
f"async_{service}",
|
||||
)
|
||||
|
||||
fan_lights: list[Entity] = [
|
||||
BondLight(data, device)
|
||||
for device in hub.devices
|
||||
@@ -94,22 +69,6 @@ async def async_setup_entry(
|
||||
if DeviceType.is_light(device.type)
|
||||
]
|
||||
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_SET_LIGHT_BRIGHTNESS_TRACKED_STATE,
|
||||
{
|
||||
vol.Required(ATTR_BRIGHTNESS): vol.All(
|
||||
vol.Number(scale=0), vol.Range(0, 255)
|
||||
)
|
||||
},
|
||||
"async_set_brightness_belief",
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_SET_LIGHT_POWER_TRACKED_STATE,
|
||||
{vol.Required(ATTR_POWER_STATE): vol.All(cv.boolean)},
|
||||
"async_set_power_belief",
|
||||
)
|
||||
|
||||
async_add_entities(
|
||||
fan_lights + fan_up_lights + fan_down_lights + fireplaces + fp_lights + lights,
|
||||
)
|
||||
|
||||
101
homeassistant/components/bond/services.py
Normal file
101
homeassistant/components/bond/services.py
Normal file
@@ -0,0 +1,101 @@
|
||||
"""Support for Bond services."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.fan import DOMAIN as FAN_DOMAIN
|
||||
from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN
|
||||
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
ATTR_POWER_STATE = "power_state"
|
||||
|
||||
# Fan
|
||||
SERVICE_SET_FAN_SPEED_TRACKED_STATE = "set_fan_speed_tracked_state"
|
||||
|
||||
# Switch
|
||||
SERVICE_SET_POWER_TRACKED_STATE = "set_switch_power_tracked_state"
|
||||
|
||||
# Light
|
||||
SERVICE_SET_LIGHT_POWER_TRACKED_STATE = "set_light_power_tracked_state"
|
||||
SERVICE_SET_LIGHT_BRIGHTNESS_TRACKED_STATE = "set_light_brightness_tracked_state"
|
||||
SERVICE_START_INCREASING_BRIGHTNESS = "start_increasing_brightness"
|
||||
SERVICE_START_DECREASING_BRIGHTNESS = "start_decreasing_brightness"
|
||||
SERVICE_STOP = "stop"
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Home Assistant services."""
|
||||
|
||||
# Fan entity services
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_SET_FAN_SPEED_TRACKED_STATE,
|
||||
entity_domain=FAN_DOMAIN,
|
||||
schema={vol.Required("speed"): vol.All(vol.Number(scale=0), vol.Range(0, 100))},
|
||||
func="async_set_speed_belief",
|
||||
)
|
||||
|
||||
# Light entity services
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_START_INCREASING_BRIGHTNESS,
|
||||
entity_domain=LIGHT_DOMAIN,
|
||||
schema=None,
|
||||
func="async_start_increasing_brightness",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_START_DECREASING_BRIGHTNESS,
|
||||
entity_domain=LIGHT_DOMAIN,
|
||||
schema=None,
|
||||
func="async_start_decreasing_brightness",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_STOP,
|
||||
entity_domain=LIGHT_DOMAIN,
|
||||
schema=None,
|
||||
func="async_stop",
|
||||
)
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_SET_LIGHT_BRIGHTNESS_TRACKED_STATE,
|
||||
entity_domain=LIGHT_DOMAIN,
|
||||
schema={
|
||||
vol.Required(ATTR_BRIGHTNESS): vol.All(
|
||||
vol.Number(scale=0), vol.Range(0, 255)
|
||||
)
|
||||
},
|
||||
func="async_set_brightness_belief",
|
||||
)
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_SET_LIGHT_POWER_TRACKED_STATE,
|
||||
entity_domain=LIGHT_DOMAIN,
|
||||
schema={vol.Required(ATTR_POWER_STATE): vol.All(cv.boolean)},
|
||||
func="async_set_power_belief",
|
||||
)
|
||||
|
||||
# Switch entity services
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_SET_POWER_TRACKED_STATE,
|
||||
entity_domain=SWITCH_DOMAIN,
|
||||
schema={vol.Required(ATTR_POWER_STATE): cv.boolean},
|
||||
func="async_set_power_belief",
|
||||
)
|
||||
@@ -6,16 +6,13 @@ from typing import Any
|
||||
|
||||
from aiohttp.client_exceptions import ClientResponseError
|
||||
from bond_async import Action, DeviceType
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import BondConfigEntry
|
||||
from .const import ATTR_POWER_STATE, SERVICE_SET_POWER_TRACKED_STATE
|
||||
from .entity import BondEntity
|
||||
|
||||
|
||||
@@ -26,12 +23,6 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up Bond generic devices."""
|
||||
data = entry.runtime_data
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_SET_POWER_TRACKED_STATE,
|
||||
{vol.Required(ATTR_POWER_STATE): cv.boolean},
|
||||
"async_set_power_belief",
|
||||
)
|
||||
|
||||
async_add_entities(
|
||||
BondSwitch(data, device)
|
||||
|
||||
@@ -1,14 +1,3 @@
|
||||
"""Constants for the Bring! integration."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
DOMAIN = "bring"
|
||||
|
||||
ATTR_SENDER: Final = "sender"
|
||||
ATTR_ITEM_NAME: Final = "item"
|
||||
ATTR_NOTIFICATION_TYPE: Final = "message"
|
||||
ATTR_REACTION: Final = "reaction"
|
||||
ATTR_ACTIVITY: Final = "uuid"
|
||||
ATTR_RECEIVER: Final = "publicUserUuid"
|
||||
SERVICE_PUSH_NOTIFICATION = "send_message"
|
||||
SERVICE_ACTIVITY_STREAM_REACTION = "send_reaction"
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
"""Actions for Bring! integration."""
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from bring_api import (
|
||||
@@ -13,22 +12,28 @@ from bring_api import (
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.event import ATTR_EVENT_TYPE
|
||||
from homeassistant.components.todo import DOMAIN as TODO_DOMAIN
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import ATTR_ENTITY_ID
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
|
||||
from .const import (
|
||||
ATTR_ACTIVITY,
|
||||
ATTR_REACTION,
|
||||
ATTR_RECEIVER,
|
||||
DOMAIN,
|
||||
SERVICE_ACTIVITY_STREAM_REACTION,
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
entity_registry as er,
|
||||
service,
|
||||
)
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import BringConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
ATTR_ACTIVITY = "uuid"
|
||||
ATTR_ITEM_NAME = "item"
|
||||
ATTR_NOTIFICATION_TYPE = "message"
|
||||
ATTR_REACTION = "reaction"
|
||||
ATTR_RECEIVER = "publicUserUuid"
|
||||
|
||||
SERVICE_PUSH_NOTIFICATION = "send_message"
|
||||
SERVICE_ACTIVITY_STREAM_REACTION = "send_reaction"
|
||||
|
||||
SERVICE_ACTIVITY_STREAM_REACTION_SCHEMA = vol.Schema(
|
||||
{
|
||||
@@ -54,6 +59,7 @@ def get_config_entry(hass: HomeAssistant, entry_id: str) -> BringConfigEntry:
|
||||
return entry
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up services for Bring! integration."""
|
||||
|
||||
@@ -108,3 +114,17 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
async_send_activity_stream_reaction,
|
||||
SERVICE_ACTIVITY_STREAM_REACTION_SCHEMA,
|
||||
)
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_PUSH_NOTIFICATION,
|
||||
entity_domain=TODO_DOMAIN,
|
||||
schema={
|
||||
vol.Required(ATTR_NOTIFICATION_TYPE): vol.All(
|
||||
vol.Upper, vol.Coerce(BringNotificationType)
|
||||
),
|
||||
vol.Optional(ATTR_ITEM_NAME): cv.string,
|
||||
},
|
||||
func="async_send_message",
|
||||
)
|
||||
|
||||
@@ -13,7 +13,6 @@ from bring_api import (
|
||||
BringNotificationType,
|
||||
BringRequestException,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.todo import (
|
||||
TodoItem,
|
||||
@@ -23,15 +22,9 @@ from homeassistant.components.todo import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import (
|
||||
ATTR_ITEM_NAME,
|
||||
ATTR_NOTIFICATION_TYPE,
|
||||
DOMAIN,
|
||||
SERVICE_PUSH_NOTIFICATION,
|
||||
)
|
||||
from .const import DOMAIN
|
||||
from .coordinator import BringConfigEntry, BringData, BringDataUpdateCoordinator
|
||||
from .entity import BringBaseEntity
|
||||
|
||||
@@ -63,19 +56,6 @@ async def async_setup_entry(
|
||||
coordinator.async_add_listener(add_entities)
|
||||
add_entities()
|
||||
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_PUSH_NOTIFICATION,
|
||||
{
|
||||
vol.Required(ATTR_NOTIFICATION_TYPE): vol.All(
|
||||
vol.Upper, vol.Coerce(BringNotificationType)
|
||||
),
|
||||
vol.Optional(ATTR_ITEM_NAME): cv.string,
|
||||
},
|
||||
"async_send_message",
|
||||
)
|
||||
|
||||
|
||||
class BringTodoListEntity(BringBaseEntity, TodoListEntity):
|
||||
"""A To-do List representation of the Bring! Shopping List."""
|
||||
|
||||
@@ -15,7 +15,7 @@ from homeassistant.components.bluetooth import (
|
||||
)
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers import device_registry as dr, issue_registry as ir
|
||||
from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceRegistry
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.util.signal_type import SignalType
|
||||
@@ -36,6 +36,45 @@ PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.EVENT, Platform.SE
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_encryption_issue_id(entry_id: str) -> str:
|
||||
"""Return the repair issue id for encryption removal."""
|
||||
return f"encryption_removed_{entry_id}"
|
||||
|
||||
|
||||
def _async_create_encryption_downgrade_issue(
|
||||
hass: HomeAssistant, entry: BTHomeConfigEntry, issue_id: str
|
||||
) -> None:
|
||||
"""Create a repair issue for encryption downgrade."""
|
||||
_LOGGER.warning(
|
||||
"BTHome device %s was previously encrypted but is now sending "
|
||||
"unencrypted data. This could be a spoofing attempt. "
|
||||
"Data will be ignored until resolved",
|
||||
entry.title,
|
||||
)
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
issue_id,
|
||||
is_fixable=True,
|
||||
is_persistent=True,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="encryption_removed",
|
||||
translation_placeholders={"name": entry.title},
|
||||
data={"entry_id": entry.entry_id},
|
||||
)
|
||||
|
||||
|
||||
def _async_clear_encryption_downgrade_issue(
|
||||
hass: HomeAssistant, entry: BTHomeConfigEntry, issue_id: str
|
||||
) -> None:
|
||||
"""Clear the encryption downgrade repair issue."""
|
||||
ir.async_delete_issue(hass, DOMAIN, issue_id)
|
||||
_LOGGER.info(
|
||||
"BTHome device %s is now sending encrypted data again. Resuming normal operation",
|
||||
entry.title,
|
||||
)
|
||||
|
||||
|
||||
def process_service_info(
|
||||
hass: HomeAssistant,
|
||||
entry: BTHomeConfigEntry,
|
||||
@@ -45,7 +84,26 @@ def process_service_info(
|
||||
"""Process a BluetoothServiceInfoBleak, running side effects and returning sensor data."""
|
||||
coordinator = entry.runtime_data
|
||||
data = coordinator.device_data
|
||||
issue_registry = ir.async_get(hass)
|
||||
issue_id = get_encryption_issue_id(entry.entry_id)
|
||||
update = data.update(service_info)
|
||||
|
||||
# Block unencrypted payloads for devices that were previously verified as encrypted.
|
||||
if entry.data.get(CONF_BINDKEY) and data.downgrade_detected:
|
||||
if not coordinator.encryption_downgrade_logged:
|
||||
coordinator.encryption_downgrade_logged = True
|
||||
if not issue_registry.async_get_issue(DOMAIN, issue_id):
|
||||
_async_create_encryption_downgrade_issue(hass, entry, issue_id)
|
||||
return SensorUpdate(title=None, devices={})
|
||||
|
||||
if data.bindkey_verified and (
|
||||
(existing_issue := issue_registry.async_get_issue(DOMAIN, issue_id))
|
||||
or coordinator.encryption_downgrade_logged
|
||||
):
|
||||
coordinator.encryption_downgrade_logged = False
|
||||
if existing_issue:
|
||||
_async_clear_encryption_downgrade_issue(hass, entry, issue_id)
|
||||
|
||||
discovered_event_classes = coordinator.discovered_event_classes
|
||||
if entry.data.get(CONF_SLEEPY_DEVICE, False) != data.sleepy_device:
|
||||
hass.config_entries.async_update_entry(
|
||||
@@ -150,3 +208,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: BTHomeConfigEntry) -> bo
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: BTHomeConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def async_remove_entry(hass: HomeAssistant, entry: BTHomeConfigEntry) -> None:
|
||||
"""Remove a config entry."""
|
||||
ir.async_delete_issue(hass, DOMAIN, get_encryption_issue_id(entry.entry_id))
|
||||
|
||||
@@ -41,6 +41,8 @@ class BTHomePassiveBluetoothProcessorCoordinator(
|
||||
self.discovered_event_classes = discovered_event_classes
|
||||
self.device_data = device_data
|
||||
self.entry = entry
|
||||
# Track whether we've already logged the encryption downgrade this session.
|
||||
self.encryption_downgrade_logged = False
|
||||
|
||||
@property
|
||||
def sleepy_device(self) -> bool:
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/bthome",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["bthome-ble==3.16.0"]
|
||||
"requirements": ["bthome-ble==3.17.0"]
|
||||
}
|
||||
|
||||
65
homeassistant/components/bthome/repairs.py
Normal file
65
homeassistant/components/bthome/repairs.py
Normal file
@@ -0,0 +1,65 @@
|
||||
"""Repairs for the BTHome integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant import data_entry_flow
|
||||
from homeassistant.components.repairs import RepairsFlow
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import issue_registry as ir
|
||||
|
||||
from . import get_encryption_issue_id
|
||||
from .const import CONF_BINDKEY, DOMAIN
|
||||
|
||||
|
||||
class EncryptionRemovedRepairFlow(RepairsFlow):
|
||||
"""Handle the repair flow when encryption is disabled."""
|
||||
|
||||
def __init__(self, entry_id: str, entry_title: str) -> None:
|
||||
"""Initialize the repair flow."""
|
||||
self._entry_id = entry_id
|
||||
self._entry_title = entry_title
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> data_entry_flow.FlowResult:
|
||||
"""Handle the initial step of the repair flow."""
|
||||
return await self.async_step_confirm()
|
||||
|
||||
async def async_step_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> data_entry_flow.FlowResult:
|
||||
"""Handle confirmation, remove the bindkey, and reload the entry."""
|
||||
if user_input is not None:
|
||||
entry = self.hass.config_entries.async_get_entry(self._entry_id)
|
||||
if not entry:
|
||||
return self.async_abort(reason="entry_removed")
|
||||
|
||||
new_data = {k: v for k, v in entry.data.items() if k != CONF_BINDKEY}
|
||||
self.hass.config_entries.async_update_entry(entry, data=new_data)
|
||||
|
||||
ir.async_delete_issue(
|
||||
self.hass, DOMAIN, get_encryption_issue_id(self._entry_id)
|
||||
)
|
||||
|
||||
await self.hass.config_entries.async_reload(self._entry_id)
|
||||
|
||||
return self.async_create_entry(data={})
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="confirm",
|
||||
description_placeholders={"name": self._entry_title},
|
||||
)
|
||||
|
||||
|
||||
async def async_create_fix_flow(
|
||||
hass: HomeAssistant, issue_id: str, data: dict[str, Any] | None
|
||||
) -> RepairsFlow:
|
||||
"""Create the repair flow for removing the encryption key."""
|
||||
if not data or "entry_id" not in data:
|
||||
raise ValueError("Missing data for repair flow")
|
||||
entry_id = data["entry_id"]
|
||||
entry = hass.config_entries.async_get_entry(entry_id)
|
||||
entry_title = entry.title if entry else "Unknown device"
|
||||
return EncryptionRemovedRepairFlow(entry_id, entry_title)
|
||||
@@ -117,5 +117,21 @@
|
||||
"name": "UV Index"
|
||||
}
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"encryption_removed": {
|
||||
"fix_flow": {
|
||||
"abort": {
|
||||
"entry_removed": "The device has been removed"
|
||||
},
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "The BTHome device **{name}** was configured with encryption but is now broadcasting unencrypted data. Data from this device is being ignored until this is resolved.\n\nIf you disabled encryption on the device, select **Submit** to remove the encryption key and resume receiving data.\n\nIf you did not disable encryption, someone may be attempting to spoof your device. Do not submit this form and the unencrypted data will continue to be ignored.",
|
||||
"title": "Remove encryption key for {name}"
|
||||
}
|
||||
}
|
||||
},
|
||||
"title": "Encryption disabled on {name}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -506,6 +506,8 @@ def is_offset_reached(
|
||||
class CalendarEntityDescription(EntityDescription, frozen_or_thawed=True):
|
||||
"""A class that describes calendar entities."""
|
||||
|
||||
initial_color: str | None = None
|
||||
|
||||
|
||||
class CalendarEntity(Entity):
|
||||
"""Base class for calendar event entities."""
|
||||
@@ -516,12 +518,16 @@ class CalendarEntity(Entity):
|
||||
|
||||
_alarm_unsubs: list[CALLBACK_TYPE] | None = None
|
||||
|
||||
_attr_initial_color: str | None = None
|
||||
_attr_initial_color: str | None
|
||||
|
||||
@property
|
||||
def initial_color(self) -> str | None:
|
||||
"""Return the initial color for the calendar entity."""
|
||||
return self._attr_initial_color
|
||||
if hasattr(self, "_attr_initial_color"):
|
||||
return self._attr_initial_color
|
||||
if hasattr(self, "entity_description"):
|
||||
return self.entity_description.initial_color
|
||||
return None
|
||||
|
||||
def get_initial_entity_options(self) -> er.EntityOptionsType | None:
|
||||
"""Return initial entity options."""
|
||||
|
||||
@@ -234,7 +234,7 @@ async def _async_get_stream_image(
|
||||
height: int | None = None,
|
||||
wait_for_next_keyframe: bool = False,
|
||||
) -> bytes | None:
|
||||
if (provider := camera.webrtc_provider) and (
|
||||
if (provider := camera._webrtc_provider) and ( # noqa: SLF001
|
||||
image := await provider.async_get_image(camera, width=width, height=height)
|
||||
) is not None:
|
||||
return image
|
||||
@@ -515,12 +515,6 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
return False
|
||||
return super().available
|
||||
|
||||
@final
|
||||
@property
|
||||
def webrtc_provider(self) -> CameraWebRTCProvider | None:
|
||||
"""Return the WebRTC provider."""
|
||||
return self._webrtc_provider
|
||||
|
||||
async def async_create_stream(self) -> Stream | None:
|
||||
"""Create a Stream for stream_source."""
|
||||
# There is at most one stream (a decode worker) per camera
|
||||
@@ -680,13 +674,6 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
self.__supports_stream = self.supported_features & CameraEntityFeature.STREAM
|
||||
await self.async_refresh_providers(write_state=False)
|
||||
|
||||
async def async_internal_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
if self._webrtc_provider:
|
||||
await self._webrtc_provider.async_unregister_camera(self)
|
||||
self._webrtc_provider = None
|
||||
await super().async_internal_will_remove_from_hass()
|
||||
|
||||
async def async_refresh_providers(self, *, write_state: bool = True) -> None:
|
||||
"""Determine if any of the registered providers are suitable for this entity.
|
||||
|
||||
@@ -703,19 +690,11 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
async_get_supported_provider
|
||||
)
|
||||
|
||||
if old_provider == new_provider:
|
||||
return
|
||||
|
||||
if old_provider:
|
||||
await old_provider.async_unregister_camera(self)
|
||||
|
||||
if new_provider:
|
||||
await new_provider.async_register_camera(self)
|
||||
|
||||
self._webrtc_provider = new_provider
|
||||
self._invalidate_camera_capabilities_cache()
|
||||
if write_state:
|
||||
self.async_write_ha_state()
|
||||
if old_provider != new_provider:
|
||||
self._webrtc_provider = new_provider
|
||||
self._invalidate_camera_capabilities_cache()
|
||||
if write_state:
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def _async_get_supported_webrtc_provider[_T](
|
||||
self, fn: Callable[[HomeAssistant, Camera], Coroutine[None, None, _T | None]]
|
||||
@@ -968,10 +947,6 @@ async def websocket_update_prefs(
|
||||
_LOGGER.error("Error setting camera preferences: %s", ex)
|
||||
connection.send_error(msg["id"], "update_failed", str(ex))
|
||||
else:
|
||||
if (camera := hass.data[DATA_COMPONENT].get_entity(entity_id)) and (
|
||||
provider := camera.webrtc_provider
|
||||
):
|
||||
await provider.async_on_camera_prefs_update(camera)
|
||||
connection.send_result(msg["id"], entity_prefs)
|
||||
|
||||
|
||||
|
||||
@@ -50,11 +50,11 @@
|
||||
"selector": {},
|
||||
"services": {
|
||||
"disable_motion_detection": {
|
||||
"description": "Disables the motion detection.",
|
||||
"description": "Disables the motion detection of a camera.",
|
||||
"name": "Disable motion detection"
|
||||
},
|
||||
"enable_motion_detection": {
|
||||
"description": "Enables the motion detection.",
|
||||
"description": "Enables the motion detection of a camera.",
|
||||
"name": "Enable motion detection"
|
||||
},
|
||||
"play_stream": {
|
||||
@@ -100,11 +100,11 @@
|
||||
"name": "Take snapshot"
|
||||
},
|
||||
"turn_off": {
|
||||
"description": "Turns off the camera.",
|
||||
"description": "Turns off a camera.",
|
||||
"name": "[%key:common::action::turn_off%]"
|
||||
},
|
||||
"turn_on": {
|
||||
"description": "Turns on the camera.",
|
||||
"description": "Turns on a camera.",
|
||||
"name": "[%key:common::action::turn_on%]"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -146,7 +146,7 @@ class CameraWebRTCProvider(ABC):
|
||||
@callback
|
||||
def async_close_session(self, session_id: str) -> None:
|
||||
"""Close the session."""
|
||||
## This is an optional method so we need a default here.
|
||||
return ## This is an optional method so we need a default here.
|
||||
|
||||
async def async_get_image(
|
||||
self,
|
||||
@@ -157,27 +157,6 @@ class CameraWebRTCProvider(ABC):
|
||||
"""Get an image from the camera."""
|
||||
return None
|
||||
|
||||
async def async_register_camera(
|
||||
self,
|
||||
camera: Camera,
|
||||
) -> None:
|
||||
"""Will be called when the provider is registered for a camera."""
|
||||
## This is an optional method so we need a default here.
|
||||
|
||||
async def async_unregister_camera(
|
||||
self,
|
||||
camera: Camera,
|
||||
) -> None:
|
||||
"""Will be called when the provider is unregistered for a camera."""
|
||||
## This is an optional method so we need a default here.
|
||||
|
||||
async def async_on_camera_prefs_update(
|
||||
self,
|
||||
camera: Camera,
|
||||
) -> None:
|
||||
"""Will be called when the camera preferences are updated."""
|
||||
## This is an optional method so we need a default here.
|
||||
|
||||
|
||||
@callback
|
||||
def async_register_webrtc_provider(
|
||||
|
||||
@@ -49,6 +49,7 @@ from .const import ( # noqa: F401
|
||||
ATTR_SWING_HORIZONTAL_MODES,
|
||||
ATTR_SWING_MODE,
|
||||
ATTR_SWING_MODES,
|
||||
ATTR_TARGET_HUMIDITY_STEP,
|
||||
ATTR_TARGET_TEMP_HIGH,
|
||||
ATTR_TARGET_TEMP_LOW,
|
||||
ATTR_TARGET_TEMP_STEP,
|
||||
@@ -234,6 +235,7 @@ CACHED_PROPERTIES_WITH_ATTR_ = {
|
||||
"max_temp",
|
||||
"min_humidity",
|
||||
"max_humidity",
|
||||
"target_humidity_step",
|
||||
}
|
||||
|
||||
|
||||
@@ -249,6 +251,7 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
ATTR_MAX_TEMP,
|
||||
ATTR_MIN_HUMIDITY,
|
||||
ATTR_MAX_HUMIDITY,
|
||||
ATTR_TARGET_HUMIDITY_STEP,
|
||||
ATTR_TARGET_TEMP_STEP,
|
||||
ATTR_PRESET_MODES,
|
||||
}
|
||||
@@ -275,6 +278,7 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
_attr_swing_horizontal_mode: str | None
|
||||
_attr_swing_horizontal_modes: list[str] | None
|
||||
_attr_target_humidity: float | None = None
|
||||
_attr_target_humidity_step: int | None = None
|
||||
_attr_target_temperature_high: float | None
|
||||
_attr_target_temperature_low: float | None
|
||||
_attr_target_temperature_step: float | None = None
|
||||
@@ -323,6 +327,9 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
data[ATTR_MIN_HUMIDITY] = self.min_humidity
|
||||
data[ATTR_MAX_HUMIDITY] = self.max_humidity
|
||||
|
||||
if self.target_humidity_step is not None:
|
||||
data[ATTR_TARGET_HUMIDITY_STEP] = self.target_humidity_step
|
||||
|
||||
if ClimateEntityFeature.FAN_MODE in supported_features:
|
||||
data[ATTR_FAN_MODES] = self.fan_modes
|
||||
|
||||
@@ -728,6 +735,11 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
"""Return the maximum humidity."""
|
||||
return self._attr_max_humidity
|
||||
|
||||
@cached_property
|
||||
def target_humidity_step(self) -> int | None:
|
||||
"""Return the supported step of humidity."""
|
||||
return self._attr_target_humidity_step
|
||||
|
||||
|
||||
async def async_service_humidity_set(
|
||||
entity: ClimateEntity, service_call: ServiceCall
|
||||
|
||||
@@ -114,6 +114,7 @@ ATTR_SWING_MODES = "swing_modes"
|
||||
ATTR_SWING_MODE = "swing_mode"
|
||||
ATTR_SWING_HORIZONTAL_MODE = "swing_horizontal_mode"
|
||||
ATTR_SWING_HORIZONTAL_MODES = "swing_horizontal_modes"
|
||||
ATTR_TARGET_HUMIDITY_STEP = "target_humidity_step"
|
||||
ATTR_TARGET_TEMP_HIGH = "target_temp_high"
|
||||
ATTR_TARGET_TEMP_LOW = "target_temp_low"
|
||||
ATTR_TARGET_TEMP_STEP = "target_temp_step"
|
||||
|
||||
@@ -12,14 +12,25 @@ from hass_nabucasa import Cloud, NabuCasaBaseError
|
||||
from hass_nabucasa.llm import (
|
||||
LLMAuthenticationError,
|
||||
LLMRateLimitError,
|
||||
LLMResponseCompletedEvent,
|
||||
LLMResponseError,
|
||||
LLMResponseErrorEvent,
|
||||
LLMResponseFailedEvent,
|
||||
LLMResponseFunctionCallArgumentsDeltaEvent,
|
||||
LLMResponseFunctionCallArgumentsDoneEvent,
|
||||
LLMResponseFunctionCallOutputItem,
|
||||
LLMResponseImageOutputItem,
|
||||
LLMResponseIncompleteEvent,
|
||||
LLMResponseMessageOutputItem,
|
||||
LLMResponseOutputItemAddedEvent,
|
||||
LLMResponseOutputItemDoneEvent,
|
||||
LLMResponseOutputTextDeltaEvent,
|
||||
LLMResponseReasoningOutputItem,
|
||||
LLMResponseReasoningSummaryTextDeltaEvent,
|
||||
LLMResponseWebSearchCallOutputItem,
|
||||
LLMResponseWebSearchCallSearchingEvent,
|
||||
LLMServiceError,
|
||||
)
|
||||
from litellm import (
|
||||
ResponseFunctionToolCall,
|
||||
ResponseInputParam,
|
||||
ResponsesAPIStreamEvents,
|
||||
)
|
||||
from openai.types.responses import (
|
||||
FunctionToolParam,
|
||||
ResponseInputItemParam,
|
||||
@@ -60,9 +71,9 @@ class ResponseItemType(str, Enum):
|
||||
|
||||
def _convert_content_to_param(
|
||||
chat_content: Iterable[conversation.Content],
|
||||
) -> ResponseInputParam:
|
||||
) -> list[ResponseInputItemParam]:
|
||||
"""Convert any native chat message for this agent to the native format."""
|
||||
messages: ResponseInputParam = []
|
||||
messages: list[ResponseInputItemParam] = []
|
||||
reasoning_summary: list[str] = []
|
||||
web_search_calls: dict[str, dict[str, Any]] = {}
|
||||
|
||||
@@ -238,7 +249,7 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
"""Transform stream result into HA format."""
|
||||
last_summary_index = None
|
||||
last_role: Literal["assistant", "tool_result"] | None = None
|
||||
current_tool_call: ResponseFunctionToolCall | None = None
|
||||
current_tool_call: LLMResponseFunctionCallOutputItem | None = None
|
||||
|
||||
# Non-reasoning models don't follow our request to remove citations, so we remove
|
||||
# them manually here. They always follow the same pattern: the citation is always
|
||||
@@ -248,19 +259,10 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
citation_regexp = re.compile(r"\(\[([^\]]+)\]\((https?:\/\/[^\)]+)\)")
|
||||
|
||||
async for event in stream:
|
||||
event_type = getattr(event, "type", None)
|
||||
event_item = getattr(event, "item", None)
|
||||
event_item_type = getattr(event_item, "type", None) if event_item else None
|
||||
_LOGGER.debug("Event[%s]", getattr(event, "type", None))
|
||||
|
||||
_LOGGER.debug(
|
||||
"Event[%s] | item: %s",
|
||||
event_type,
|
||||
event_item_type,
|
||||
)
|
||||
|
||||
if event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_ADDED:
|
||||
# Detect function_call even when it's a BaseLiteLLMOpenAIResponseObject
|
||||
if event_item_type == ResponseItemType.FUNCTION_CALL:
|
||||
if isinstance(event, LLMResponseOutputItemAddedEvent):
|
||||
if isinstance(event.item, LLMResponseFunctionCallOutputItem):
|
||||
# OpenAI has tool calls as individual events
|
||||
# while HA puts tool calls inside the assistant message.
|
||||
# We turn them into individual assistant content for HA
|
||||
@@ -268,11 +270,11 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
yield {"role": "assistant"}
|
||||
last_role = "assistant"
|
||||
last_summary_index = None
|
||||
current_tool_call = cast(ResponseFunctionToolCall, event.item)
|
||||
current_tool_call = event.item
|
||||
elif (
|
||||
event_item_type == ResponseItemType.MESSAGE
|
||||
isinstance(event.item, LLMResponseMessageOutputItem)
|
||||
or (
|
||||
event_item_type == ResponseItemType.REASONING
|
||||
isinstance(event.item, LLMResponseReasoningOutputItem)
|
||||
and last_summary_index is not None
|
||||
) # Subsequent ResponseReasoningItem
|
||||
or last_role != "assistant"
|
||||
@@ -281,14 +283,14 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
last_role = "assistant"
|
||||
last_summary_index = None
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_DONE:
|
||||
if event_item_type == ResponseItemType.REASONING:
|
||||
encrypted_content = getattr(event.item, "encrypted_content", None)
|
||||
summary = getattr(event.item, "summary", []) or []
|
||||
elif isinstance(event, LLMResponseOutputItemDoneEvent):
|
||||
if isinstance(event.item, LLMResponseReasoningOutputItem):
|
||||
encrypted_content = event.item.encrypted_content
|
||||
summary = event.item.summary
|
||||
|
||||
yield {
|
||||
"native": ResponseReasoningItem(
|
||||
type="reasoning",
|
||||
"native": LLMResponseReasoningOutputItem(
|
||||
type=event.item.type,
|
||||
id=event.item.id,
|
||||
summary=[],
|
||||
encrypted_content=encrypted_content,
|
||||
@@ -296,14 +298,8 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
}
|
||||
|
||||
last_summary_index = len(summary) - 1 if summary else None
|
||||
elif event_item_type == ResponseItemType.WEB_SEARCH_CALL:
|
||||
action = getattr(event.item, "action", None)
|
||||
if isinstance(action, dict):
|
||||
action_dict = action
|
||||
elif action is not None:
|
||||
action_dict = action.to_dict()
|
||||
else:
|
||||
action_dict = {}
|
||||
elif isinstance(event.item, LLMResponseWebSearchCallOutputItem):
|
||||
action_dict = event.item.action
|
||||
yield {
|
||||
"tool_calls": [
|
||||
llm.ToolInput(
|
||||
@@ -321,11 +317,11 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
"tool_result": {"status": event.item.status},
|
||||
}
|
||||
last_role = "tool_result"
|
||||
elif event_item_type == ResponseItemType.IMAGE:
|
||||
yield {"native": event.item}
|
||||
elif isinstance(event.item, LLMResponseImageOutputItem):
|
||||
yield {"native": event.item.raw}
|
||||
last_summary_index = -1 # Trigger new assistant message on next turn
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.OUTPUT_TEXT_DELTA:
|
||||
elif isinstance(event, LLMResponseOutputTextDeltaEvent):
|
||||
data = event.delta
|
||||
if remove_parentheses:
|
||||
data = data.removeprefix(")")
|
||||
@@ -344,7 +340,7 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
if data:
|
||||
yield {"content": data}
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.REASONING_SUMMARY_TEXT_DELTA:
|
||||
elif isinstance(event, LLMResponseReasoningSummaryTextDeltaEvent):
|
||||
# OpenAI can output several reasoning summaries
|
||||
# in a single ResponseReasoningItem. We split them as separate
|
||||
# AssistantContent messages. Only last of them will have
|
||||
@@ -358,14 +354,14 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
last_summary_index = event.summary_index
|
||||
yield {"thinking_content": event.delta}
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DELTA:
|
||||
elif isinstance(event, LLMResponseFunctionCallArgumentsDeltaEvent):
|
||||
if current_tool_call is not None:
|
||||
current_tool_call.arguments += event.delta
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.WEB_SEARCH_CALL_SEARCHING:
|
||||
elif isinstance(event, LLMResponseWebSearchCallSearchingEvent):
|
||||
yield {"role": "assistant"}
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DONE:
|
||||
elif isinstance(event, LLMResponseFunctionCallArgumentsDoneEvent):
|
||||
if current_tool_call is not None:
|
||||
current_tool_call.status = "completed"
|
||||
|
||||
@@ -385,35 +381,36 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
]
|
||||
}
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.RESPONSE_COMPLETED:
|
||||
if event.response.usage is not None:
|
||||
elif isinstance(event, LLMResponseCompletedEvent):
|
||||
response = event.response
|
||||
if response and "usage" in response:
|
||||
usage = response["usage"]
|
||||
chat_log.async_trace(
|
||||
{
|
||||
"stats": {
|
||||
"input_tokens": event.response.usage.input_tokens,
|
||||
"output_tokens": event.response.usage.output_tokens,
|
||||
"input_tokens": usage.get("input_tokens"),
|
||||
"output_tokens": usage.get("output_tokens"),
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.RESPONSE_INCOMPLETE:
|
||||
if event.response.usage is not None:
|
||||
elif isinstance(event, LLMResponseIncompleteEvent):
|
||||
response = event.response
|
||||
if response and "usage" in response:
|
||||
usage = response["usage"]
|
||||
chat_log.async_trace(
|
||||
{
|
||||
"stats": {
|
||||
"input_tokens": event.response.usage.input_tokens,
|
||||
"output_tokens": event.response.usage.output_tokens,
|
||||
"input_tokens": usage.get("input_tokens"),
|
||||
"output_tokens": usage.get("output_tokens"),
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
if (
|
||||
event.response.incomplete_details
|
||||
and event.response.incomplete_details.reason
|
||||
):
|
||||
reason: str = event.response.incomplete_details.reason
|
||||
else:
|
||||
reason = "unknown reason"
|
||||
incomplete_details = response.get("incomplete_details")
|
||||
reason = "unknown reason"
|
||||
if incomplete_details is not None and incomplete_details.get("reason"):
|
||||
reason = incomplete_details["reason"]
|
||||
|
||||
if reason == "max_output_tokens":
|
||||
reason = "max output tokens reached"
|
||||
@@ -422,22 +419,24 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
|
||||
raise HomeAssistantError(f"OpenAI response incomplete: {reason}")
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.RESPONSE_FAILED:
|
||||
if event.response.usage is not None:
|
||||
elif isinstance(event, LLMResponseFailedEvent):
|
||||
response = event.response
|
||||
if response and "usage" in response:
|
||||
usage = response["usage"]
|
||||
chat_log.async_trace(
|
||||
{
|
||||
"stats": {
|
||||
"input_tokens": event.response.usage.input_tokens,
|
||||
"output_tokens": event.response.usage.output_tokens,
|
||||
"input_tokens": usage.get("input_tokens"),
|
||||
"output_tokens": usage.get("output_tokens"),
|
||||
}
|
||||
}
|
||||
)
|
||||
reason = "unknown reason"
|
||||
if event.response.error is not None:
|
||||
reason = event.response.error.message
|
||||
if isinstance(error := response.get("error"), dict):
|
||||
reason = error.get("message") or reason
|
||||
raise HomeAssistantError(f"OpenAI response failed: {reason}")
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.ERROR:
|
||||
elif isinstance(event, LLMResponseErrorEvent):
|
||||
raise HomeAssistantError(f"OpenAI response error: {event.message}")
|
||||
|
||||
|
||||
@@ -452,7 +451,7 @@ class BaseCloudLLMEntity(Entity):
|
||||
async def _prepare_chat_for_generation(
|
||||
self,
|
||||
chat_log: conversation.ChatLog,
|
||||
messages: ResponseInputParam,
|
||||
messages: list[ResponseInputItemParam],
|
||||
response_format: dict[str, Any] | None = None,
|
||||
) -> dict[str, Any]:
|
||||
"""Prepare kwargs for Cloud LLM from the chat log."""
|
||||
@@ -460,8 +459,17 @@ class BaseCloudLLMEntity(Entity):
|
||||
last_content: Any = chat_log.content[-1]
|
||||
if last_content.role == "user" and last_content.attachments:
|
||||
files = await self._async_prepare_files_for_prompt(last_content.attachments)
|
||||
current_content = last_content.content
|
||||
last_content = [*(current_content or []), *files]
|
||||
|
||||
last_message = cast(dict[str, Any], messages[-1])
|
||||
assert (
|
||||
last_message["type"] == "message"
|
||||
and last_message["role"] == "user"
|
||||
and isinstance(last_message["content"], str)
|
||||
)
|
||||
last_message["content"] = [
|
||||
{"type": "input_text", "text": last_message["content"]},
|
||||
*files,
|
||||
]
|
||||
|
||||
tools: list[ToolParam] = []
|
||||
tool_choice: str | None = None
|
||||
|
||||
@@ -13,6 +13,6 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["acme", "hass_nabucasa", "snitun"],
|
||||
"requirements": ["hass-nabucasa==1.11.0"],
|
||||
"requirements": ["hass-nabucasa==1.12.0", "openai==2.15.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
87
homeassistant/components/cloudflare_r2/__init__.py
Normal file
87
homeassistant/components/cloudflare_r2/__init__.py
Normal file
@@ -0,0 +1,87 @@
|
||||
"""The Cloudflare R2 integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import cast
|
||||
|
||||
from aiobotocore.client import AioBaseClient as S3Client
|
||||
from aiobotocore.session import AioSession
|
||||
from botocore.exceptions import (
|
||||
ClientError,
|
||||
ConnectionError,
|
||||
EndpointConnectionError,
|
||||
ParamValidationError,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
|
||||
|
||||
from .const import (
|
||||
CONF_ACCESS_KEY_ID,
|
||||
CONF_BUCKET,
|
||||
CONF_ENDPOINT_URL,
|
||||
CONF_SECRET_ACCESS_KEY,
|
||||
DATA_BACKUP_AGENT_LISTENERS,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
type R2ConfigEntry = ConfigEntry[S3Client]
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: R2ConfigEntry) -> bool:
|
||||
"""Set up Cloudflare R2 from a config entry."""
|
||||
|
||||
data = cast(dict, entry.data)
|
||||
try:
|
||||
session = AioSession()
|
||||
# pylint: disable-next=unnecessary-dunder-call
|
||||
client = await session.create_client(
|
||||
"s3",
|
||||
endpoint_url=data.get(CONF_ENDPOINT_URL),
|
||||
aws_secret_access_key=data[CONF_SECRET_ACCESS_KEY],
|
||||
aws_access_key_id=data[CONF_ACCESS_KEY_ID],
|
||||
).__aenter__()
|
||||
await client.head_bucket(Bucket=data[CONF_BUCKET])
|
||||
except ClientError as err:
|
||||
raise ConfigEntryError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_credentials",
|
||||
) from err
|
||||
except ParamValidationError as err:
|
||||
if "Invalid bucket name" in str(err):
|
||||
raise ConfigEntryError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_bucket_name",
|
||||
) from err
|
||||
except ValueError as err:
|
||||
raise ConfigEntryError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_endpoint_url",
|
||||
) from err
|
||||
except (ConnectionError, EndpointConnectionError) as err:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_connect",
|
||||
) from err
|
||||
|
||||
entry.runtime_data = client
|
||||
|
||||
def notify_backup_listeners() -> None:
|
||||
for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
|
||||
listener()
|
||||
|
||||
entry.async_on_unload(entry.async_on_state_change(notify_backup_listeners))
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: R2ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
client = entry.runtime_data
|
||||
await client.__aexit__(None, None, None)
|
||||
return True
|
||||
346
homeassistant/components/cloudflare_r2/backup.py
Normal file
346
homeassistant/components/cloudflare_r2/backup.py
Normal file
@@ -0,0 +1,346 @@
|
||||
"""Backup platform for the Cloudflare R2 integration."""
|
||||
|
||||
from collections.abc import AsyncIterator, Callable, Coroutine
|
||||
import functools
|
||||
import json
|
||||
import logging
|
||||
from time import time
|
||||
from typing import Any
|
||||
|
||||
from botocore.exceptions import BotoCoreError
|
||||
|
||||
from homeassistant.components.backup import (
|
||||
AgentBackup,
|
||||
BackupAgent,
|
||||
BackupAgentError,
|
||||
BackupNotFound,
|
||||
suggested_filename,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from . import R2ConfigEntry
|
||||
from .const import CONF_BUCKET, CONF_PREFIX, DATA_BACKUP_AGENT_LISTENERS, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
CACHE_TTL = 300
|
||||
|
||||
# S3 part size requirements: 5 MiB to 5 GiB per part
|
||||
# We set the threshold to 20 MiB to avoid too many parts.
|
||||
# Note that each part is allocated in the memory.
|
||||
MULTIPART_MIN_PART_SIZE_BYTES = 20 * 2**20
|
||||
|
||||
|
||||
def handle_boto_errors[T](
|
||||
func: Callable[..., Coroutine[Any, Any, T]],
|
||||
) -> Callable[..., Coroutine[Any, Any, T]]:
|
||||
"""Handle BotoCoreError exceptions by converting them to BackupAgentError."""
|
||||
|
||||
@functools.wraps(func)
|
||||
async def wrapper(*args: Any, **kwargs: Any) -> T:
|
||||
"""Catch BotoCoreError and raise BackupAgentError."""
|
||||
try:
|
||||
return await func(*args, **kwargs)
|
||||
except BotoCoreError as err:
|
||||
error_msg = f"Failed during {func.__name__}"
|
||||
raise BackupAgentError(error_msg) from err
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
async def async_get_backup_agents(
|
||||
hass: HomeAssistant,
|
||||
) -> list[BackupAgent]:
|
||||
"""Return a list of backup agents."""
|
||||
entries: list[R2ConfigEntry] = hass.config_entries.async_loaded_entries(DOMAIN)
|
||||
return [R2BackupAgent(hass, entry) for entry in entries]
|
||||
|
||||
|
||||
@callback
|
||||
def async_register_backup_agents_listener(
|
||||
hass: HomeAssistant,
|
||||
*,
|
||||
listener: Callable[[], None],
|
||||
**kwargs: Any,
|
||||
) -> Callable[[], None]:
|
||||
"""Register a listener to be called when agents are added or removed.
|
||||
|
||||
:return: A function to unregister the listener.
|
||||
"""
|
||||
hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener)
|
||||
|
||||
@callback
|
||||
def remove_listener() -> None:
|
||||
"""Remove the listener."""
|
||||
hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener)
|
||||
if not hass.data[DATA_BACKUP_AGENT_LISTENERS]:
|
||||
del hass.data[DATA_BACKUP_AGENT_LISTENERS]
|
||||
|
||||
return remove_listener
|
||||
|
||||
|
||||
def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
|
||||
"""Return the suggested filenames for the backup and metadata files."""
|
||||
base_name = suggested_filename(backup).rsplit(".", 1)[0]
|
||||
return f"{base_name}.tar", f"{base_name}.metadata.json"
|
||||
|
||||
|
||||
class R2BackupAgent(BackupAgent):
|
||||
"""Backup agent for the Cloudflare R2 integration."""
|
||||
|
||||
domain = DOMAIN
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: R2ConfigEntry) -> None:
|
||||
"""Initialize the R2 agent."""
|
||||
super().__init__()
|
||||
self._client = entry.runtime_data
|
||||
self._bucket: str = entry.data[CONF_BUCKET]
|
||||
self.name = entry.title
|
||||
self.unique_id = entry.entry_id
|
||||
self._backup_cache: dict[str, AgentBackup] = {}
|
||||
self._cache_expiration = time()
|
||||
self._prefix: str = entry.data.get(CONF_PREFIX, "").strip("/")
|
||||
|
||||
def _with_prefix(self, key: str) -> str:
|
||||
if not self._prefix:
|
||||
return key
|
||||
return f"{self._prefix}/{key}"
|
||||
|
||||
@handle_boto_errors
|
||||
async def async_download_backup(
|
||||
self,
|
||||
backup_id: str,
|
||||
**kwargs: Any,
|
||||
) -> AsyncIterator[bytes]:
|
||||
"""Download a backup file.
|
||||
|
||||
:param backup_id: The ID of the backup that was returned in async_list_backups.
|
||||
:return: An async iterator that yields bytes.
|
||||
"""
|
||||
backup = await self._find_backup_by_id(backup_id)
|
||||
tar_filename, _ = suggested_filenames(backup)
|
||||
|
||||
response = await self._client.get_object(
|
||||
Bucket=self._bucket, Key=self._with_prefix(tar_filename)
|
||||
)
|
||||
return response["Body"].iter_chunks()
|
||||
|
||||
async def async_upload_backup(
|
||||
self,
|
||||
*,
|
||||
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
|
||||
backup: AgentBackup,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Upload a backup.
|
||||
|
||||
:param open_stream: A function returning an async iterator that yields bytes.
|
||||
:param backup: Metadata about the backup that should be uploaded.
|
||||
"""
|
||||
tar_filename, metadata_filename = suggested_filenames(backup)
|
||||
|
||||
try:
|
||||
if backup.size < MULTIPART_MIN_PART_SIZE_BYTES:
|
||||
await self._upload_simple(tar_filename, open_stream)
|
||||
else:
|
||||
await self._upload_multipart(tar_filename, open_stream)
|
||||
|
||||
# Upload the metadata file
|
||||
metadata_content = json.dumps(backup.as_dict())
|
||||
await self._client.put_object(
|
||||
Bucket=self._bucket,
|
||||
Key=self._with_prefix(metadata_filename),
|
||||
Body=metadata_content,
|
||||
)
|
||||
except BotoCoreError as err:
|
||||
raise BackupAgentError("Failed to upload backup") from err
|
||||
else:
|
||||
# Reset cache after successful upload
|
||||
self._cache_expiration = time()
|
||||
|
||||
async def _upload_simple(
|
||||
self,
|
||||
tar_filename: str,
|
||||
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
|
||||
) -> None:
|
||||
"""Upload a small file using simple upload.
|
||||
|
||||
:param tar_filename: The target filename for the backup.
|
||||
:param open_stream: A function returning an async iterator that yields bytes.
|
||||
"""
|
||||
_LOGGER.debug("Starting simple upload for %s", tar_filename)
|
||||
stream = await open_stream()
|
||||
file_data = bytearray()
|
||||
async for chunk in stream:
|
||||
file_data.extend(chunk)
|
||||
|
||||
await self._client.put_object(
|
||||
Bucket=self._bucket,
|
||||
Key=self._with_prefix(tar_filename),
|
||||
Body=bytes(file_data),
|
||||
)
|
||||
|
||||
async def _upload_multipart(
|
||||
self,
|
||||
tar_filename: str,
|
||||
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
|
||||
):
|
||||
"""Upload a large file using multipart upload.
|
||||
|
||||
:param tar_filename: The target filename for the backup.
|
||||
:param open_stream: A function returning an async iterator that yields bytes.
|
||||
"""
|
||||
_LOGGER.debug("Starting multipart upload for %s", tar_filename)
|
||||
multipart_upload = await self._client.create_multipart_upload(
|
||||
Bucket=self._bucket,
|
||||
Key=self._with_prefix(tar_filename),
|
||||
)
|
||||
upload_id = multipart_upload["UploadId"]
|
||||
try:
|
||||
parts = []
|
||||
part_number = 1
|
||||
buffer_size = 0 # bytes
|
||||
buffer: list[bytes] = []
|
||||
|
||||
stream = await open_stream()
|
||||
async for chunk in stream:
|
||||
buffer_size += len(chunk)
|
||||
buffer.append(chunk)
|
||||
|
||||
# If buffer size meets minimum part size, upload it as a part
|
||||
if buffer_size >= MULTIPART_MIN_PART_SIZE_BYTES:
|
||||
_LOGGER.debug(
|
||||
"Uploading part number %d, size %d", part_number, buffer_size
|
||||
)
|
||||
part = await self._client.upload_part(
|
||||
Bucket=self._bucket,
|
||||
Key=self._with_prefix(tar_filename),
|
||||
PartNumber=part_number,
|
||||
UploadId=upload_id,
|
||||
Body=b"".join(buffer),
|
||||
)
|
||||
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
|
||||
part_number += 1
|
||||
buffer_size = 0
|
||||
buffer = []
|
||||
|
||||
# Upload the final buffer as the last part (no minimum size requirement)
|
||||
if buffer:
|
||||
_LOGGER.debug(
|
||||
"Uploading final part number %d, size %d", part_number, buffer_size
|
||||
)
|
||||
part = await self._client.upload_part(
|
||||
Bucket=self._bucket,
|
||||
Key=self._with_prefix(tar_filename),
|
||||
PartNumber=part_number,
|
||||
UploadId=upload_id,
|
||||
Body=b"".join(buffer),
|
||||
)
|
||||
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
|
||||
|
||||
await self._client.complete_multipart_upload(
|
||||
Bucket=self._bucket,
|
||||
Key=self._with_prefix(tar_filename),
|
||||
UploadId=upload_id,
|
||||
MultipartUpload={"Parts": parts},
|
||||
)
|
||||
|
||||
except BotoCoreError:
|
||||
try:
|
||||
await self._client.abort_multipart_upload(
|
||||
Bucket=self._bucket,
|
||||
Key=self._with_prefix(tar_filename),
|
||||
UploadId=upload_id,
|
||||
)
|
||||
except BotoCoreError:
|
||||
_LOGGER.exception("Failed to abort multipart upload")
|
||||
raise
|
||||
|
||||
@handle_boto_errors
|
||||
async def async_delete_backup(
|
||||
self,
|
||||
backup_id: str,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Delete a backup file.
|
||||
|
||||
:param backup_id: The ID of the backup that was returned in async_list_backups.
|
||||
"""
|
||||
backup = await self._find_backup_by_id(backup_id)
|
||||
tar_filename, metadata_filename = suggested_filenames(backup)
|
||||
|
||||
# Delete both the backup file and its metadata file
|
||||
await self._client.delete_object(
|
||||
Bucket=self._bucket, Key=self._with_prefix(tar_filename)
|
||||
)
|
||||
await self._client.delete_object(
|
||||
Bucket=self._bucket, Key=self._with_prefix(metadata_filename)
|
||||
)
|
||||
|
||||
# Reset cache after successful deletion
|
||||
self._cache_expiration = time()
|
||||
|
||||
@handle_boto_errors
|
||||
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
|
||||
"""List backups."""
|
||||
backups = await self._list_backups()
|
||||
return list(backups.values())
|
||||
|
||||
@handle_boto_errors
|
||||
async def async_get_backup(
|
||||
self,
|
||||
backup_id: str,
|
||||
**kwargs: Any,
|
||||
) -> AgentBackup:
|
||||
"""Return a backup."""
|
||||
return await self._find_backup_by_id(backup_id)
|
||||
|
||||
async def _find_backup_by_id(self, backup_id: str) -> AgentBackup:
|
||||
"""Find a backup by its backup ID."""
|
||||
backups = await self._list_backups()
|
||||
if backup := backups.get(backup_id):
|
||||
return backup
|
||||
|
||||
raise BackupNotFound(f"Backup {backup_id} not found")
|
||||
|
||||
async def _list_backups(self) -> dict[str, AgentBackup]:
|
||||
"""List backups, using a cache if possible."""
|
||||
if time() <= self._cache_expiration:
|
||||
return self._backup_cache
|
||||
|
||||
backups = {}
|
||||
# Only pass Prefix if a prefix is configured; some S3-compatible APIs
|
||||
# (and type checkers) do not like Prefix=None.
|
||||
list_kwargs = {"Bucket": self._bucket}
|
||||
if self._prefix:
|
||||
list_kwargs["Prefix"] = self._prefix + "/"
|
||||
response = await self._client.list_objects_v2(**list_kwargs)
|
||||
|
||||
# Filter for metadata files only
|
||||
metadata_files = [
|
||||
obj
|
||||
for obj in response.get("Contents", [])
|
||||
if obj["Key"].endswith(".metadata.json")
|
||||
]
|
||||
|
||||
for metadata_file in metadata_files:
|
||||
try:
|
||||
# Download and parse metadata file
|
||||
metadata_response = await self._client.get_object(
|
||||
Bucket=self._bucket, Key=metadata_file["Key"]
|
||||
)
|
||||
metadata_content = await metadata_response["Body"].read()
|
||||
metadata_json = json.loads(metadata_content)
|
||||
except (BotoCoreError, json.JSONDecodeError) as err:
|
||||
_LOGGER.warning(
|
||||
"Failed to process metadata file %s: %s",
|
||||
metadata_file["Key"],
|
||||
err,
|
||||
)
|
||||
continue
|
||||
backup = AgentBackup.from_dict(metadata_json)
|
||||
backups[backup.backup_id] = backup
|
||||
|
||||
self._backup_cache = backups
|
||||
self._cache_expiration = time() + CACHE_TTL
|
||||
|
||||
return self._backup_cache
|
||||
113
homeassistant/components/cloudflare_r2/config_flow.py
Normal file
113
homeassistant/components/cloudflare_r2/config_flow.py
Normal file
@@ -0,0 +1,113 @@
|
||||
"""Config flow for the Cloudflare R2 integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from aiobotocore.session import AioSession
|
||||
from botocore.exceptions import (
|
||||
ClientError,
|
||||
ConnectionError,
|
||||
EndpointConnectionError,
|
||||
ParamValidationError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.selector import (
|
||||
TextSelector,
|
||||
TextSelectorConfig,
|
||||
TextSelectorType,
|
||||
)
|
||||
|
||||
from .const import (
|
||||
CLOUDFLARE_R2_DOMAIN,
|
||||
CONF_ACCESS_KEY_ID,
|
||||
CONF_BUCKET,
|
||||
CONF_ENDPOINT_URL,
|
||||
CONF_PREFIX,
|
||||
CONF_SECRET_ACCESS_KEY,
|
||||
DEFAULT_ENDPOINT_URL,
|
||||
DESCRIPTION_R2_AUTH_DOCS_URL,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ACCESS_KEY_ID): cv.string,
|
||||
vol.Required(CONF_SECRET_ACCESS_KEY): TextSelector(
|
||||
config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
|
||||
),
|
||||
vol.Required(CONF_BUCKET): cv.string,
|
||||
vol.Required(CONF_ENDPOINT_URL, default=DEFAULT_ENDPOINT_URL): TextSelector(
|
||||
config=TextSelectorConfig(type=TextSelectorType.URL)
|
||||
),
|
||||
vol.Optional(CONF_PREFIX, default=""): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class R2ConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Cloudflare R2."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initiated by the user.

        Validates the endpoint host, then performs a head_bucket call to
        verify credentials and bucket access before creating the entry.
        """
        errors: dict[str, str] = {}

        if user_input is not None:
            self._async_abort_entries_match(
                {
                    CONF_BUCKET: user_input[CONF_BUCKET],
                    CONF_ENDPOINT_URL: user_input[CONF_ENDPOINT_URL],
                }
            )

            parsed = urlparse(user_input[CONF_ENDPOINT_URL])
            # A plain endswith(CLOUDFLARE_R2_DOMAIN) would also accept
            # look-alike hosts such as "evil-r2.cloudflarestorage.com";
            # require a true subdomain of the R2 domain instead.
            if not parsed.hostname or not parsed.hostname.endswith(
                f".{CLOUDFLARE_R2_DOMAIN}"
            ):
                errors[CONF_ENDPOINT_URL] = "invalid_endpoint_url"
            else:
                try:
                    session = AioSession()
                    async with session.create_client(
                        "s3",
                        endpoint_url=user_input[CONF_ENDPOINT_URL],
                        aws_secret_access_key=user_input[CONF_SECRET_ACCESS_KEY],
                        aws_access_key_id=user_input[CONF_ACCESS_KEY_ID],
                    ) as client:
                        await client.head_bucket(Bucket=user_input[CONF_BUCKET])
                except ClientError:
                    errors["base"] = "invalid_credentials"
                except ParamValidationError:
                    # head_bucket only passes the bucket name, so any
                    # parameter validation failure is a bad bucket name.
                    # Previously a non-matching message left `errors` empty
                    # and silently re-showed the form with no feedback.
                    errors[CONF_BUCKET] = "invalid_bucket_name"
                except ValueError:
                    errors[CONF_ENDPOINT_URL] = "invalid_endpoint_url"
                except (EndpointConnectionError, ConnectionError):
                    # EndpointConnectionError derives from botocore's
                    # ConnectionError; both map to the same user error.
                    errors[CONF_ENDPOINT_URL] = "cannot_connect"
                else:
                    # Do not persist empty optional values
                    data = dict(user_input)
                    if not data.get(CONF_PREFIX):
                        data.pop(CONF_PREFIX, None)
                    return self.async_create_entry(
                        title=user_input[CONF_BUCKET], data=data
                    )

        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(
                STEP_USER_DATA_SCHEMA, user_input
            ),
            errors=errors,
            description_placeholders={
                "auth_docs_url": DESCRIPTION_R2_AUTH_DOCS_URL,
            },
        )
|
||||
26
homeassistant/components/cloudflare_r2/const.py
Normal file
26
homeassistant/components/cloudflare_r2/const.py
Normal file
@@ -0,0 +1,26 @@
|
||||
"""Constants for the Cloudflare R2 integration."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from typing import Final
|
||||
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
DOMAIN: Final = "cloudflare_r2"

# Config entry data keys.
CONF_ACCESS_KEY_ID = "access_key_id"
CONF_SECRET_ACCESS_KEY = "secret_access_key"
CONF_ENDPOINT_URL = "endpoint_url"
CONF_BUCKET = "bucket"
CONF_PREFIX = "prefix"

# R2 is S3-compatible. Endpoint should be like:
# https://<accountid>.r2.cloudflarestorage.com
CLOUDFLARE_R2_DOMAIN: Final = "r2.cloudflarestorage.com"
DEFAULT_ENDPOINT_URL: Final = f"https://ACCOUNT_ID.{CLOUDFLARE_R2_DOMAIN}/"

# Callbacks to notify when the set of backup agents changes.
DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
    f"{DOMAIN}.backup_agent_listeners"
)


DESCRIPTION_R2_AUTH_DOCS_URL: Final = "https://developers.cloudflare.com/r2/api/tokens/"
|
||||
12
homeassistant/components/cloudflare_r2/manifest.json
Normal file
12
homeassistant/components/cloudflare_r2/manifest.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"domain": "cloudflare_r2",
|
||||
"name": "Cloudflare R2",
|
||||
"codeowners": ["@corrreia"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/cloudflare_r2",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aiobotocore"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["aiobotocore==2.21.1"]
|
||||
}
|
||||
112
homeassistant/components/cloudflare_r2/quality_scale.yaml
Normal file
112
homeassistant/components/cloudflare_r2/quality_scale.yaml
Normal file
@@ -0,0 +1,112 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: Integration does not register custom actions.
|
||||
appropriate-polling:
|
||||
status: exempt
|
||||
comment: This integration does not poll.
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: This integration does not have any custom actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: Entities of this integration do not explicitly subscribe to events.
|
||||
entity-unique-id:
|
||||
status: exempt
|
||||
comment: This integration does not have entities.
|
||||
has-entity-name:
|
||||
status: exempt
|
||||
comment: This integration does not have entities.
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: Integration does not register custom actions.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: This integration does not have an options flow.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable:
|
||||
status: exempt
|
||||
comment: This integration does not have entities.
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
parallel-updates:
|
||||
status: exempt
|
||||
comment: This integration does not poll.
|
||||
reauthentication-flow: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices:
|
||||
status: exempt
|
||||
comment: This integration does not have entities.
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: Cloudflare R2 is a cloud service that is not discovered on the network.
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: Cloudflare R2 is a cloud service that is not discovered on the network.
|
||||
docs-data-update:
|
||||
status: exempt
|
||||
comment: This integration does not poll.
|
||||
docs-examples:
|
||||
status: exempt
|
||||
comment: The integration extends core functionality and does not require examples.
|
||||
docs-known-limitations:
|
||||
status: exempt
|
||||
comment: No known limitations.
|
||||
docs-supported-devices:
|
||||
status: exempt
|
||||
comment: This integration does not support physical devices.
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting:
|
||||
status: exempt
|
||||
comment: There are no more detailed troubleshooting instructions available than what is already included in strings.json.
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: This integration does not have devices.
|
||||
entity-category:
|
||||
status: exempt
|
||||
comment: This integration does not have entities.
|
||||
entity-device-class:
|
||||
status: exempt
|
||||
comment: This integration does not have entities.
|
||||
entity-disabled-by-default:
|
||||
status: exempt
|
||||
comment: This integration does not have entities.
|
||||
entity-translations:
|
||||
status: exempt
|
||||
comment: This integration does not have entities.
|
||||
exception-translations: done
|
||||
icon-translations:
|
||||
status: exempt
|
||||
comment: This integration does not use icons.
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: There are no issues which can be repaired.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: This integration does not have devices.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: todo
|
||||
strict-typing: todo
|
||||
46
homeassistant/components/cloudflare_r2/strings.json
Normal file
46
homeassistant/components/cloudflare_r2/strings.json
Normal file
@@ -0,0 +1,46 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:component::cloudflare_r2::exceptions::cannot_connect::message%]",
|
||||
"invalid_bucket_name": "[%key:component::cloudflare_r2::exceptions::invalid_bucket_name::message%]",
|
||||
"invalid_credentials": "[%key:component::cloudflare_r2::exceptions::invalid_credentials::message%]",
|
||||
"invalid_endpoint_url": "[%key:component::cloudflare_r2::exceptions::invalid_endpoint_url::message%]"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"access_key_id": "Access key ID",
|
||||
"bucket": "Bucket name",
|
||||
"endpoint_url": "Endpoint URL",
|
||||
"prefix": "Folder prefix (optional)",
|
||||
"secret_access_key": "Secret access key"
|
||||
},
|
||||
"data_description": {
|
||||
"access_key_id": "Access key ID to connect to Cloudflare R2 (this is your Account ID)",
|
||||
"bucket": "Bucket must already exist and be writable by the provided credentials.",
|
||||
"endpoint_url": "Cloudflare R2 S3-compatible endpoint.",
|
||||
"prefix": "Optional folder path inside the bucket. Example: backups/homeassistant",
|
||||
"secret_access_key": "Secret access key to connect to Cloudflare R2. See [Docs]({auth_docs_url})"
|
||||
},
|
||||
"title": "Add Cloudflare R2 bucket"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"cannot_connect": {
|
||||
"message": "Cannot connect to endpoint"
|
||||
},
|
||||
"invalid_bucket_name": {
|
||||
"message": "Invalid bucket name"
|
||||
},
|
||||
"invalid_credentials": {
|
||||
"message": "Bucket cannot be accessed using provided access key ID and secret."
|
||||
},
|
||||
"invalid_endpoint_url": {
|
||||
"message": "Invalid endpoint URL. Please enter a valid Cloudflare R2 endpoint URL."
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -21,7 +21,7 @@ async def fetch_latest_carbon_intensity(
|
||||
em: ElectricityMaps,
|
||||
config: Mapping[str, Any],
|
||||
) -> HomeAssistantCarbonIntensityResponse:
|
||||
"""Fetch the latest carbon intensity based on country code or location coordinates."""
|
||||
"""Fetch the latest carbon intensity based on zone key or location coordinates."""
|
||||
request: CoordinatesRequest | ZoneRequest = CoordinatesRequest(
|
||||
lat=config.get(CONF_LATITUDE, hass.config.latitude),
|
||||
lon=config.get(CONF_LONGITUDE, hass.config.longitude),
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
},
|
||||
"error": {
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"no_data": "No data is available for the location you have selected.",
|
||||
"no_data": "No data is available for the location or zone you have selected.",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
@@ -17,20 +17,20 @@
|
||||
},
|
||||
"country": {
|
||||
"data": {
|
||||
"country_code": "Country code"
|
||||
"country_code": "Zone key"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::access_token%]"
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]"
|
||||
}
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::access_token%]",
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
"location": "[%key:common::config_flow::data::location%]"
|
||||
},
|
||||
"description": "Visit the [Electricity Maps page]({register_link}) to request a token."
|
||||
"description": "Visit the [Electricity Maps app]({register_link}) to request an API key."
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -40,7 +40,7 @@
|
||||
"name": "CO2 intensity",
|
||||
"state_attributes": {
|
||||
"country_code": {
|
||||
"name": "Country code"
|
||||
"name": "Zone key"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -58,7 +58,7 @@
|
||||
"location": {
|
||||
"options": {
|
||||
"specify_coordinates": "Specify coordinates",
|
||||
"specify_country_code": "Specify country code",
|
||||
"specify_country_code": "Specify zone key",
|
||||
"use_home_location": "Use home location"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.CLIMATE,
|
||||
Platform.SELECT,
|
||||
]
|
||||
|
||||
|
||||
|
||||
105
homeassistant/components/compit/icons.json
Normal file
105
homeassistant/components/compit/icons.json
Normal file
@@ -0,0 +1,105 @@
|
||||
{
|
||||
"entity": {
|
||||
"select": {
|
||||
"aero_by_pass": {
|
||||
"default": "mdi:valve",
|
||||
"state": {
|
||||
"off": "mdi:valve-closed",
|
||||
"on": "mdi:valve-open"
|
||||
}
|
||||
},
|
||||
"buffer_mode": {
|
||||
"default": "mdi:database",
|
||||
"state": {
|
||||
"disabled": "mdi:water-boiler-off",
|
||||
"schedule": "mdi:calendar-clock"
|
||||
}
|
||||
},
|
||||
"dhw_circulation": {
|
||||
"default": "mdi:pump",
|
||||
"state": {
|
||||
"disabled": "mdi:pump-off",
|
||||
"schedule": "mdi:calendar-clock"
|
||||
}
|
||||
},
|
||||
"heating_source_of_correction": {
|
||||
"default": "mdi:tune-variant",
|
||||
"state": {
|
||||
"disabled": "mdi:cancel",
|
||||
"nano_nr_1": "mdi:thermostat-box",
|
||||
"nano_nr_2": "mdi:thermostat-box",
|
||||
"nano_nr_3": "mdi:thermostat-box",
|
||||
"nano_nr_4": "mdi:thermostat-box",
|
||||
"nano_nr_5": "mdi:thermostat-box",
|
||||
"no_corrections": "mdi:cancel",
|
||||
"schedule": "mdi:calendar-clock",
|
||||
"thermostat": "mdi:thermostat"
|
||||
}
|
||||
},
|
||||
"language": {
|
||||
"default": "mdi:translate"
|
||||
},
|
||||
"mixer_mode": {
|
||||
"default": "mdi:valve",
|
||||
"state": {
|
||||
"disabled": "mdi:cancel",
|
||||
"nano_nr_1": "mdi:thermostat-box",
|
||||
"nano_nr_2": "mdi:thermostat-box",
|
||||
"nano_nr_3": "mdi:thermostat-box",
|
||||
"nano_nr_4": "mdi:thermostat-box",
|
||||
"nano_nr_5": "mdi:thermostat-box",
|
||||
"schedule": "mdi:calendar-clock",
|
||||
"thermostat": "mdi:thermostat"
|
||||
}
|
||||
},
|
||||
"mixer_mode_zone": {
|
||||
"default": "mdi:valve",
|
||||
"state": {
|
||||
"disabled": "mdi:cancel",
|
||||
"nano_nr_1": "mdi:thermostat-box",
|
||||
"nano_nr_2": "mdi:thermostat-box",
|
||||
"nano_nr_3": "mdi:thermostat-box",
|
||||
"nano_nr_4": "mdi:thermostat-box",
|
||||
"nano_nr_5": "mdi:thermostat-box",
|
||||
"schedule": "mdi:calendar-clock",
|
||||
"thermostat": "mdi:thermostat"
|
||||
}
|
||||
},
|
||||
"nano_work_mode": {
|
||||
"default": "mdi:cog-outline",
|
||||
"state": {
|
||||
"christmas": "mdi:pine-tree",
|
||||
"manual_0": "mdi:home-floor-0",
|
||||
"manual_1": "mdi:home-floor-1",
|
||||
"manual_2": "mdi:home-floor-2",
|
||||
"manual_3": "mdi:home-floor-3",
|
||||
"out_of_home": "mdi:home-export-outline",
|
||||
"schedule": "mdi:calendar-clock"
|
||||
}
|
||||
},
|
||||
"operating_mode": {
|
||||
"default": "mdi:cog",
|
||||
"state": {
|
||||
"disabled": "mdi:cog-off",
|
||||
"eco": "mdi:leaf"
|
||||
}
|
||||
},
|
||||
"solarcomp_operating_mode": {
|
||||
"default": "mdi:heating-coil",
|
||||
"state": {
|
||||
"de_icing": "mdi:snowflake-melt",
|
||||
"disabled": "mdi:cancel",
|
||||
"holiday": "mdi:beach"
|
||||
}
|
||||
},
|
||||
"work_mode": {
|
||||
"default": "mdi:cog-outline",
|
||||
"state": {
|
||||
"cooling": "mdi:snowflake-thermometer",
|
||||
"summer": "mdi:weather-sunny",
|
||||
"winter": "mdi:snowflake"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["compit"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["compit-inext-api==0.6.0"]
|
||||
"requirements": ["compit-inext-api==0.8.0"]
|
||||
}
|
||||
|
||||
@@ -73,10 +73,7 @@ rules:
|
||||
This integration does not have any entities that should disabled by default.
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations:
|
||||
status: exempt
|
||||
comment: |
|
||||
There is no need for icon translations.
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
stale-devices: todo
|
||||
|
||||
432
homeassistant/components/compit/select.py
Normal file
432
homeassistant/components/compit/select.py
Normal file
@@ -0,0 +1,432 @@
|
||||
"""Select platform for Compit integration."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from compit_inext_api.consts import CompitParameter
|
||||
|
||||
from homeassistant.components.select import SelectEntity, SelectEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, MANUFACTURER_NAME
|
||||
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
class CompitDeviceDescription:
    """Immutable description of a Compit device and its selectable parameters."""

    # Human-readable device/model name, also used as the HA device model.
    name: str
    # Device parameter -> select entity description exposing that parameter.
    parameters: dict[CompitParameter, SelectEntityDescription]
|
||||
|
||||
|
||||
# Static catalog of every selectable Compit parameter.
# Each entry's `options` are translation slugs that must match the
# corresponding `state` keys in strings.json / icons.json; `key` is the
# raw parameter code used to look the value up in the device state.
DESCRIPTIONS: dict[CompitParameter, SelectEntityDescription] = {
    CompitParameter.LANGUAGE: SelectEntityDescription(
        key=CompitParameter.LANGUAGE.value,
        translation_key="language",
        options=[
            "polish",
            "english",
        ],
    ),
    CompitParameter.AEROKONFBYPASS: SelectEntityDescription(
        key=CompitParameter.AEROKONFBYPASS.value,
        translation_key="aero_by_pass",
        options=[
            "off",
            "auto",
            "on",
        ],
    ),
    CompitParameter.NANO_MODE: SelectEntityDescription(
        key=CompitParameter.NANO_MODE.value,
        translation_key="nano_work_mode",
        options=[
            "manual_3",
            "manual_2",
            "manual_1",
            "manual_0",
            "schedule",
            "christmas",
            "out_of_home",
        ],
    ),
    CompitParameter.R900_OPERATING_MODE: SelectEntityDescription(
        key=CompitParameter.R900_OPERATING_MODE.value,
        translation_key="operating_mode",
        options=[
            "disabled",
            "eco",
            "hybrid",
        ],
    ),
    CompitParameter.SOLAR_COMP_OPERATING_MODE: SelectEntityDescription(
        key=CompitParameter.SOLAR_COMP_OPERATING_MODE.value,
        translation_key="solarcomp_operating_mode",
        options=[
            "auto",
            "de_icing",
            "holiday",
            "disabled",
        ],
    ),
    CompitParameter.R490_OPERATING_MODE: SelectEntityDescription(
        key=CompitParameter.R490_OPERATING_MODE.value,
        translation_key="operating_mode",
        options=[
            "disabled",
            "eco",
            "hybrid",
        ],
    ),
    CompitParameter.WORK_MODE: SelectEntityDescription(
        key=CompitParameter.WORK_MODE.value,
        translation_key="work_mode",
        options=[
            "winter",
            "summer",
            "cooling",
        ],
    ),
    CompitParameter.R470_OPERATING_MODE: SelectEntityDescription(
        key=CompitParameter.R470_OPERATING_MODE.value,
        translation_key="operating_mode",
        options=[
            "disabled",
            "auto",
            "eco",
        ],
    ),
    CompitParameter.HEATING_SOURCE_OF_CORRECTION: SelectEntityDescription(
        key=CompitParameter.HEATING_SOURCE_OF_CORRECTION.value,
        translation_key="heating_source_of_correction",
        options=[
            "no_corrections",
            "schedule",
            "thermostat",
            "nano_nr_1",
            "nano_nr_2",
            "nano_nr_3",
            "nano_nr_4",
            "nano_nr_5",
        ],
    ),
    # Zone mixer descriptions share a translation key and differ only in
    # the {zone} placeholder.
    CompitParameter.BIOMAX_MIXER_MODE_ZONE_1: SelectEntityDescription(
        key=CompitParameter.BIOMAX_MIXER_MODE_ZONE_1.value,
        translation_key="mixer_mode_zone",
        options=[
            "disabled",
            "without_thermostat",
            "schedule",
            "thermostat",
            "nano_nr_1",
            "nano_nr_2",
            "nano_nr_3",
            "nano_nr_4",
            "nano_nr_5",
        ],
        translation_placeholders={"zone": "1"},
    ),
    CompitParameter.BIOMAX_MIXER_MODE_ZONE_2: SelectEntityDescription(
        key=CompitParameter.BIOMAX_MIXER_MODE_ZONE_2.value,
        translation_key="mixer_mode_zone",
        options=[
            "disabled",
            "without_thermostat",
            "schedule",
            "thermostat",
            "nano_nr_1",
            "nano_nr_2",
            "nano_nr_3",
            "nano_nr_4",
            "nano_nr_5",
        ],
        translation_placeholders={"zone": "2"},
    ),
    CompitParameter.DHW_CIRCULATION_MODE: SelectEntityDescription(
        key=CompitParameter.DHW_CIRCULATION_MODE.value,
        translation_key="dhw_circulation",
        options=[
            "disabled",
            "constant",
            "schedule",
        ],
    ),
    CompitParameter.BIOMAX_HEATING_SOURCE_OF_CORRECTION: SelectEntityDescription(
        key=CompitParameter.BIOMAX_HEATING_SOURCE_OF_CORRECTION.value,
        translation_key="heating_source_of_correction",
        options=[
            "disabled",
            "no_corrections",
            "schedule",
            "thermostat",
            "nano_nr_1",
            "nano_nr_2",
            "nano_nr_3",
            "nano_nr_4",
            "nano_nr_5",
        ],
    ),
    CompitParameter.MIXER_MODE: SelectEntityDescription(
        key=CompitParameter.MIXER_MODE.value,
        translation_key="mixer_mode",
        options=[
            "no_corrections",
            "schedule",
            "thermostat",
            "nano_nr_1",
            "nano_nr_2",
            "nano_nr_3",
            "nano_nr_4",
            "nano_nr_5",
        ],
    ),
    CompitParameter.R480_OPERATING_MODE: SelectEntityDescription(
        key=CompitParameter.R480_OPERATING_MODE.value,
        translation_key="operating_mode",
        options=[
            "disabled",
            "eco",
            "hybrid",
        ],
    ),
    CompitParameter.BUFFER_MODE: SelectEntityDescription(
        key=CompitParameter.BUFFER_MODE.value,
        translation_key="buffer_mode",
        options=[
            "schedule",
            "manual",
            "disabled",
        ],
    ),
}
|
||||
|
||||
|
||||
def _device_description(
    name: str, *parameters: CompitParameter
) -> CompitDeviceDescription:
    """Build a device description from entries of the DESCRIPTIONS catalog."""
    return CompitDeviceDescription(
        name=name,
        parameters={parameter: DESCRIPTIONS[parameter] for parameter in parameters},
    )


# Map of Compit device class code -> device description with the select
# parameters that device class supports.
DEVICE_DEFINITIONS: dict[int, CompitDeviceDescription] = {
    223: _device_description(
        "Nano Color 2",
        CompitParameter.LANGUAGE,
        CompitParameter.AEROKONFBYPASS,
    ),
    12: _device_description(
        "Nano Color",
        CompitParameter.LANGUAGE,
        CompitParameter.AEROKONFBYPASS,
    ),
    7: _device_description(
        "Nano One",
        CompitParameter.LANGUAGE,
        CompitParameter.NANO_MODE,
    ),
    224: _device_description(
        "R 900",
        CompitParameter.R900_OPERATING_MODE,
    ),
    45: _device_description(
        "SolarComp971",
        CompitParameter.SOLAR_COMP_OPERATING_MODE,
    ),
    99: _device_description(
        "SolarComp971C",
        CompitParameter.SOLAR_COMP_OPERATING_MODE,
    ),
    44: _device_description(
        "SolarComp 951",
        CompitParameter.SOLAR_COMP_OPERATING_MODE,
    ),
    92: _device_description(
        "r490",
        CompitParameter.R490_OPERATING_MODE,
        CompitParameter.WORK_MODE,
    ),
    34: _device_description(
        "r470",
        CompitParameter.R470_OPERATING_MODE,
        CompitParameter.HEATING_SOURCE_OF_CORRECTION,
    ),
    201: _device_description(
        "BioMax775",
        CompitParameter.BIOMAX_MIXER_MODE_ZONE_1,
        CompitParameter.BIOMAX_MIXER_MODE_ZONE_2,
        CompitParameter.DHW_CIRCULATION_MODE,
    ),
    36: _device_description(
        "BioMax742",
        CompitParameter.BIOMAX_HEATING_SOURCE_OF_CORRECTION,
        CompitParameter.BIOMAX_MIXER_MODE_ZONE_1,
        CompitParameter.DHW_CIRCULATION_MODE,
    ),
    75: _device_description(
        "BioMax772",
        CompitParameter.BIOMAX_MIXER_MODE_ZONE_1,
        CompitParameter.BIOMAX_MIXER_MODE_ZONE_2,
        CompitParameter.DHW_CIRCULATION_MODE,
    ),
    5: _device_description(
        "R350 T3",
        CompitParameter.MIXER_MODE,
    ),
    215: _device_description(
        "R480",
        CompitParameter.R480_OPERATING_MODE,
        CompitParameter.BUFFER_MODE,
    ),
}
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: CompitConfigEntry,
    async_add_devices: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Compit select entities from a config entry."""

    coordinator = entry.runtime_data
    entities: list[CompitSelect] = []

    for dev_id, device in coordinator.connector.all_devices.items():
        definition = DEVICE_DEFINITIONS.get(device.definition.code)
        if not definition:
            # Device class has no select parameters defined.
            continue

        # Only expose parameters the device actually reports in its state.
        reported_codes = {param.code for param in device.state.params}
        for parameter, description in definition.parameters.items():
            if description.key not in reported_codes:
                continue
            entities.append(
                CompitSelect(
                    coordinator,
                    dev_id,
                    definition.name,
                    parameter,
                    description,
                )
            )

    async_add_devices(entities)
|
||||
|
||||
|
||||
class CompitSelect(CoordinatorEntity[CompitDataUpdateCoordinator], SelectEntity):
    """Select entity exposing one configurable parameter of a Compit device."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: CompitDataUpdateCoordinator,
        device_id: int,
        device_name: str,
        parameter_code: CompitParameter,
        entity_description: SelectEntityDescription,
    ) -> None:
        """Initialize the select entity."""
        super().__init__(coordinator)
        self.entity_description = entity_description
        self.device_id = device_id
        self.parameter_code = parameter_code
        self._attr_unique_id = f"{device_id}_{entity_description.key}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, str(device_id))},
            manufacturer=MANUFACTURER_NAME,
            model=device_name,
            name=device_name,
        )

    @property
    def available(self) -> bool:
        """Return True while the coordinator is healthy and the device is known."""
        if not super().available:
            return False
        return self.coordinator.connector.get_device(self.device_id) is not None

    @property
    def current_option(self) -> str | None:
        """Return the option currently reported for this parameter."""
        connector = self.coordinator.connector
        return connector.get_current_option(self.device_id, self.parameter_code)

    async def async_select_option(self, option: str) -> None:
        """Send the chosen option to the device and update HA state."""
        connector = self.coordinator.connector
        await connector.select_device_option(
            self.device_id, self.parameter_code, option
        )
        self.async_write_ha_state()
|
||||
@@ -31,5 +31,120 @@
|
||||
"title": "Connect to Compit iNext"
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"select": {
|
||||
"aero_by_pass": {
|
||||
"name": "Bypass",
|
||||
"state": {
|
||||
"auto": "[%key:common::state::auto%]",
|
||||
"off": "[%key:common::state::off%]",
|
||||
"on": "[%key:common::state::on%]"
|
||||
}
|
||||
},
|
||||
"buffer_mode": {
|
||||
"name": "Buffer mode",
|
||||
"state": {
|
||||
"disabled": "[%key:common::state::disabled%]",
|
||||
"manual": "[%key:common::state::manual%]",
|
||||
"schedule": "Schedule"
|
||||
}
|
||||
},
|
||||
"dhw_circulation": {
|
||||
"name": "Domestic hot water circulation",
|
||||
"state": {
|
||||
"constant": "Constant",
|
||||
"disabled": "[%key:common::state::disabled%]",
|
||||
"schedule": "Schedule"
|
||||
}
|
||||
},
|
||||
"heating_source_of_correction": {
|
||||
"name": "Heating source of correction",
|
||||
"state": {
|
||||
"disabled": "[%key:common::state::disabled%]",
|
||||
"nano_nr_1": "Nano 1",
|
||||
"nano_nr_2": "Nano 2",
|
||||
"nano_nr_3": "Nano 3",
|
||||
"nano_nr_4": "Nano 4",
|
||||
"nano_nr_5": "Nano 5",
|
||||
"no_corrections": "No corrections",
|
||||
"schedule": "Schedule",
|
||||
"thermostat": "Thermostat"
|
||||
}
|
||||
},
|
||||
"language": {
|
||||
"name": "Language",
|
||||
"state": {
|
||||
"english": "English",
|
||||
"polish": "Polish"
|
||||
}
|
||||
},
|
||||
"mixer_mode": {
|
||||
"name": "Mixer mode",
|
||||
"state": {
|
||||
"nano_nr_1": "Nano 1",
|
||||
"nano_nr_2": "Nano 2",
|
||||
"nano_nr_3": "Nano 3",
|
||||
"nano_nr_4": "Nano 4",
|
||||
"nano_nr_5": "Nano 5",
|
||||
"no_corrections": "No corrections",
|
||||
"schedule": "Schedule",
|
||||
"thermostat": "Thermostat"
|
||||
}
|
||||
},
|
||||
"mixer_mode_zone": {
|
||||
"name": "Zone {zone} mixer mode",
|
||||
"state": {
|
||||
"disabled": "[%key:common::state::disabled%]",
|
||||
"nano_nr_1": "Nano 1",
|
||||
"nano_nr_2": "Nano 2",
|
||||
"nano_nr_3": "Nano 3",
|
||||
"nano_nr_4": "Nano 4",
|
||||
"nano_nr_5": "Nano 5",
|
||||
"no_corrections": "No corrections",
|
||||
"schedule": "Schedule",
|
||||
"thermostat": "Thermostat",
|
||||
"without_thermostat": "Without thermostat"
|
||||
}
|
||||
},
|
||||
"nano_work_mode": {
|
||||
"name": "Nano work mode",
|
||||
"state": {
|
||||
"christmas": "Christmas",
|
||||
"manual_0": "Manual 0",
|
||||
"manual_1": "Manual 1",
|
||||
"manual_2": "Manual 2",
|
||||
"manual_3": "Manual 3",
|
||||
"out_of_home": "Out of home",
|
||||
"schedule": "Schedule"
|
||||
}
|
||||
},
|
||||
"operating_mode": {
|
||||
"name": "Operating mode",
|
||||
"state": {
|
||||
"auto": "[%key:common::state::auto%]",
|
||||
"disabled": "[%key:common::state::disabled%]",
|
||||
"eco": "Eco",
|
||||
"hybrid": "Hybrid"
|
||||
}
|
||||
},
|
||||
"solarcomp_operating_mode": {
|
||||
"name": "Operating mode",
|
||||
"state": {
|
||||
"auto": "[%key:common::state::auto%]",
|
||||
"de_icing": "De-icing",
|
||||
"disabled": "[%key:common::state::disabled%]",
|
||||
"holiday": "Holiday"
|
||||
}
|
||||
},
|
||||
"work_mode": {
|
||||
"name": "Current season",
|
||||
"state": {
|
||||
"cooling": "Cooling",
|
||||
"summer": "Summer",
|
||||
"winter": "Winter"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,12 +58,13 @@ C4_TO_HA_HVAC_MODE = {
|
||||
|
||||
HA_TO_C4_HVAC_MODE = {v: k for k, v in C4_TO_HA_HVAC_MODE.items()}
|
||||
|
||||
# Map Control4 HVAC state to Home Assistant HVAC action
|
||||
# Map the five known Control4 HVAC states to Home Assistant HVAC actions
|
||||
C4_TO_HA_HVAC_ACTION = {
|
||||
"heating": HVACAction.HEATING,
|
||||
"cooling": HVACAction.COOLING,
|
||||
"idle": HVACAction.IDLE,
|
||||
"off": HVACAction.OFF,
|
||||
"heat": HVACAction.HEATING,
|
||||
"cool": HVACAction.COOLING,
|
||||
"dry": HVACAction.DRYING,
|
||||
"fan": HVACAction.FAN,
|
||||
}
|
||||
|
||||
|
||||
@@ -236,7 +237,10 @@ class Control4Climate(Control4Entity, ClimateEntity):
|
||||
if c4_state is None:
|
||||
return None
|
||||
# Convert state to lowercase for mapping
|
||||
return C4_TO_HA_HVAC_ACTION.get(str(c4_state).lower())
|
||||
action = C4_TO_HA_HVAC_ACTION.get(str(c4_state).lower())
|
||||
if action is None:
|
||||
_LOGGER.debug("Unknown HVAC state received from Control4: %s", c4_state)
|
||||
return action
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
|
||||
@@ -335,20 +335,18 @@ def _get_config_intents(config: ConfigType, hass_config_path: str) -> dict[str,
|
||||
"""Return config intents."""
|
||||
intents = config.get(DOMAIN, {}).get("intents", {})
|
||||
return {
|
||||
"intents": {
|
||||
intent_name: {
|
||||
"data": [
|
||||
{
|
||||
"sentences": sentences,
|
||||
"metadata": {
|
||||
METADATA_CUSTOM_SENTENCE: True,
|
||||
METADATA_CUSTOM_FILE: hass_config_path,
|
||||
},
|
||||
}
|
||||
]
|
||||
}
|
||||
for intent_name, sentences in intents.items()
|
||||
intent_name: {
|
||||
"data": [
|
||||
{
|
||||
"sentences": sentences,
|
||||
"metadata": {
|
||||
METADATA_CUSTOM_SENTENCE: True,
|
||||
METADATA_CUSTOM_FILE: hass_config_path,
|
||||
},
|
||||
}
|
||||
]
|
||||
}
|
||||
for intent_name, sentences in intents.items()
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
import dataclasses
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
@@ -18,7 +19,7 @@ from homeassistant.core import (
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, intent, singleton
|
||||
|
||||
from .const import DATA_COMPONENT, HOME_ASSISTANT_AGENT
|
||||
from .const import DATA_COMPONENT, HOME_ASSISTANT_AGENT, IntentSource
|
||||
from .entity import ConversationEntity
|
||||
from .models import (
|
||||
AbstractConversationAgent,
|
||||
@@ -34,9 +35,11 @@ from .trace import (
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
TRIGGER_INTENT_NAME_PREFIX = "HassSentenceTrigger"
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .default_agent import DefaultAgent
|
||||
from .trigger import TriggerDetails
|
||||
from .trigger import TRIGGER_CALLBACK_TYPE
|
||||
|
||||
|
||||
@singleton.singleton("conversation_agent")
|
||||
@@ -139,6 +142,10 @@ async def async_converse(
|
||||
return result
|
||||
|
||||
|
||||
type IntentSourceConfig = dict[str, dict[str, Any]]
|
||||
type IntentsCallback = Callable[[dict[IntentSource, IntentSourceConfig]], None]
|
||||
|
||||
|
||||
class AgentManager:
|
||||
"""Class to manage conversation agents."""
|
||||
|
||||
@@ -147,8 +154,13 @@ class AgentManager:
|
||||
self.hass = hass
|
||||
self._agents: dict[str, AbstractConversationAgent] = {}
|
||||
self.default_agent: DefaultAgent | None = None
|
||||
self.config_intents: dict[str, Any] = {}
|
||||
self.triggers_details: list[TriggerDetails] = []
|
||||
self._intents: dict[IntentSource, IntentSourceConfig] = {
|
||||
IntentSource.CONFIG: {"intents": {}},
|
||||
IntentSource.TRIGGER: {"intents": {}},
|
||||
}
|
||||
self._intents_subscribers: list[IntentsCallback] = []
|
||||
self._trigger_callbacks: dict[int, TRIGGER_CALLBACK_TYPE] = {}
|
||||
self._trigger_callback_counter: int = 0
|
||||
|
||||
@callback
|
||||
def async_get_agent(self, agent_id: str) -> AbstractConversationAgent | None:
|
||||
@@ -200,27 +212,75 @@ class AgentManager:
|
||||
|
||||
async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
|
||||
"""Set up the default agent."""
|
||||
agent.update_config_intents(self.config_intents)
|
||||
agent.update_triggers(self.triggers_details)
|
||||
self.default_agent = agent
|
||||
|
||||
@callback
|
||||
def subscribe_intents(self, subscriber: IntentsCallback) -> CALLBACK_TYPE:
|
||||
"""Subscribe to intents updates.
|
||||
|
||||
The subscriber callback is called immediately with all intent sources
|
||||
and whenever intents are updated (only with the changed source).
|
||||
"""
|
||||
subscriber(self._intents)
|
||||
self._intents_subscribers.append(subscriber)
|
||||
|
||||
@callback
|
||||
def unsubscribe() -> None:
|
||||
"""Unsubscribe from intents updates."""
|
||||
self._intents_subscribers.remove(subscriber)
|
||||
|
||||
return unsubscribe
|
||||
|
||||
def _notify_intents_subscribers(self, source: IntentSource) -> None:
|
||||
"""Notify all intents subscribers of a change to a specific source."""
|
||||
update = {source: self._intents[source]}
|
||||
for subscriber in self._intents_subscribers:
|
||||
subscriber(update)
|
||||
|
||||
def update_config_intents(self, intents: dict[str, Any]) -> None:
|
||||
"""Update config intents."""
|
||||
self.config_intents = intents
|
||||
if self.default_agent is not None:
|
||||
self.default_agent.update_config_intents(intents)
|
||||
self._intents[IntentSource.CONFIG]["intents"] = intents
|
||||
self._notify_intents_subscribers(IntentSource.CONFIG)
|
||||
|
||||
def register_trigger(self, trigger_details: TriggerDetails) -> CALLBACK_TYPE:
|
||||
def register_trigger(
|
||||
self, sentences: list[str], trigger_callback: TRIGGER_CALLBACK_TYPE
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Register a trigger."""
|
||||
self.triggers_details.append(trigger_details)
|
||||
if self.default_agent is not None:
|
||||
self.default_agent.update_triggers(self.triggers_details)
|
||||
trigger_id = self._trigger_callback_counter
|
||||
self._trigger_callback_counter += 1
|
||||
trigger_intent_name = f"{TRIGGER_INTENT_NAME_PREFIX}{trigger_id}"
|
||||
|
||||
trigger_intents = self._intents[IntentSource.TRIGGER]
|
||||
trigger_intents["intents"][trigger_intent_name] = {
|
||||
"data": [{"sentences": sentences}]
|
||||
}
|
||||
self._trigger_callbacks[trigger_id] = trigger_callback
|
||||
self._notify_intents_subscribers(IntentSource.TRIGGER)
|
||||
|
||||
@callback
|
||||
def unregister_trigger() -> None:
|
||||
"""Unregister the trigger."""
|
||||
self.triggers_details.remove(trigger_details)
|
||||
if self.default_agent is not None:
|
||||
self.default_agent.update_triggers(self.triggers_details)
|
||||
del trigger_intents["intents"][trigger_intent_name]
|
||||
del self._trigger_callbacks[trigger_id]
|
||||
self._notify_intents_subscribers(IntentSource.TRIGGER)
|
||||
|
||||
return unregister_trigger
|
||||
|
||||
@property
|
||||
def trigger_sentences(self) -> list[str]:
|
||||
"""Get all trigger sentences."""
|
||||
sentences: list[str] = []
|
||||
trigger_intents = self._intents[IntentSource.TRIGGER]
|
||||
for trigger_intent in trigger_intents.get("intents", {}).values():
|
||||
for data in trigger_intent.get("data", []):
|
||||
sentences.extend(data.get("sentences", []))
|
||||
return sentences
|
||||
|
||||
def get_trigger_callback(
|
||||
self, trigger_intent_name: str
|
||||
) -> TRIGGER_CALLBACK_TYPE | None:
|
||||
"""Get the callback for a trigger from its intent name."""
|
||||
if not trigger_intent_name.startswith(TRIGGER_INTENT_NAME_PREFIX):
|
||||
return None
|
||||
trigger_id = int(trigger_intent_name[len(TRIGGER_INTENT_NAME_PREFIX) :])
|
||||
return self._trigger_callbacks.get(trigger_id)
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user