Mirror of https://github.com/home-assistant/core.git, synced 2026-02-28 04:51:41 +01:00

Comparing gen-dashbo... with knx-name-r... (424 commits)
(Commit list: 424 commits identified by abbreviated SHA1 only; the author, message, and date columns are empty in this capture.)
@@ -22,6 +22,7 @@ base_platforms: &base_platforms
- homeassistant/components/calendar/**
- homeassistant/components/camera/**
- homeassistant/components/climate/**
- homeassistant/components/conversation/**
- homeassistant/components/cover/**
- homeassistant/components/date/**
- homeassistant/components/datetime/**
@@ -53,6 +54,7 @@ base_platforms: &base_platforms
- homeassistant/components/update/**
- homeassistant/components/vacuum/**
- homeassistant/components/valve/**
- homeassistant/components/wake_word/**
- homeassistant/components/water_heater/**
- homeassistant/components/weather/**

@@ -70,7 +72,6 @@ components: &components
- homeassistant/components/cloud/**
- homeassistant/components/config/**
- homeassistant/components/configurator/**
- homeassistant/components/conversation/**
- homeassistant/components/demo/**
- homeassistant/components/device_automation/**
- homeassistant/components/dhcp/**

@@ -60,7 +60,13 @@
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
},
"[json][jsonc][yaml]": {
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[yaml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"json.schemas": [
.github/dependabot.yml (vendored, 2 changes)
@@ -9,3 +9,5 @@ updates:
labels:
- dependency
- github_actions
cooldown:
default-days: 7
.github/workflows/builder.yml (vendored, 145 changes)
@@ -18,11 +18,19 @@ env:
BASE_IMAGE_VERSION: "2026.01.0"
ARCHITECTURES: '["amd64", "aarch64"]'

permissions: {}

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
init:
name: Initialize build
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
permissions:
contents: read # To check out the repository
outputs:
version: ${{ steps.version.outputs.version }}
channel: ${{ steps.version.outputs.channel }}
@@ -31,6 +39,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -39,16 +49,16 @@ jobs:

- name: Get information
id: info
uses: home-assistant/actions/helpers/info@master
uses: home-assistant/actions/helpers/info@master # zizmor: ignore[unpinned-uses]

- name: Get version
id: version
uses: home-assistant/actions/helpers/version@master
uses: home-assistant/actions/helpers/version@master # zizmor: ignore[unpinned-uses]
with:
type: ${{ env.BUILD_TYPE }}

- name: Verify version
uses: home-assistant/actions/helpers/verify-version@master
uses: home-assistant/actions/helpers/verify-version@master # zizmor: ignore[unpinned-uses]
with:
ignore-dev: true

@@ -82,9 +92,9 @@ jobs:
needs: init
runs-on: ${{ matrix.os }}
permissions:
contents: read
packages: write
id-token: write
contents: read # To check out the repository
packages: write # To push to GHCR
id-token: write # For cosign signing
strategy:
fail-fast: false
matrix:
@@ -97,6 +107,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false

- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
@@ -131,11 +143,12 @@ jobs:
shell: bash
env:
UV_PRERELEASE: allow
VERSION: ${{ needs.init.outputs.version }}
run: |
python3 -m pip install "$(grep '^uv' < requirements.txt)"
uv pip install packaging tomli
uv pip install .
python3 script/version_bump.py nightly --set-nightly-version "${{ needs.init.outputs.version }}"
python3 script/version_bump.py nightly --set-nightly-version "${VERSION}"

if [[ "$(ls home_assistant_frontend*.whl)" =~ ^home_assistant_frontend-(.*)-py3-none-any.whl$ ]]; then
echo "Found frontend wheel, setting version to: ${BASH_REMATCH[1]}"
@@ -165,7 +178,7 @@ jobs:
sed -i "s|home-assistant-intents==.*|home-assistant-intents==${BASH_REMATCH[1]}|" \
homeassistant/package_constraints.txt

sed -i "s|home-assistant-intents==.*||" requirements_all.txt
sed -i "s|home-assistant-intents==.*||" requirements_all.txt requirements.txt
fi

- name: Download translations
@@ -181,7 +194,7 @@ jobs:
- name: Write meta info file
shell: bash
run: |
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
echo "${GITHUB_SHA};${GITHUB_REF};${GITHUB_EVENT_NAME};${GITHUB_ACTOR}" > rootfs/OFFICIAL_IMAGE

- name: Login to GitHub Container Registry
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
@@ -190,8 +203,7 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- &install_cosign
name: Install Cosign
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
cosign-release: "v2.5.3"
@@ -202,30 +214,36 @@ jobs:
- name: Build variables
id: vars
shell: bash
env:
ARCH: ${{ matrix.arch }}
run: |
echo "base_image=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant-base:${{ env.BASE_IMAGE_VERSION }}" >> "$GITHUB_OUTPUT"
echo "cache_image=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:latest" >> "$GITHUB_OUTPUT"
echo "base_image=ghcr.io/home-assistant/${ARCH}-homeassistant-base:${BASE_IMAGE_VERSION}" >> "$GITHUB_OUTPUT"
echo "cache_image=ghcr.io/home-assistant/${ARCH}-homeassistant:latest" >> "$GITHUB_OUTPUT"
echo "created=$(date --rfc-3339=seconds --utc)" >> "$GITHUB_OUTPUT"

- name: Verify base image signature
env:
BASE_IMAGE: ${{ steps.vars.outputs.base_image }}
run: |
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp "https://github.com/home-assistant/docker/.*" \
"${{ steps.vars.outputs.base_image }}"
"${BASE_IMAGE}"

- name: Verify cache image signature
id: cache
continue-on-error: true
env:
CACHE_IMAGE: ${{ steps.vars.outputs.cache_image }}
run: |
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp "https://github.com/home-assistant/core/.*" \
"${{ steps.vars.outputs.cache_image }}"
"${CACHE_IMAGE}"

- name: Build base image
id: build
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: .
file: ./Dockerfile
@@ -243,8 +261,12 @@ jobs:
org.opencontainers.image.version=${{ needs.init.outputs.version }}

- name: Sign image
env:
ARCH: ${{ matrix.arch }}
VERSION: ${{ needs.init.outputs.version }}
DIGEST: ${{ steps.build.outputs.digest }}
run: |
cosign sign --yes "ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}@${{ steps.build.outputs.digest }}"
cosign sign --yes "ghcr.io/home-assistant/${ARCH}-homeassistant:${VERSION}@${DIGEST}"

build_machine:
name: Build ${{ matrix.machine }} machine core image
@@ -252,9 +274,9 @@ jobs:
needs: ["init", "build_base"]
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
id-token: write
contents: read # To check out the repository
packages: write # To push to GHCR
id-token: write # For cosign signing
strategy:
matrix:
machine:
@@ -275,13 +297,17 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false

- name: Set build additional args
env:
VERSION: ${{ needs.init.outputs.version }}
run: |
# Create general tags
if [[ "${{ needs.init.outputs.version }}" =~ d ]]; then
if [[ "${VERSION}" =~ d ]]; then
echo "BUILD_ARGS=--additional-tag dev" >> $GITHUB_ENV
elif [[ "${{ needs.init.outputs.version }}" =~ b ]]; then
elif [[ "${VERSION}" =~ b ]]; then
echo "BUILD_ARGS=--additional-tag beta" >> $GITHUB_ENV
else
echo "BUILD_ARGS=--additional-tag stable" >> $GITHUB_ENV
@@ -294,9 +320,8 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

# home-assistant/builder doesn't support sha pinning
- name: Build base image
uses: home-assistant/builder@2025.11.0
uses: home-assistant/builder@21bc64d76dad7a5184c67826aab41c6b6f89023a # 2025.11.0
with:
args: |
$BUILD_ARGS \
@@ -310,19 +335,23 @@ jobs:
if: github.repository_owner == 'home-assistant'
needs: ["init", "build_machine"]
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false

- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master
uses: home-assistant/actions/helpers/git-init@master # zizmor: ignore[unpinned-uses]
with:
name: ${{ secrets.GIT_NAME }}
email: ${{ secrets.GIT_EMAIL }}
token: ${{ secrets.GIT_TOKEN }}

- name: Update version file
uses: home-assistant/actions/helpers/version-push@master
uses: home-assistant/actions/helpers/version-push@master # zizmor: ignore[unpinned-uses]
with:
key: "homeassistant[]"
key-description: "Home Assistant Core"
@@ -332,7 +361,7 @@ jobs:

- name: Update version file (stable -> beta)
if: needs.init.outputs.channel == 'stable'
uses: home-assistant/actions/helpers/version-push@master
uses: home-assistant/actions/helpers/version-push@master # zizmor: ignore[unpinned-uses]
with:
key: "homeassistant[]"
key-description: "Home Assistant Core"
@@ -347,15 +376,18 @@ jobs:
needs: ["init", "build_base"]
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
id-token: write
contents: read # To check out the repository
packages: write # To push to GHCR
id-token: write # For cosign signing
strategy:
fail-fast: false
matrix:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- *install_cosign
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
cosign-release: "v2.5.3"

- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
@@ -373,14 +405,17 @@ jobs:

- name: Verify architecture image signatures
shell: bash
env:
ARCHITECTURES: ${{ needs.init.outputs.architectures }}
VERSION: ${{ needs.init.outputs.version }}
run: |
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
for arch in $ARCHS; do
echo "Verifying ${arch} image signature..."
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp https://github.com/home-assistant/core/.* \
"ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
"ghcr.io/home-assistant/${arch}-homeassistant:${VERSION}"
done
echo "✓ All images verified successfully"

@@ -411,16 +446,19 @@ jobs:
- name: Copy architecture images to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
shell: bash
env:
ARCHITECTURES: ${{ needs.init.outputs.architectures }}
VERSION: ${{ needs.init.outputs.version }}
run: |
# Use imagetools to copy image blobs directly between registries
# This preserves provenance/attestations and seems to be much faster than pull/push
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
for arch in $ARCHS; do
echo "Copying ${arch} image to DockerHub..."
for attempt in 1 2 3; do
if docker buildx imagetools create \
--tag "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}" \
"ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"; then
--tag "docker.io/homeassistant/${arch}-homeassistant:${VERSION}" \
"ghcr.io/home-assistant/${arch}-homeassistant:${VERSION}"; then
break
fi
echo "Attempt ${attempt} failed, retrying in 10 seconds..."
@@ -430,23 +468,28 @@ jobs:
exit 1
fi
done
cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${VERSION}"
done

- name: Create and push multi-arch manifests
shell: bash
env:
ARCHITECTURES: ${{ needs.init.outputs.architectures }}
REGISTRY: ${{ matrix.registry }}
VERSION: ${{ needs.init.outputs.version }}
META_TAGS: ${{ steps.meta.outputs.tags }}
run: |
# Build list of architecture images dynamically
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
ARCH_IMAGES=()
for arch in $ARCHS; do
ARCH_IMAGES+=("${{ matrix.registry }}/${arch}-homeassistant:${{ needs.init.outputs.version }}")
ARCH_IMAGES+=("${REGISTRY}/${arch}-homeassistant:${VERSION}")
done

# Build list of all tags for single manifest creation
# Note: Using sep-tags=',' in metadata-action for easier parsing
TAG_ARGS=()
IFS=',' read -ra TAGS <<< "${{ steps.meta.outputs.tags }}"
IFS=',' read -ra TAGS <<< "${META_TAGS}"
for tag in "${TAGS[@]}"; do
TAG_ARGS+=("--tag" "${tag}")
done
@@ -470,12 +513,14 @@ jobs:
needs: ["init", "build_base"]
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
contents: read # To check out the repository
id-token: write # For PyPI trusted publishing
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -509,10 +554,10 @@ jobs:
name: Build and test hassfest image
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
attestations: write
id-token: write
contents: read # To check out the repository
packages: write # To push to GHCR
attestations: write # For build provenance attestation
id-token: write # For build provenance attestation
needs: ["init"]
if: github.repository_owner == 'home-assistant'
env:
@@ -521,6 +566,8 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false

- name: Login to GitHub Container Registry
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
@@ -530,7 +577,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}

- name: Build Docker image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@@ -538,12 +585,12 @@ jobs:
tags: ${{ env.HASSFEST_IMAGE_TAG }}

- name: Run hassfest against core
run: docker run --rm -v ${{ github.workspace }}:/github/workspace ${{ env.HASSFEST_IMAGE_TAG }} --core-path=/github/workspace
run: docker run --rm -v "${GITHUB_WORKSPACE}":/github/workspace "${HASSFEST_IMAGE_TAG}" --core-path=/github/workspace

- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
.github/workflows/ci.yaml (vendored, 879 changes)
File diff suppressed because it is too large.
.github/workflows/codeql.yml (vendored, 14 changes)
@@ -5,6 +5,8 @@ on:
schedule:
- cron: "30 18 * * 4"

permissions: {}

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
@@ -15,20 +17,22 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 360
permissions:
actions: read
contents: read
security-events: write
actions: read # To read workflow information for CodeQL
contents: read # To check out the repository
security-events: write # To upload CodeQL results

steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false

- name: Initialize CodeQL
uses: github/codeql-action/init@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2
uses: github/codeql-action/init@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2
uses: github/codeql-action/analyze@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3
with:
category: "/language:python"
.github/workflows/detect-duplicate-issues.yml (vendored, 13 changes)
@@ -5,13 +5,18 @@ on:
issues:
types: [labeled]

permissions:
issues: write
models: read
permissions: {}

concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number }}

jobs:
detect-duplicates:
name: Detect duplicate issues
runs-on: ubuntu-latest
permissions:
issues: write # To comment on and label issues
models: read # For AI-based duplicate detection

steps:
- name: Check if integration label was added and extract details
@@ -231,7 +236,7 @@ jobs:
- name: Detect duplicates using AI
id: ai_detection
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/ai-inference@a6101c89c6feaecc585efdd8d461f18bb7896f20 # v2.0.5
uses: actions/ai-inference@a380166897b5408b8fb7dddd148142794cb5624a # v2.0.6
with:
model: openai/gpt-4o
system-prompt: |
.github/workflows/detect-non-english-issues.yml (vendored, 13 changes)
@@ -5,13 +5,18 @@ on:
issues:
types: [opened]

permissions:
issues: write
models: read
permissions: {}

concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number }}

jobs:
detect-language:
name: Detect non-English issues
runs-on: ubuntu-latest
permissions:
issues: write # To comment on, label, and close issues
models: read # For AI-based language detection

steps:
- name: Check issue language
@@ -57,7 +62,7 @@ jobs:
- name: Detect language using AI
id: ai_language_detection
if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/ai-inference@a6101c89c6feaecc585efdd8d461f18bb7896f20 # v2.0.5
uses: actions/ai-inference@a380166897b5408b8fb7dddd148142794cb5624a # v2.0.6
with:
model: openai/gpt-4o-mini
system-prompt: |
.github/workflows/lock.yml (vendored, 10 changes)
@@ -5,10 +5,20 @@ on:
schedule:
- cron: "0 * * * *"

permissions: {}

concurrency:
group: ${{ github.workflow }}
cancel-in-progress: true

jobs:
lock:
name: Lock inactive threads
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
permissions:
issues: write # To lock issues
pull-requests: write # To lock pull requests
steps:
- uses: dessant/lock-threads@7266a7ce5c1df01b1c6db85bf8cd86c737dadbe7 # v6.0.0
with:
.github/workflows/restrict-task-creation.yml (vendored, 32 changes)
@@ -5,9 +5,39 @@ on:
issues:
types: [opened]

permissions: {}

concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number }}

jobs:
check-authorization:
add-no-stale:
name: Add no-stale label
runs-on: ubuntu-latest
permissions:
issues: write # To add labels to issues
if: >-
github.event.issue.type.name == 'Task'
|| github.event.issue.type.name == 'Epic'
|| github.event.issue.type.name == 'Opportunity'
steps:
- name: Add no-stale label
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
script: |
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
labels: ['no-stale']
});

check-authorization:
name: Check authorization
runs-on: ubuntu-latest
permissions:
contents: read # To read CODEOWNERS file
issues: write # To comment on, label, and close issues
# Only run if this is a Task issue type (from the issue form)
if: github.event.issue.type.name == 'Task'
steps:
.github/workflows/stale.yml (vendored, 10 changes)
@@ -6,10 +6,20 @@ on:
- cron: "0 * * * *"
workflow_dispatch:

permissions: {}

concurrency:
group: ${{ github.workflow }}
cancel-in-progress: true

jobs:
stale:
name: Mark stale issues and PRs
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
permissions:
issues: write # To label and close stale issues
pull-requests: write # To label and close stale PRs
steps:
# The 60 day stale policy for PRs
# Used for:
.github/workflows/translations.yml (vendored, 11 changes)
@@ -9,6 +9,12 @@ on:
paths:
- "**strings.json"

permissions: {}

concurrency:
group: ${{ github.workflow }}
cancel-in-progress: true

env:
DEFAULT_PYTHON: "3.14.2"

@@ -20,6 +26,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -27,6 +35,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}

- name: Upload Translations
env:
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
run: |
export LOKALISE_TOKEN="${{ secrets.LOKALISE_TOKEN }}"
python3 -m script.translations upload
.github/workflows/wheels.yml (vendored, 62 changes)
@@ -19,6 +19,8 @@ on:
env:
DEFAULT_PYTHON: "3.14.2"

permissions: {}

concurrency:
group: ${{ github.workflow }}-${{ github.ref_name}}
cancel-in-progress: true
@@ -29,9 +31,10 @@ jobs:
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
steps:
- &checkout
name: Checkout the repository
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
@@ -50,7 +53,7 @@ jobs:

- name: Create requirements_diff file
run: |
if [[ ${{ github.event_name }} =~ (schedule|workflow_dispatch) ]]; then
if [[ "${GITHUB_EVENT_NAME}" =~ (schedule|workflow_dispatch) ]]; then
touch requirements_diff.txt
else
curl -s -o requirements_diff.txt https://raw.githubusercontent.com/home-assistant/core/master/requirements.txt
@@ -74,7 +77,7 @@ jobs:
) > .env_file

- name: Upload env_file
uses: &actions-upload-artifact actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: env_file
path: ./.env_file
@@ -82,7 +85,7 @@ jobs:
overwrite: true

- name: Upload requirements_diff
uses: *actions-upload-artifact
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: requirements_diff
path: ./requirements_diff.txt
@@ -94,7 +97,7 @@ jobs:
python -m script.gen_requirements_all ci

- name: Upload requirements_all_wheels
uses: *actions-upload-artifact
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: requirements_all_wheels
path: ./requirements_all_wheels_*.txt
@@ -106,7 +109,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix: &matrix-build
matrix:
abi: ["cp313", "cp314"]
arch: ["amd64", "aarch64"]
include:
@@ -115,17 +118,18 @@ jobs:
- arch: aarch64
os: ubuntu-24.04-arm
steps:
- *checkout
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false

- &download-env-file
name: Download env_file
uses: &actions-download-artifact actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
- name: Download env_file
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: env_file

- &download-requirements-diff
name: Download requirements_diff
uses: *actions-download-artifact
- name: Download requirements_diff
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: requirements_diff

@@ -136,7 +140,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt

- name: Build wheels
uses: &home-assistant-wheels home-assistant/wheels@e5742a69d69f0e274e2689c998900c7d19652c21 # 2025.12.0
uses: home-assistant/wheels@e5742a69d69f0e274e2689c998900c7d19652c21 # 2025.12.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -156,16 +160,32 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix: *matrix-build
matrix:
abi: ["cp313", "cp314"]
arch: ["amd64", "aarch64"]
include:
- arch: amd64
os: ubuntu-latest
- arch: aarch64
os: ubuntu-24.04-arm
steps:
- *checkout
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false

- *download-env-file
- name: Download env_file
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: env_file

- *download-requirements-diff
- name: Download requirements_diff
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: requirements_diff

- name: Download requirements_all_wheels
uses: *actions-download-artifact
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: requirements_all_wheels

@@ -178,7 +198,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt

- name: Build wheels
uses: *home-assistant-wheels
uses: home-assistant/wheels@e5742a69d69f0e274e2689c998900c7d19652c21 # 2025.12.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.15.0
rev: v0.15.1
hooks:
- id: ruff-check
args:
@@ -17,6 +17,12 @@ repos:
- --quiet-level=2
exclude_types: [csv, json, html]
exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
- repo: https://github.com/zizmorcore/zizmor-pre-commit
rev: v1.22.0
hooks:
- id: zizmor
args:
- --pedantic
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0
hooks:
@@ -84,6 +84,7 @@ homeassistant.components.androidtv_remote.*
homeassistant.components.anel_pwrctrl.*
homeassistant.components.anova.*
homeassistant.components.anthemav.*
homeassistant.components.anthropic.*
homeassistant.components.apache_kafka.*
homeassistant.components.apcupsd.*
homeassistant.components.api.*
@@ -242,6 +243,7 @@ homeassistant.components.guardian.*
homeassistant.components.habitica.*
homeassistant.components.hardkernel.*
homeassistant.components.hardware.*
homeassistant.components.hdfury.*
homeassistant.components.heos.*
homeassistant.components.here_travel_time.*
homeassistant.components.history.*
@@ -287,6 +289,7 @@ homeassistant.components.input_button.*
homeassistant.components.input_select.*
homeassistant.components.input_text.*
homeassistant.components.integration.*
homeassistant.components.intelliclima.*
homeassistant.components.intent.*
homeassistant.components.intent_script.*
homeassistant.components.ios.*
@@ -363,7 +366,6 @@ homeassistant.components.my.*
homeassistant.components.mysensors.*
homeassistant.components.myuplink.*
homeassistant.components.nam.*
homeassistant.components.nanoleaf.*
homeassistant.components.nasweb.*
homeassistant.components.neato.*
homeassistant.components.nest.*
@@ -385,6 +387,7 @@ homeassistant.components.ohme.*
homeassistant.components.onboarding.*
homeassistant.components.oncue.*
homeassistant.components.onedrive.*
homeassistant.components.onedrive_for_business.*
homeassistant.components.onewire.*
homeassistant.components.onkyo.*
homeassistant.components.open_meteo.*
@@ -493,6 +496,7 @@ homeassistant.components.smtp.*
homeassistant.components.snooz.*
homeassistant.components.solarlog.*
homeassistant.components.sonarr.*
homeassistant.components.spaceapi.*
homeassistant.components.speedtestdotnet.*
homeassistant.components.spotify.*
homeassistant.components.sql.*
CODEOWNERS (generated, 23 changes)
@@ -15,7 +15,7 @@
.yamllint @home-assistant/core
pyproject.toml @home-assistant/core
requirements_test.txt @home-assistant/core
/.devcontainer/ @home-assistant/core
/.devcontainer/ @home-assistant/core @edenhaus
/.github/ @home-assistant/core
/.vscode/ @home-assistant/core
/homeassistant/*.py @home-assistant/core
@@ -672,6 +672,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/hdmi_cec/ @inytar
/tests/components/hdmi_cec/ @inytar
/homeassistant/components/heatmiser/ @andylockran
/homeassistant/components/hegel/ @boazca
/tests/components/hegel/ @boazca
/homeassistant/components/heos/ @andrewsayre
/tests/components/heos/ @andrewsayre
/homeassistant/components/here_travel_time/ @eifinger
@@ -715,8 +717,10 @@ build.json @home-assistant/supervisor
/tests/components/homekit_controller/ @Jc2k @bdraco
/homeassistant/components/homematic/ @pvizeli
/tests/components/homematic/ @pvizeli
/homeassistant/components/homematicip_cloud/ @hahn-th
/tests/components/homematicip_cloud/ @hahn-th
/homeassistant/components/homematicip_cloud/ @hahn-th @lackas
/tests/components/homematicip_cloud/ @hahn-th @lackas
/homeassistant/components/homevolt/ @danielhiversen
/tests/components/homevolt/ @danielhiversen
/homeassistant/components/homewizard/ @DCSBL
/tests/components/homewizard/ @DCSBL
/homeassistant/components/honeywell/ @rdfurman @mkmer
@@ -758,6 +762,8 @@ build.json @home-assistant/supervisor
/tests/components/icloud/ @Quentame @nzapponi
/homeassistant/components/idasen_desk/ @abmantis
/tests/components/idasen_desk/ @abmantis
/homeassistant/components/idrive_e2/ @patrickvorgers
/tests/components/idrive_e2/ @patrickvorgers
/homeassistant/components/igloohome/ @keithle888
/tests/components/igloohome/ @keithle888
/homeassistant/components/ign_sismologia/ @exxamalte
@@ -802,6 +808,8 @@ build.json @home-assistant/supervisor
/tests/components/insteon/ @teharris1
/homeassistant/components/integration/ @dgomes
/tests/components/integration/ @dgomes
/homeassistant/components/intelliclima/ @dvdinth
/tests/components/intelliclima/ @dvdinth
/homeassistant/components/intellifire/ @jeeftor
/tests/components/intellifire/ @jeeftor
/homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
@@ -1060,6 +1068,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/mqtt/ @emontnemery @jbouwh @bdraco
/tests/components/mqtt/ @emontnemery @jbouwh @bdraco
/homeassistant/components/msteams/ @peroyvind
/homeassistant/components/mta/ @OnFreund
/tests/components/mta/ @OnFreund
/homeassistant/components/mullvad/ @meichthys
/tests/components/mullvad/ @meichthys
/homeassistant/components/music_assistant/ @music-assistant @arturpragacz
@@ -1078,8 +1088,8 @@ build.json @home-assistant/supervisor
/tests/components/nam/ @bieniu
/homeassistant/components/namecheapdns/ @tr4nt0r
/tests/components/namecheapdns/ @tr4nt0r
/homeassistant/components/nanoleaf/ @milanmeu @joostlek
/tests/components/nanoleaf/ @milanmeu @joostlek
/homeassistant/components/nanoleaf/ @milanmeu @joostlek @loebi-ch @JaspervRijbroek @jonathanrobichaud4
/tests/components/nanoleaf/ @milanmeu @joostlek @loebi-ch @JaspervRijbroek @jonathanrobichaud4
/homeassistant/components/nasweb/ @nasWebio
/tests/components/nasweb/ @nasWebio
/homeassistant/components/nederlandse_spoorwegen/ @YarmoM @heindrichpaul
@@ -1172,6 +1182,8 @@ build.json @home-assistant/supervisor
/tests/components/ondilo_ico/ @JeromeHXP
/homeassistant/components/onedrive/ @zweckj
/tests/components/onedrive/ @zweckj
/homeassistant/components/onedrive_for_business/ @zweckj
/tests/components/onedrive_for_business/ @zweckj
/homeassistant/components/onewire/ @garbled1 @epenet
/tests/components/onewire/ @garbled1 @epenet
/homeassistant/components/onkyo/ @arturpragacz @eclair4151
@@ -1565,6 +1577,7 @@ build.json @home-assistant/supervisor
/homeassistant/components/speedtestdotnet/ @rohankapoorcom @engrbm87
/tests/components/speedtestdotnet/ @rohankapoorcom @engrbm87
/homeassistant/components/splunk/ @Bre77
/tests/components/splunk/ @Bre77
/homeassistant/components/spotify/ @frenck @joostlek
/tests/components/spotify/ @frenck @joostlek
/homeassistant/components/sql/ @gjohansson-ST @dougiteixeira
@@ -13,6 +13,7 @@
"microsoft",
"msteams",
"onedrive",
"onedrive_for_business",
"xbox"
]
}
@@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator
from .entity import ActronAirAcEntity, ActronAirZoneEntity
from .entity import ActronAirAcEntity, ActronAirZoneEntity, handle_actron_api_errors

PARALLEL_UPDATES = 0

@@ -136,16 +136,19 @@ class ActronSystemClimate(ActronAirAcEntity, ActronAirClimateEntity):
"""Return the target temperature."""
return self._status.user_aircon_settings.temperature_setpoint_cool_c

@handle_actron_api_errors
async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set a new fan mode."""
api_fan_mode = FAN_MODE_MAPPING_HA_TO_ACTRONAIR.get(fan_mode)
await self._status.user_aircon_settings.set_fan_mode(api_fan_mode)

@handle_actron_api_errors
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set the HVAC mode."""
ac_mode = HVAC_MODE_MAPPING_HA_TO_ACTRONAIR.get(hvac_mode)
await self._status.ac_system.set_system_mode(ac_mode)

@handle_actron_api_errors
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set the temperature."""
temp = kwargs.get(ATTR_TEMPERATURE)
@@ -209,11 +212,13 @@ class ActronZoneClimate(ActronAirZoneEntity, ActronAirClimateEntity):
"""Return the target temperature."""
return self._zone.temperature_setpoint_cool_c

@handle_actron_api_errors
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set the HVAC mode."""
is_enabled = hvac_mode != HVACMode.OFF
await self._zone.enable(is_enabled)

@handle_actron_api_errors
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set the temperature."""
await self._zone.set_temperature(temperature=kwargs.get(ATTR_TEMPERATURE))
@@ -1,7 +1,12 @@
"""Base entity classes for Actron Air integration."""

from actron_neo_api import ActronAirZone
from collections.abc import Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate

from actron_neo_api import ActronAirAPIError, ActronAirZone

from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

@@ -9,6 +14,26 @@ from .const import DOMAIN
from .coordinator import ActronAirSystemCoordinator


def handle_actron_api_errors[_EntityT: ActronAirEntity, **_P](
func: Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]:
"""Decorate Actron Air API calls to handle ActronAirAPIError exceptions."""

@wraps(func)
async def wrapper(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None:
"""Wrap API calls with exception handling."""
try:
await func(self, *args, **kwargs)
except ActronAirAPIError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="api_error",
translation_placeholders={"error": str(err)},
) from err

return wrapper


class ActronAirEntity(CoordinatorEntity[ActronAirSystemCoordinator]):
"""Base class for Actron Air entities."""
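The decorator added above follows a common Home Assistant pattern: entity methods call the vendor library, and any library-specific exception is re-raised as HomeAssistantError so the failure surfaces as a translated message instead of an unhandled traceback. The following is a minimal standalone sketch of that pattern, not the integration's real code; the exception classes and the climate entity here are simplified stand-ins.

```python
# Illustrative sketch of the error-wrapping decorator pattern.
# LibraryError stands in for actron_neo_api.ActronAirAPIError and
# HomeAssistantError for homeassistant.exceptions.HomeAssistantError.
from functools import wraps


class LibraryError(Exception):
    """Stand-in for a vendor library error."""


class HomeAssistantError(Exception):
    """Stand-in for Home Assistant's user-facing error."""


def handle_api_errors(func):
    """Re-raise library errors from an async entity method as HomeAssistantError."""

    @wraps(func)
    async def wrapper(self, *args, **kwargs):
        try:
            await func(self, *args, **kwargs)
        except LibraryError as err:
            raise HomeAssistantError(f"Failed to communicate with device: {err}") from err

    return wrapper


class FakeClimate:
    """Hypothetical entity whose service method talks to the device."""

    @handle_api_errors
    async def async_set_fan_mode(self, fan_mode: str) -> None:
        # Simulate the vendor API failing mid-call.
        raise LibraryError("device unreachable")


# asyncio.run(FakeClimate().async_set_fan_mode("low")) would now raise
# HomeAssistantError instead of leaking the raw library exception.
```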
@@ -26,7 +26,7 @@ rules:
unique-config-entry: done

# Silver
action-exceptions: todo
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
@@ -49,6 +49,9 @@
}
},
"exceptions": {
"api_error": {
"message": "Failed to communicate with Actron Air device: {error}"
},
"auth_error": {
"message": "Authentication failed, please reauthenticate"
},
@@ -10,7 +10,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator
from .entity import ActronAirAcEntity
from .entity import ActronAirAcEntity, handle_actron_api_errors

PARALLEL_UPDATES = 0

@@ -105,10 +105,12 @@ class ActronAirSwitch(ActronAirAcEntity, SwitchEntity):
"""Return true if the switch is on."""
return self.entity_description.is_on_fn(self.coordinator)

@handle_actron_api_errors
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the switch on."""
await self.entity_description.set_fn(self.coordinator, True)

@handle_actron_api_errors
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the switch off."""
await self.entity_description.set_fn(self.coordinator, False)
@@ -20,9 +20,10 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryNotReady, ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType

from .const import (
CONF_FORCE,
@@ -45,6 +46,7 @@ SERVICE_REFRESH_SCHEMA = vol.Schema(
{vol.Optional(CONF_FORCE, default=False): cv.boolean}
)

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
PLATFORMS = [Platform.SENSOR, Platform.SWITCH, Platform.UPDATE]
type AdGuardConfigEntry = ConfigEntry[AdGuardData]

@@ -57,6 +59,69 @@ class AdGuardData:
version: str


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the component."""

def _get_adguard_instances(hass: HomeAssistant) -> list[AdGuardHome]:
"""Get the AdGuardHome instances."""
entries: list[AdGuardConfigEntry] = hass.config_entries.async_loaded_entries(
DOMAIN
)
if not entries:
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="config_entry_not_loaded"
)
return [entry.runtime_data.client for entry in entries]

async def add_url(call: ServiceCall) -> None:
"""Service call to add a new filter subscription to AdGuard Home."""
for adguard in _get_adguard_instances(call.hass):
await adguard.filtering.add_url(
allowlist=False, name=call.data[CONF_NAME], url=call.data[CONF_URL]
)

async def remove_url(call: ServiceCall) -> None:
"""Service call to remove a filter subscription from AdGuard Home."""
for adguard in _get_adguard_instances(call.hass):
await adguard.filtering.remove_url(allowlist=False, url=call.data[CONF_URL])

async def enable_url(call: ServiceCall) -> None:
"""Service call to enable a filter subscription in AdGuard Home."""
for adguard in _get_adguard_instances(call.hass):
await adguard.filtering.enable_url(allowlist=False, url=call.data[CONF_URL])

async def disable_url(call: ServiceCall) -> None:
"""Service call to disable a filter subscription in AdGuard Home."""
for adguard in _get_adguard_instances(call.hass):
await adguard.filtering.disable_url(
allowlist=False, url=call.data[CONF_URL]
)

async def refresh(call: ServiceCall) -> None:
"""Service call to refresh the filter subscriptions in AdGuard Home."""
for adguard in _get_adguard_instances(call.hass):
await adguard.filtering.refresh(
allowlist=False, force=call.data[CONF_FORCE]
)

hass.services.async_register(
DOMAIN, SERVICE_ADD_URL, add_url, schema=SERVICE_ADD_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_REMOVE_URL, remove_url, schema=SERVICE_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_ENABLE_URL, enable_url, schema=SERVICE_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_DISABLE_URL, disable_url, schema=SERVICE_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_REFRESH, refresh, schema=SERVICE_REFRESH_SCHEMA
)
return True


async def async_setup_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> bool:
"""Set up AdGuard Home from a config entry."""
session = async_get_clientsession(hass, entry.data[CONF_VERIFY_SSL])
@@ -79,56 +144,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> b

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

async def add_url(call: ServiceCall) -> None:
"""Service call to add a new filter subscription to AdGuard Home."""
await adguard.filtering.add_url(
allowlist=False, name=call.data[CONF_NAME], url=call.data[CONF_URL]
)

async def remove_url(call: ServiceCall) -> None:
"""Service call to remove a filter subscription from AdGuard Home."""
await adguard.filtering.remove_url(allowlist=False, url=call.data[CONF_URL])

async def enable_url(call: ServiceCall) -> None:
"""Service call to enable a filter subscription in AdGuard Home."""
await adguard.filtering.enable_url(allowlist=False, url=call.data[CONF_URL])

async def disable_url(call: ServiceCall) -> None:
"""Service call to disable a filter subscription in AdGuard Home."""
await adguard.filtering.disable_url(allowlist=False, url=call.data[CONF_URL])

async def refresh(call: ServiceCall) -> None:
"""Service call to refresh the filter subscriptions in AdGuard Home."""
await adguard.filtering.refresh(allowlist=False, force=call.data[CONF_FORCE])

hass.services.async_register(
DOMAIN, SERVICE_ADD_URL, add_url, schema=SERVICE_ADD_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_REMOVE_URL, remove_url, schema=SERVICE_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_ENABLE_URL, enable_url, schema=SERVICE_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_DISABLE_URL, disable_url, schema=SERVICE_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_REFRESH, refresh, schema=SERVICE_REFRESH_SCHEMA
)

return True


async def async_unload_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> bool:
"""Unload AdGuard Home config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if not hass.config_entries.async_loaded_entries(DOMAIN):
# This is the last loaded instance of AdGuard, deregister any services
hass.services.async_remove(DOMAIN, SERVICE_ADD_URL)
hass.services.async_remove(DOMAIN, SERVICE_REMOVE_URL)
hass.services.async_remove(DOMAIN, SERVICE_ENABLE_URL)
hass.services.async_remove(DOMAIN, SERVICE_DISABLE_URL)
hass.services.async_remove(DOMAIN, SERVICE_REFRESH)

return unload_ok
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -76,6 +76,11 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"config_entry_not_loaded": {
|
||||
"message": "Config entry not loaded."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"add_url": {
|
||||
"description": "Adds a new filter subscription to AdGuard Home.",
|
||||
|
||||
108
homeassistant/components/advantage_air/quality_scale.yaml
Normal file
108
homeassistant/components/advantage_air/quality_scale.yaml
Normal file
@@ -0,0 +1,108 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: todo
|
||||
comment: https://developers.home-assistant.io/blog/2025/09/25/entity-services-api-changes/
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules:
|
||||
status: todo
|
||||
comment: |
|
||||
Move coordinator from __init__.py to coordinator.py.
|
||||
Consider using entity descriptions for binary_sensor and switch.
|
||||
Consider simplifying climate supported features flow.
|
||||
config-flow-test-coverage:
|
||||
status: todo
|
||||
comment: |
|
||||
Add mock_setup_entry common fixture.
|
||||
Test unique_id of the entry in happy flow.
|
||||
Split duplicate entry test from happy flow, use mock_config_entry.
|
||||
Error flow should end in CREATE_ENTRY to test recovery.
|
||||
Add data_description for ip_address (and port) to strings.json - tests fail with:
|
||||
"Translation not found for advantage_air: config.step.user.data_description.ip_address"
|
||||
config-flow:
|
||||
status: todo
|
||||
comment: Data descriptions missing
|
||||
dependency-transparency: done
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: todo
|
||||
docs-removal-instructions: todo
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: Entities do not explicitly subscribe to events.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data:
|
||||
status: done
|
||||
comment: Consider extending coordinator to access API via coordinator and remove extra dataclass.
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: No options to be set.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable:
|
||||
status: todo
|
||||
comment: MyZone temp entity should be unavailable when MyZone is disabled rather than returning None.
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: todo
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: Integration connects to local device without authentication.
|
||||
test-coverage:
|
||||
status: todo
|
||||
comment: |
|
||||
Patch the library instead of mocking at integration level.
|
||||
Split binary sensor tests into multiple tests (enable entities etc).
|
||||
Split tests into Creation (right entities with right values), Actions (right library calls), and Other behaviors.
|
||||
|
||||
# Gold
|
||||
devices:
|
||||
status: todo
|
||||
comment: Consider making every zone its own device for better naming and room assignment. Breaking change to split cover entities to separate devices.
|
||||
diagnostics: done
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: Device is a generic Android device (android-xxxxxxxx) indistinguishable from other Android devices, not discoverable.
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: Check mDNS, DHCP, SSDP confirmed not feasible. Device is a generic Android device (android-xxxxxxxx) indistinguishable from other Android devices.
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: AC zones are static per unit and configured on the device itself.
|
||||
entity-category: done
|
||||
entity-device-class:
|
||||
status: todo
|
||||
comment: Consider using UPDATE device class for app update binary sensor instead of custom.
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: todo
|
||||
exception-translations:
|
||||
status: todo
|
||||
comment: UpdateFailed in the coordinator
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: Integration does not raise repair issues.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: Zones are part of the AC unit, not separate removable devices.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
||||
@@ -29,3 +29,24 @@ COUNTRY_DOMAINS = {
|
||||
|
||||
CATEGORY_SENSORS = "sensors"
|
||||
CATEGORY_NOTIFICATIONS = "notifications"
|
||||
|
||||
# Map service translation keys to Alexa API
|
||||
INFO_SKILLS_MAPPING = {
|
||||
"calendar_today": "Alexa.Calendar.PlayToday",
|
||||
"calendar_tomorrow": "Alexa.Calendar.PlayTomorrow",
|
||||
"calendar_next": "Alexa.Calendar.PlayNext",
|
||||
"date": "Alexa.Date.Play",
|
||||
"time": "Alexa.Time.Play",
|
||||
"national_news": "Alexa.News.NationalNews",
|
||||
"flash_briefing": "Alexa.FlashBriefing.Play",
|
||||
"traffic": "Alexa.Traffic.Play",
|
||||
"weather": "Alexa.Weather.Play",
|
||||
"cleanup": "Alexa.CleanUp.Play",
|
||||
"good_morning": "Alexa.GoodMorning.Play",
|
||||
"sing_song": "Alexa.SingASong.Play",
|
||||
"fun_fact": "Alexa.FunFact.Play",
|
||||
"tell_joke": "Alexa.Joke.Play",
|
||||
"tell_story": "Alexa.TellStory.Play",
|
||||
"im_home": "Alexa.ImHome.Play",
|
||||
"goodnight": "Alexa.GoodNight.Play",
|
||||
}
|
||||
|
||||
@@ -1,5 +1,15 @@
|
||||
{
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"voc_index": {
|
||||
"default": "mdi:molecule"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"send_info_skill": {
|
||||
"service": "mdi:information"
|
||||
},
|
||||
"send_sound": {
|
||||
"service": "mdi:cast-audio"
|
||||
},
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioamazondevices==11.1.3"]
|
||||
"requirements": ["aioamazondevices==12.0.0"]
|
||||
}
|
||||
|
||||
@@ -20,7 +20,13 @@ from homeassistant.components.sensor import (
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import LIGHT_LUX, UnitOfTemperature
|
||||
from homeassistant.const import (
|
||||
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
LIGHT_LUX,
|
||||
PERCENTAGE,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
@@ -77,6 +83,41 @@ SENSORS: Final = (
|
||||
native_unit_of_measurement=LIGHT_LUX,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
AmazonSensorEntityDescription(
|
||||
key="Humidity",
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
AmazonSensorEntityDescription(
|
||||
key="PM10",
|
||||
device_class=SensorDeviceClass.PM10,
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
AmazonSensorEntityDescription(
|
||||
key="PM25",
|
||||
device_class=SensorDeviceClass.PM25,
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
AmazonSensorEntityDescription(
|
||||
key="CO",
|
||||
device_class=SensorDeviceClass.CO,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
AmazonSensorEntityDescription(
|
||||
key="VOC",
|
||||
# No device class as this is an index not a concentration
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="voc_index",
|
||||
),
|
||||
AmazonSensorEntityDescription(
|
||||
key="Air Quality",
|
||||
device_class=SensorDeviceClass.AQI,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
)
|
||||
NOTIFICATIONS: Final = (
|
||||
AmazonNotificationEntityDescription(
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Support for services."""
|
||||
|
||||
from aioamazondevices.const.metadata import ALEXA_INFO_SKILLS
|
||||
from aioamazondevices.const.sounds import SOUNDS_LIST
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -9,13 +10,15 @@ from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import DOMAIN, INFO_SKILLS_MAPPING
|
||||
from .coordinator import AmazonConfigEntry
|
||||
|
||||
ATTR_TEXT_COMMAND = "text_command"
|
||||
ATTR_SOUND = "sound"
|
||||
ATTR_INFO_SKILL = "info_skill"
|
||||
SERVICE_TEXT_COMMAND = "send_text_command"
|
||||
SERVICE_SOUND_NOTIFICATION = "send_sound"
|
||||
SERVICE_INFO_SKILL = "send_info_skill"
|
||||
|
||||
SCHEMA_SOUND_SERVICE = vol.Schema(
|
||||
{
|
||||
@@ -29,6 +32,12 @@ SCHEMA_CUSTOM_COMMAND = vol.Schema(
|
||||
vol.Required(ATTR_DEVICE_ID): cv.string,
|
||||
}
|
||||
)
|
||||
SCHEMA_INFO_SKILL = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_INFO_SKILL): cv.string,
|
||||
vol.Required(ATTR_DEVICE_ID): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
@@ -86,6 +95,17 @@ async def _async_execute_action(call: ServiceCall, attribute: str) -> None:
|
||||
await coordinator.api.call_alexa_text_command(
|
||||
coordinator.data[device.serial_number], value
|
||||
)
|
||||
elif attribute == ATTR_INFO_SKILL:
|
||||
info_skill = INFO_SKILLS_MAPPING.get(value)
|
||||
if info_skill not in ALEXA_INFO_SKILLS:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_info_skill_value",
|
||||
translation_placeholders={"info_skill": value},
|
||||
)
|
||||
await coordinator.api.call_alexa_info_skill(
|
||||
coordinator.data[device.serial_number], info_skill
|
||||
)
|
||||
|
||||
|
||||
async def async_send_sound_notification(call: ServiceCall) -> None:
|
||||
@@ -98,6 +118,11 @@ async def async_send_text_command(call: ServiceCall) -> None:
|
||||
await _async_execute_action(call, ATTR_TEXT_COMMAND)
|
||||
|
||||
|
||||
async def async_send_info_skill(call: ServiceCall) -> None:
|
||||
"""Send an info skill command to a AmazonDevice."""
|
||||
await _async_execute_action(call, ATTR_INFO_SKILL)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the Amazon Devices integration."""
|
||||
@@ -112,5 +137,10 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
async_send_text_command,
|
||||
SCHEMA_CUSTOM_COMMAND,
|
||||
),
|
||||
(
|
||||
SERVICE_INFO_SKILL,
|
||||
async_send_info_skill,
|
||||
SCHEMA_INFO_SKILL,
|
||||
),
|
||||
):
|
||||
hass.services.async_register(DOMAIN, service_name, method, schema=schema)
|
||||
|
||||
@@ -67,3 +67,36 @@ send_sound:
|
||||
- squeaky_12
|
||||
- zap_01
|
||||
translation_key: sound
|
||||
|
||||
send_info_skill:
|
||||
fields:
|
||||
device_id:
|
||||
required: true
|
||||
selector:
|
||||
device:
|
||||
integration: alexa_devices
|
||||
info_skill:
|
||||
required: true
|
||||
example: date
|
||||
default: date
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- calendar_today
|
||||
- calendar_tomorrow
|
||||
- calendar_next
|
||||
- date
|
||||
- time
|
||||
- national_news
|
||||
- flash_briefing
|
||||
- traffic
|
||||
- weather
|
||||
- cleanup
|
||||
- good_morning
|
||||
- sing_song
|
||||
- fun_fact
|
||||
- tell_joke
|
||||
- tell_story
|
||||
- im_home
|
||||
- goodnight
|
||||
translation_key: info_skill
|
||||
|
||||
@@ -75,6 +75,9 @@
|
||||
},
|
||||
"timer": {
|
||||
"name": "Next timer"
|
||||
},
|
||||
"voc_index": {
|
||||
"name": "Volatile organic compounds index"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
@@ -102,11 +105,35 @@
|
||||
"invalid_device_id": {
|
||||
"message": "Invalid device ID specified: {device_id}"
|
||||
},
|
||||
"invalid_info_skill_value": {
|
||||
"message": "Invalid info skill {info_skill} specified"
|
||||
},
|
||||
"invalid_sound_value": {
|
||||
"message": "Invalid sound {sound} specified"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"info_skill": {
|
||||
"options": {
|
||||
"calendar_next": "Calendar: Next event",
|
||||
"calendar_today": "Calendar: Today's Calendar",
|
||||
"calendar_tomorrow": "Calendar: Tomorrow's Calendar",
|
||||
"cleanup": "Encourage me to clean up",
|
||||
"date": "Date",
|
||||
"flash_briefing": "Flash Briefing",
|
||||
"fun_fact": "Tell me a fun fact",
|
||||
"good_morning": "Good morning",
|
||||
"goodnight": "Wish me a good night",
|
||||
"im_home": "Welcome me home",
|
||||
"national_news": "National News",
|
||||
"sing_song": "Sing a song",
|
||||
"tell_joke": "Tell me a joke",
|
||||
"tell_story": "Tell me a story",
|
||||
"time": "Time",
|
||||
"traffic": "Traffic",
|
||||
"weather": "Weather"
|
||||
}
|
||||
},
|
||||
"sound": {
|
||||
"options": {
|
||||
"air_horn_03": "Air horn",
|
||||
@@ -154,6 +181,20 @@
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"send_info_skill": {
|
||||
"description": "Sends an info skill command to a device",
|
||||
"fields": {
|
||||
"device_id": {
|
||||
"description": "[%key:component::alexa_devices::common::device_id_description%]",
|
||||
"name": "Device"
|
||||
},
|
||||
"info_skill": {
|
||||
"description": "The info skill command to send.",
|
||||
"name": "Alexa info skill command"
|
||||
}
|
||||
},
|
||||
"name": "Send info skill command"
|
||||
},
|
||||
"send_sound": {
|
||||
"description": "Sends a sound to a device",
|
||||
"fields": {
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
from amberelectric.models.channel import ChannelType
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import ATTR_CONFIG_ENTRY_ID
|
||||
from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
@@ -13,6 +12,7 @@ from homeassistant.core import (
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import service
|
||||
from homeassistant.helpers.selector import ConfigEntrySelector
|
||||
from homeassistant.util.json import JsonValueType
|
||||
|
||||
@@ -37,23 +37,6 @@ GET_FORECASTS_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
def async_get_entry(hass: HomeAssistant, config_entry_id: str) -> AmberConfigEntry:
|
||||
"""Get the Amber config entry."""
|
||||
if not (entry := hass.config_entries.async_get_entry(config_entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="integration_not_found",
|
||||
translation_placeholders={"target": config_entry_id},
|
||||
)
|
||||
if entry.state is not ConfigEntryState.LOADED:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_loaded",
|
||||
translation_placeholders={"target": entry.title},
|
||||
)
|
||||
return entry
|
||||
|
||||
|
||||
def get_forecasts(channel_type: str, data: dict) -> list[JsonValueType]:
|
||||
"""Return an array of forecasts."""
|
||||
results: list[JsonValueType] = []
|
||||
@@ -109,7 +92,9 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
|
||||
async def handle_get_forecasts(call: ServiceCall) -> ServiceResponse:
|
||||
channel_type = call.data[ATTR_CHANNEL_TYPE]
|
||||
entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID])
|
||||
entry: AmberConfigEntry = service.async_get_config_entry(
|
||||
hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY_ID]
|
||||
)
|
||||
coordinator = entry.runtime_data
|
||||
forecasts = get_forecasts(channel_type, coordinator.data)
|
||||
return {"forecasts": forecasts}
|
||||
|
||||
@@ -25,12 +25,6 @@
|
||||
"exceptions": {
|
||||
"channel_not_found": {
|
||||
"message": "There is no {channel_type} channel at this site."
|
||||
},
|
||||
"integration_not_found": {
|
||||
"message": "Config entry \"{target}\" not found in registry."
|
||||
},
|
||||
"not_loaded": {
|
||||
"message": "{target} is not loaded."
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
|
||||
@@ -33,13 +33,19 @@ from .const import ATTR_LAST_DATA, TYPE_SOLARRADIATION, TYPE_SOLARRADIATION_LX
|
||||
from .entity import AmbientWeatherEntity
|
||||
|
||||
TYPE_24HOURRAININ = "24hourrainin"
|
||||
TYPE_AQI_PM10_24H_AQIN = "aqi_pm10_24h_aqin"
|
||||
TYPE_AQI_PM10_AQIN = "aqi_pm10_aqin"
|
||||
TYPE_AQI_PM25 = "aqi_pm25"
|
||||
TYPE_AQI_PM25_24H = "aqi_pm25_24h"
|
||||
TYPE_AQI_PM25_24H_AQIN = "aqi_pm25_24h_aqin"
|
||||
TYPE_AQI_PM25_AQIN = "aqi_pm25_aqin"
|
||||
TYPE_AQI_PM25_IN = "aqi_pm25_in"
|
||||
TYPE_AQI_PM25_IN_24H = "aqi_pm25_in_24h"
|
||||
TYPE_BAROMABSIN = "baromabsin"
|
||||
TYPE_BAROMRELIN = "baromrelin"
|
||||
TYPE_CO2 = "co2"
|
||||
TYPE_CO2_IN_24H_AQIN = "co2_in_24h_aqin"
|
||||
TYPE_CO2_IN_AQIN = "co2_in_aqin"
|
||||
TYPE_DAILYRAININ = "dailyrainin"
|
||||
TYPE_DEWPOINT = "dewPoint"
|
||||
TYPE_EVENTRAININ = "eventrainin"
|
||||
@@ -57,17 +63,23 @@ TYPE_HUMIDITY7 = "humidity7"
|
||||
TYPE_HUMIDITY8 = "humidity8"
|
||||
TYPE_HUMIDITY9 = "humidity9"
|
||||
TYPE_HUMIDITYIN = "humidityin"
|
||||
TYPE_LASTLIGHTNING = "lightning_time"
|
||||
TYPE_LASTLIGHTNING_DISTANCE = "lightning_distance"
|
||||
TYPE_LASTRAIN = "lastRain"
|
||||
TYPE_LIGHTNING_PER_DAY = "lightning_day"
|
||||
TYPE_LIGHTNING_PER_HOUR = "lightning_hour"
|
||||
TYPE_LASTLIGHTNING_DISTANCE = "lightning_distance"
|
||||
TYPE_LASTLIGHTNING = "lightning_time"
|
||||
TYPE_MAXDAILYGUST = "maxdailygust"
|
||||
TYPE_MONTHLYRAININ = "monthlyrainin"
|
||||
TYPE_PM_IN_HUMIDITY_AQIN = "pm_in_humidity_aqin"
|
||||
TYPE_PM_IN_TEMP_AQIN = "pm_in_temp_aqin"
|
||||
TYPE_PM10_IN_24H_AQIN = "pm10_in_24h_aqin"
|
||||
TYPE_PM10_IN_AQIN = "pm10_in_aqin"
|
||||
TYPE_PM25 = "pm25"
|
||||
TYPE_PM25_24H = "pm25_24h"
|
||||
TYPE_PM25_IN = "pm25_in"
|
||||
TYPE_PM25_IN_24H = "pm25_in_24h"
|
||||
TYPE_PM25_IN_24H_AQIN = "pm25_in_24h_aqin"
|
||||
TYPE_PM25_IN_AQIN = "pm25_in_aqin"
|
||||
TYPE_SOILHUM1 = "soilhum1"
|
||||
TYPE_SOILHUM10 = "soilhum10"
|
||||
TYPE_SOILHUM2 = "soilhum2"
|
||||
@@ -78,8 +90,8 @@ TYPE_SOILHUM6 = "soilhum6"
|
||||
TYPE_SOILHUM7 = "soilhum7"
|
||||
TYPE_SOILHUM8 = "soilhum8"
|
||||
TYPE_SOILHUM9 = "soilhum9"
|
||||
TYPE_SOILTEMP1F = "soiltemp1f"
|
||||
TYPE_SOILTEMP10F = "soiltemp10f"
|
||||
TYPE_SOILTEMP1F = "soiltemp1f"
|
||||
TYPE_SOILTEMP2F = "soiltemp2f"
|
||||
TYPE_SOILTEMP3F = "soiltemp3f"
|
||||
TYPE_SOILTEMP4F = "soiltemp4f"
|
||||
@@ -143,6 +155,86 @@ SENSOR_DESCRIPTIONS = (
|
||||
translation_key="pm25_indoor_aqi_24h_average",
|
||||
device_class=SensorDeviceClass.AQI,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_PM25_IN_AQIN,
|
||||
translation_key="pm25_indoor_aqin",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM25,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_PM25_IN_24H_AQIN,
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
translation_key="pm25_indoor_24h_aqin",
|
||||
device_class=SensorDeviceClass.PM25,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_PM10_IN_AQIN,
|
||||
translation_key="pm10_indoor_aqin",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM10,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_PM10_IN_24H_AQIN,
|
||||
translation_key="pm10_indoor_24h_aqin",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM10,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_CO2_IN_AQIN,
|
||||
translation_key="co2_indoor_aqin",
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
device_class=SensorDeviceClass.CO2,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_CO2_IN_24H_AQIN,
|
||||
translation_key="co2_indoor_24h_aqin",
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
device_class=SensorDeviceClass.CO2,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_PM_IN_TEMP_AQIN,
|
||||
translation_key="pm_indoor_temp_aqin",
|
||||
native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_PM_IN_HUMIDITY_AQIN,
|
||||
translation_key="pm_indoor_humidity_aqin",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_AQI_PM25_AQIN,
|
||||
translation_key="pm25_aqi_aqin",
|
||||
device_class=SensorDeviceClass.AQI,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_AQI_PM25_24H_AQIN,
|
||||
translation_key="pm25_aqi_24h_aqin",
|
||||
device_class=SensorDeviceClass.AQI,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_AQI_PM10_AQIN,
|
||||
translation_key="pm10_aqi_aqin",
|
||||
device_class=SensorDeviceClass.AQI,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_AQI_PM10_24H_AQIN,
|
||||
translation_key="pm10_aqi_24h_aqin",
|
||||
device_class=SensorDeviceClass.AQI,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_BAROMABSIN,
|
||||
translation_key="absolute_pressure",
|
||||
|
||||
@@ -156,6 +156,12 @@
|
||||
"absolute_pressure": {
|
||||
"name": "Absolute pressure"
|
||||
},
|
||||
"co2_indoor_24h_aqin": {
|
||||
"name": "CO2 Indoor 24h Average AQIN"
|
||||
},
|
||||
"co2_indoor_aqin": {
|
||||
"name": "CO2 Indoor AQIN"
|
||||
},
|
||||
"daily_rain": {
|
||||
"name": "Daily rain"
|
||||
},
|
||||
@@ -228,18 +234,39 @@
|
||||
"monthly_rain": {
|
||||
"name": "Monthly rain"
|
||||
},
|
||||
"pm10_aqi_24h_aqin": {
|
||||
"name": "PM10 Indoor AQI 24h Average AQIN"
|
||||
},
|
||||
"pm10_aqi_aqin": {
|
||||
"name": "PM10 Indoor AQI AQIN"
|
||||
},
|
||||
"pm10_indoor_24h_aqin": {
|
||||
"name": "PM10 Indoor 24h Average AQIN"
|
||||
},
|
||||
"pm10_indoor_aqin": {
|
||||
"name": "PM10 Indoor AQIN"
|
||||
},
|
||||
"pm25_24h_average": {
|
||||
"name": "PM2.5 24 hour average"
|
||||
},
|
||||
"pm25_aqi": {
|
||||
"name": "PM2.5 AQI"
|
||||
},
|
||||
"pm25_aqi_24h_aqin": {
|
||||
"name": "PM2.5 Indoor AQI 24h Average AQIN"
|
||||
},
|
||||
"pm25_aqi_24h_average": {
|
||||
"name": "PM2.5 AQI 24 hour average"
|
||||
},
|
||||
"pm25_aqi_aqin": {
|
||||
"name": "PM2.5 Indoor AQI AQIN"
|
||||
},
|
||||
"pm25_indoor": {
|
||||
"name": "PM2.5 indoor"
|
||||
},
|
||||
"pm25_indoor_24h_aqin": {
|
||||
"name": "PM2.5 Indoor 24h AQIN"
|
||||
},
|
||||
"pm25_indoor_24h_average": {
|
||||
"name": "PM2.5 indoor 24 hour average"
|
||||
},
|
||||
@@ -249,6 +276,15 @@
|
||||
"pm25_indoor_aqi_24h_average": {
|
||||
"name": "PM2.5 indoor AQI"
|
||||
},
|
||||
"pm25_indoor_aqin": {
|
||||
"name": "PM2.5 Indoor AQIN"
|
||||
},
|
||||
"pm_indoor_humidity_aqin": {
|
||||
"name": "Indoor Humidity AQIN"
|
||||
},
|
||||
"pm_indoor_temp_aqin": {
|
||||
"name": "Indoor Temperature AQIN"
|
||||
},
|
||||
"relative_pressure": {
|
||||
"name": "Relative pressure"
|
||||
},
|
||||
|
||||
@@ -73,31 +73,21 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
started = False
|
||||
|
||||
async def _async_handle_labs_update(
|
||||
event: Event[labs.EventLabsUpdatedData],
|
||||
event_data: labs.EventLabsUpdatedData,
|
||||
) -> None:
|
||||
"""Handle labs feature toggle."""
|
||||
await analytics.save_preferences({ATTR_SNAPSHOTS: event.data["enabled"]})
|
||||
await analytics.save_preferences({ATTR_SNAPSHOTS: event_data["enabled"]})
|
||||
if started:
|
||||
await analytics.async_schedule()
|
||||
|
||||
@callback
|
||||
def _async_labs_event_filter(event_data: labs.EventLabsUpdatedData) -> bool:
|
||||
"""Filter labs events for this integration's snapshot feature."""
|
||||
return (
|
||||
event_data["domain"] == DOMAIN
|
||||
and event_data["preview_feature"] == LABS_SNAPSHOT_FEATURE
|
||||
)
|
||||
|
||||
async def start_schedule(_event: Event) -> None:
|
||||
"""Start the send schedule after the started event."""
|
||||
nonlocal started
|
||||
started = True
|
||||
await analytics.async_schedule()
|
||||
|
||||
hass.bus.async_listen(
|
||||
labs.EVENT_LABS_UPDATED,
|
||||
_async_handle_labs_update,
|
||||
event_filter=_async_labs_event_filter,
|
||||
labs.async_subscribe_preview_feature(
|
||||
hass, DOMAIN, LABS_SNAPSHOT_FEATURE, _async_handle_labs_update
|
||||
)
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_schedule)
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ import anthropic
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
|
||||
from homeassistant.const import CONF_API_KEY, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
@@ -47,8 +47,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
|
||||
try:
|
||||
await client.models.list(timeout=10.0)
|
||||
except anthropic.AuthenticationError as err:
|
||||
LOGGER.error("Invalid API key: %s", err)
|
||||
return False
|
||||
raise ConfigEntryAuthFailed(err) from err
|
||||
except anthropic.AnthropicError as err:
|
||||
raise ConfigEntryNotReady(err) from err
|
||||
|
||||
@@ -77,7 +76,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
|
||||
"""Unload Anthropic."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
@@ -105,7 +104,7 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
|
||||
if not any(entry.version == 1 for entry in entries):
|
||||
return
|
||||
|
||||
api_keys_entries: dict[str, tuple[ConfigEntry, bool]] = {}
|
||||
api_keys_entries: dict[str, tuple[AnthropicConfigEntry, bool]] = {}
|
||||
entity_registry = er.async_get(hass)
|
||||
device_registry = dr.async_get(hass)
|
||||
|
||||
|
||||
@@ -4,9 +4,9 @@ from __future__ import annotations
|
||||
|
||||
from json import JSONDecodeError
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from homeassistant.components import ai_task, conversation
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
@@ -14,12 +14,15 @@ from homeassistant.util.json import json_loads
|
||||
|
||||
from .entity import AnthropicBaseLLMEntity
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import AnthropicConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: AnthropicConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up AI Task entities."""
|
||||
@@ -50,7 +53,9 @@ class AnthropicTaskEntity(
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> ai_task.GenDataTaskResult:
|
||||
"""Handle a generate data task."""
|
||||
await self._async_handle_chat_log(chat_log, task.name, task.structure)
|
||||
await self._async_handle_chat_log(
|
||||
chat_log, task.name, task.structure, max_iterations=1000
|
||||
)
|
||||
|
||||
if not isinstance(chat_log.content[-1], conversation.AssistantContent):
|
||||
raise HomeAssistantError(
|
||||
|
||||
@@ -2,10 +2,11 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
from typing import Any, cast
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
|
||||
import anthropic
|
||||
import voluptuous as vol
|
||||
@@ -13,7 +14,7 @@ from voluptuous_openapi import convert
|
||||
|
||||
from homeassistant.components.zone import ENTITY_ID_HOME
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
SOURCE_REAUTH,
|
||||
ConfigEntryState,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
@@ -65,6 +66,9 @@ from .const import (
|
||||
WEB_SEARCH_UNSUPPORTED_MODELS,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import AnthropicConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
@@ -162,6 +166,10 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
if self.source == SOURCE_REAUTH:
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(), data_updates=user_input
|
||||
)
|
||||
return self.async_create_entry(
|
||||
title="Claude",
|
||||
data=user_input,
|
||||
@@ -182,13 +190,34 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors or None
|
||||
step_id="user",
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
errors=errors or None,
|
||||
description_placeholders={
|
||||
"instructions_url": "https://www.home-assistant.io/integrations/anthropic/#generating-an-api-key",
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Perform reauth upon an API authentication error."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Dialog that informs the user that reauth is required."""
|
||||
if not user_input:
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm", data_schema=STEP_USER_DATA_SCHEMA
|
||||
)
|
||||
return await self.async_step_user(user_input)
|
||||
|
||||
@classmethod
|
||||
@callback
|
||||
def async_get_supported_subentry_types(
|
||||
cls, config_entry: ConfigEntry
|
||||
cls, config_entry: AnthropicConfigEntry
|
||||
) -> dict[str, type[ConfigSubentryFlow]]:
|
||||
"""Return subentries supported by this integration."""
|
||||
return {
|
||||
@@ -491,22 +520,24 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
"role": "user",
|
||||
"content": "Where are the following coordinates located: "
|
||||
f"({zone_home.attributes[ATTR_LATITUDE]},"
|
||||
f" {zone_home.attributes[ATTR_LONGITUDE]})? Please respond "
|
||||
"only with a JSON object using the following schema:\n"
|
||||
f"{convert(location_schema)}",
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "{", # hints the model to skip any preamble
|
||||
},
|
||||
f" {zone_home.attributes[ATTR_LONGITUDE]})?",
|
||||
}
|
||||
],
|
||||
max_tokens=cast(int, DEFAULT[CONF_MAX_TOKENS]),
|
||||
output_config={
|
||||
"format": {
|
||||
"type": "json_schema",
|
||||
"schema": {
|
||||
**convert(location_schema),
|
||||
"additionalProperties": False,
|
||||
},
|
||||
}
|
||||
},
|
||||
)
|
||||
_LOGGER.debug("Model response: %s", response.content)
|
||||
location_data = location_schema(
|
||||
json.loads(
|
||||
"{"
|
||||
+ "".join(
|
||||
"".join(
|
||||
block.text
|
||||
for block in response.content
|
||||
if isinstance(block, anthropic.types.TextBlock)
|
||||
|
||||
@@ -56,6 +56,15 @@ NON_ADAPTIVE_THINKING_MODELS = [
|
||||
"claude-3",
|
||||
]
|
||||
|
||||
UNSUPPORTED_STRUCTURED_OUTPUT_MODELS = [
|
||||
"claude-opus-4-1",
|
||||
"claude-opus-4-0",
|
||||
"claude-opus-4-20250514",
|
||||
"claude-sonnet-4-0",
|
||||
"claude-sonnet-4-20250514",
|
||||
"claude-3",
|
||||
]
|
||||
|
||||
WEB_SEARCH_UNSUPPORTED_MODELS = [
|
||||
"claude-3-haiku",
|
||||
"claude-3-opus",
|
||||
|
||||
@@ -20,6 +20,7 @@ from anthropic.types import (
|
||||
DocumentBlockParam,
|
||||
ImageBlockParam,
|
||||
InputJSONDelta,
|
||||
JSONOutputFormatParam,
|
||||
MessageDeltaUsage,
|
||||
MessageParam,
|
||||
MessageStreamEvent,
|
||||
@@ -94,6 +95,7 @@ from .const import (
|
||||
MIN_THINKING_BUDGET,
|
||||
NON_ADAPTIVE_THINKING_MODELS,
|
||||
NON_THINKING_MODELS,
|
||||
UNSUPPORTED_STRUCTURED_OUTPUT_MODELS,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
@@ -597,6 +599,7 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
chat_log: conversation.ChatLog,
|
||||
structure_name: str | None = None,
|
||||
structure: vol.Schema | None = None,
|
||||
max_iterations: int = MAX_TOOL_ITERATIONS,
|
||||
) -> None:
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.subentry.data
|
||||
@@ -697,8 +700,25 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
)
|
||||
|
||||
if structure and structure_name:
|
||||
structure_name = slugify(structure_name)
|
||||
if model_args["thinking"]["type"] == "disabled":
|
||||
if not model.startswith(tuple(UNSUPPORTED_STRUCTURED_OUTPUT_MODELS)):
|
||||
# Native structured output for those models who support it.
|
||||
structure_name = None
|
||||
model_args.setdefault("output_config", OutputConfigParam())[
|
||||
"format"
|
||||
] = JSONOutputFormatParam(
|
||||
type="json_schema",
|
||||
schema={
|
||||
**convert(
|
||||
structure,
|
||||
custom_serializer=chat_log.llm_api.custom_serializer
|
||||
if chat_log.llm_api
|
||||
else llm.selector_serializer,
|
||||
),
|
||||
"additionalProperties": False,
|
||||
},
|
||||
)
|
||||
elif model_args["thinking"]["type"] == "disabled":
|
||||
structure_name = slugify(structure_name)
|
||||
if not tools:
|
||||
# Simplest case: no tools and no extended thinking
|
||||
# Add a tool and force its use
|
||||
@@ -718,6 +738,7 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
# force tool use or disable text responses, so we add a hint to the
|
||||
# system prompt instead. With extended thinking, the model should be
|
||||
# smart enough to use the tool.
|
||||
structure_name = slugify(structure_name)
|
||||
model_args["tool_choice"] = ToolChoiceAutoParam(
|
||||
type="auto",
|
||||
)
|
||||
@@ -725,22 +746,24 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
model_args["system"].append( # type: ignore[union-attr]
|
||||
TextBlockParam(
|
||||
type="text",
|
||||
text=f"Claude MUST use the '{structure_name}' tool to provide the final answer instead of plain text.",
|
||||
text=f"Claude MUST use the '{structure_name}' tool to provide "
|
||||
"the final answer instead of plain text.",
|
||||
)
|
||||
)
|
||||
|
||||
tools.append(
|
||||
ToolParam(
|
||||
name=structure_name,
|
||||
description="Use this tool to reply to the user",
|
||||
input_schema=convert(
|
||||
structure,
|
||||
custom_serializer=chat_log.llm_api.custom_serializer
|
||||
if chat_log.llm_api
|
||||
else llm.selector_serializer,
|
||||
),
|
||||
if structure_name:
|
||||
tools.append(
|
||||
ToolParam(
|
||||
name=structure_name,
|
||||
description="Use this tool to reply to the user",
|
||||
input_schema=convert(
|
||||
structure,
|
||||
custom_serializer=chat_log.llm_api.custom_serializer
|
||||
if chat_log.llm_api
|
||||
else llm.selector_serializer,
|
||||
),
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
if tools:
|
||||
model_args["tools"] = tools
|
||||
@@ -748,7 +771,7 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
client = self.entry.runtime_data
|
||||
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(MAX_TOOL_ITERATIONS):
|
||||
for _iteration in range(max_iterations):
|
||||
try:
|
||||
stream = await client.messages.create(**model_args)
|
||||
|
||||
@@ -761,7 +784,7 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
_transform_stream(
|
||||
chat_log,
|
||||
stream,
|
||||
output_tool=structure_name if structure else None,
|
||||
output_tool=structure_name or None,
|
||||
),
|
||||
)
|
||||
]
|
||||
|
||||
@@ -3,13 +3,13 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Iterator
|
||||
from typing import cast
|
||||
from typing import TYPE_CHECKING, cast
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import data_entry_flow
|
||||
from homeassistant.components.repairs import RepairsFlow
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigSubentry
|
||||
from homeassistant.config_entries import ConfigEntryState, ConfigSubentry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
|
||||
@@ -23,6 +23,9 @@ from .const import (
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import AnthropicConfigEntry
|
||||
|
||||
|
||||
class ModelDeprecatedRepairFlow(RepairsFlow):
|
||||
"""Handler for an issue fixing flow."""
|
||||
@@ -110,7 +113,7 @@ class ModelDeprecatedRepairFlow(RepairsFlow):
|
||||
|
||||
async def _async_next_target(
|
||||
self,
|
||||
) -> tuple[ConfigEntry, ConfigSubentry, str] | None:
|
||||
) -> tuple[AnthropicConfigEntry, ConfigSubentry, str] | None:
|
||||
"""Return the next deprecated subentry target."""
|
||||
if self._subentry_iter is None:
|
||||
self._subentry_iter = self._iter_deprecated_subentries()
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"authentication_error": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
@@ -10,10 +11,23 @@
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"reauth_confirm": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "[%key:component::anthropic::config::step::user::data_description::api_key%]"
|
||||
},
|
||||
"description": "Reauthentication required. Please enter your updated API key."
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]"
|
||||
}
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "Your Anthropic API key."
|
||||
},
|
||||
"description": "Set up Anthropic integration by providing your Anthropic API key. Instructions to obtain an API key can be found in [the documentation]({instructions_url})."
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -35,6 +49,11 @@
|
||||
"max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::max_tokens%]",
|
||||
"temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::temperature%]"
|
||||
},
|
||||
"data_description": {
|
||||
"chat_model": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::chat_model%]",
|
||||
"max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::max_tokens%]",
|
||||
"temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::temperature%]"
|
||||
},
|
||||
"title": "[%key:component::anthropic::config_subentries::conversation::step::advanced::title%]"
|
||||
},
|
||||
"init": {
|
||||
@@ -42,6 +61,10 @@
|
||||
"name": "[%key:common::config_flow::data::name%]",
|
||||
"recommended": "[%key:component::anthropic::config_subentries::conversation::step::init::data::recommended%]"
|
||||
},
|
||||
"data_description": {
|
||||
"name": "[%key:component::anthropic::config_subentries::conversation::step::init::data_description::name%]",
|
||||
"recommended": "[%key:component::anthropic::config_subentries::conversation::step::init::data_description::recommended%]"
|
||||
},
|
||||
"title": "[%key:component::anthropic::config_subentries::conversation::step::init::title%]"
|
||||
},
|
||||
"model": {
|
||||
@@ -80,6 +103,11 @@
|
||||
"max_tokens": "Maximum tokens to return in response",
|
||||
"temperature": "Temperature"
|
||||
},
|
||||
"data_description": {
|
||||
"chat_model": "The model to serve the responses.",
|
||||
"max_tokens": "Limit the number of response tokens.",
|
||||
"temperature": "Control the randomness of the response, trading off between creativity and coherence."
|
||||
},
|
||||
"title": "Advanced settings"
|
||||
},
|
||||
"init": {
|
||||
@@ -90,7 +118,10 @@
|
||||
"recommended": "Recommended model settings"
|
||||
},
|
||||
"data_description": {
|
||||
"prompt": "Instruct how the LLM should respond. This can be a template."
|
||||
"llm_hass_api": "Allow the LLM to control Home Assistant.",
|
||||
"name": "The name of this configuration",
|
||||
"prompt": "Instruct how the LLM should respond. This can be a template.",
|
||||
"recommended": "Use default configuration"
|
||||
},
|
||||
"title": "Basic settings"
|
||||
},
|
||||
@@ -122,6 +153,9 @@
|
||||
"data": {
|
||||
"chat_model": "[%key:common::generic::model%]"
|
||||
},
|
||||
"data_description": {
|
||||
"chat_model": "Select the new model to use."
|
||||
},
|
||||
"description": "You are updating {subentry_name} ({subentry_type}) in {entry_name}. The current model {model} is deprecated. Select a supported model to continue.",
|
||||
"title": "Update model"
|
||||
}
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/aosmith",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["py-aosmith==1.0.16"]
|
||||
"requirements": ["py-aosmith==1.0.17"]
|
||||
}
|
||||
|
||||
@@ -120,7 +120,7 @@ class AOSmithWaterHeaterEntity(AOSmithStatusEntity, WaterHeaterEntity):
|
||||
return MODE_AOSMITH_TO_HA.get(self.device.status.current_mode, STATE_OFF)
|
||||
|
||||
@property
|
||||
def is_away_mode_on(self):
|
||||
def is_away_mode_on(self) -> bool:
|
||||
"""Return True if away mode is on."""
|
||||
return self.device.status.current_mode == AOSmithOperationMode.VACATION
|
||||
|
||||
|
||||
@@ -19,5 +19,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/aranet",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["aranet4==2.5.1"]
|
||||
"requirements": ["aranet4==2.6.0"]
|
||||
}
|
||||
|
||||
@@ -37,15 +37,15 @@ class AtagWaterHeater(AtagEntity, WaterHeaterEntity):
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
|
||||
@property
|
||||
def current_temperature(self):
|
||||
def current_temperature(self) -> float:
|
||||
"""Return the current temperature."""
|
||||
return self.coordinator.atag.dhw.temperature
|
||||
|
||||
@property
|
||||
def current_operation(self):
|
||||
def current_operation(self) -> str:
|
||||
"""Return current operation."""
|
||||
operation = self.coordinator.atag.dhw.current_operation
|
||||
return operation if operation in self.operation_list else STATE_OFF
|
||||
return operation if operation in OPERATION_LIST else STATE_OFF
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
@@ -53,7 +53,7 @@ class AtagWaterHeater(AtagEntity, WaterHeaterEntity):
|
||||
self.async_write_ha_state()
|
||||
|
||||
@property
|
||||
def target_temperature(self):
|
||||
def target_temperature(self) -> float:
|
||||
"""Return the setpoint if water demand, otherwise return base temp (comfort level)."""
|
||||
return self.coordinator.atag.dhw.target_temperature
|
||||
|
||||
|
||||
@@ -30,6 +30,9 @@
|
||||
"title": "Set up one-time password delivered by notify component"
|
||||
},
|
||||
"setup": {
|
||||
"data": {
|
||||
"code": "Code"
|
||||
},
|
||||
"description": "A one-time password has been sent via **notify.{notify_service}**. Please enter it below:",
|
||||
"title": "Verify setup"
|
||||
}
|
||||
@@ -42,6 +45,9 @@
|
||||
},
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"code": "Code"
|
||||
},
|
||||
"description": "To activate two-factor authentication using time-based one-time passwords, scan the QR code with your authentication app. If you don't have one, we recommend either [Google Authenticator]({google_authenticator_url}) or [Authy]({authy_url}).\n\n{qr_code}\n\nAfter scanning the code, enter the six-digit code from your app to verify the setup. If you have problems scanning the QR code, do a manual setup with code **`{code}`**.",
|
||||
"title": "Set up two-factor authentication using TOTP"
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components import labs, websocket_api
|
||||
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
|
||||
from homeassistant.components.labs import async_listen as async_labs_listen
|
||||
from homeassistant.components.labs import async_subscribe_preview_feature
|
||||
from homeassistant.const import (
|
||||
ATTR_AREA_ID,
|
||||
ATTR_ENTITY_ID,
|
||||
@@ -386,14 +386,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
schema=vol.Schema({vol.Optional(CONF_ID): str}),
|
||||
)
|
||||
|
||||
@callback
|
||||
def new_triggers_conditions_listener() -> None:
|
||||
async def new_triggers_conditions_listener(
|
||||
_event_data: labs.EventLabsUpdatedData,
|
||||
) -> None:
|
||||
"""Handle new_triggers_conditions flag change."""
|
||||
hass.async_create_task(
|
||||
reload_helper.execute_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
|
||||
)
|
||||
await reload_helper.execute_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
|
||||
|
||||
async_labs_listen(
|
||||
async_subscribe_preview_feature(
|
||||
hass,
|
||||
DOMAIN,
|
||||
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
|
||||
|
||||
@@ -5,7 +5,7 @@ import functools
|
||||
import json
|
||||
import logging
|
||||
from time import time
|
||||
from typing import Any
|
||||
from typing import Any, cast
|
||||
|
||||
from botocore.exceptions import BotoCoreError
|
||||
|
||||
@@ -189,48 +189,68 @@ class S3BackupAgent(BackupAgent):
|
||||
)
|
||||
upload_id = multipart_upload["UploadId"]
|
||||
try:
|
||||
parts = []
|
||||
parts: list[dict[str, Any]] = []
|
||||
part_number = 1
|
||||
buffer_size = 0 # bytes
|
||||
buffer: list[bytes] = []
|
||||
buffer = bytearray() # bytes buffer to store the data
|
||||
offset = 0 # start index of unread data inside buffer
|
||||
|
||||
stream = await open_stream()
|
||||
async for chunk in stream:
|
||||
buffer_size += len(chunk)
|
||||
buffer.append(chunk)
|
||||
buffer.extend(chunk)
|
||||
|
||||
# If buffer size meets minimum part size, upload it as a part
|
||||
if buffer_size >= MULTIPART_MIN_PART_SIZE_BYTES:
|
||||
_LOGGER.debug(
|
||||
"Uploading part number %d, size %d", part_number, buffer_size
|
||||
)
|
||||
part = await self._client.upload_part(
|
||||
Bucket=self._bucket,
|
||||
Key=tar_filename,
|
||||
PartNumber=part_number,
|
||||
UploadId=upload_id,
|
||||
Body=b"".join(buffer),
|
||||
)
|
||||
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
|
||||
part_number += 1
|
||||
buffer_size = 0
|
||||
buffer = []
|
||||
# Upload parts of exactly MULTIPART_MIN_PART_SIZE_BYTES to ensure
|
||||
# all non-trailing parts have the same size (defensive implementation)
|
||||
view = memoryview(buffer)
|
||||
try:
|
||||
while len(buffer) - offset >= MULTIPART_MIN_PART_SIZE_BYTES:
|
||||
start = offset
|
||||
end = offset + MULTIPART_MIN_PART_SIZE_BYTES
|
||||
part_data = view[start:end]
|
||||
offset = end
|
||||
|
||||
_LOGGER.debug(
|
||||
"Uploading part number %d, size %d",
|
||||
part_number,
|
||||
len(part_data),
|
||||
)
|
||||
part = await cast(Any, self._client).upload_part(
|
||||
Bucket=self._bucket,
|
||||
Key=tar_filename,
|
||||
PartNumber=part_number,
|
||||
UploadId=upload_id,
|
||||
Body=part_data.tobytes(),
|
||||
)
|
||||
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
|
||||
part_number += 1
|
||||
finally:
|
||||
view.release()
|
||||
|
||||
# Compact the buffer if the consumed offset has grown large enough. This
|
||||
# avoids unnecessary memory copies when compacting after every part upload.
|
||||
if offset and offset >= MULTIPART_MIN_PART_SIZE_BYTES:
|
||||
buffer = bytearray(buffer[offset:])
|
||||
offset = 0
|
||||
|
||||
# Upload the final buffer as the last part (no minimum size requirement)
|
||||
if buffer:
|
||||
# Offset should be 0 after the last compaction, but we use it as the start
|
||||
# index to be defensive in case the buffer was not compacted.
|
||||
if offset < len(buffer):
|
||||
remaining_data = memoryview(buffer)[offset:]
|
||||
_LOGGER.debug(
|
||||
"Uploading final part number %d, size %d", part_number, buffer_size
|
||||
"Uploading final part number %d, size %d",
|
||||
part_number,
|
||||
len(remaining_data),
|
||||
)
|
||||
part = await self._client.upload_part(
|
||||
part = await cast(Any, self._client).upload_part(
|
||||
Bucket=self._bucket,
|
||||
Key=tar_filename,
|
||||
PartNumber=part_number,
|
||||
UploadId=upload_id,
|
||||
Body=b"".join(buffer),
|
||||
Body=remaining_data.tobytes(),
|
||||
)
|
||||
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
|
||||
|
||||
await self._client.complete_multipart_upload(
|
||||
await cast(Any, self._client).complete_multipart_upload(
|
||||
Bucket=self._bucket,
|
||||
Key=tar_filename,
|
||||
UploadId=upload_id,
|
||||
@@ -297,14 +317,14 @@ class S3BackupAgent(BackupAgent):
|
||||
return self._backup_cache
|
||||
|
||||
backups = {}
|
||||
response = await self._client.list_objects_v2(Bucket=self._bucket)
|
||||
|
||||
# Filter for metadata files only
|
||||
metadata_files = [
|
||||
obj
|
||||
for obj in response.get("Contents", [])
|
||||
if obj["Key"].endswith(".metadata.json")
|
||||
]
|
||||
paginator = self._client.get_paginator("list_objects_v2")
|
||||
metadata_files: list[dict[str, Any]] = []
|
||||
async for page in paginator.paginate(Bucket=self._bucket):
|
||||
metadata_files.extend(
|
||||
obj
|
||||
for obj in page.get("Contents", [])
|
||||
if obj["Key"].endswith(".metadata.json")
|
||||
)
|
||||
|
||||
for metadata_file in metadata_files:
|
||||
try:
|
||||
|
||||
@@ -16,12 +16,18 @@ CONNECTION_TIMEOUT = 120 # 2 minutes
|
||||
# Default TIMEOUT_FOR_UPLOAD is 128 seconds, which is too short for large backups
|
||||
TIMEOUT_FOR_UPLOAD = 43200 # 12 hours
|
||||
|
||||
# Reduced retry count for download operations
|
||||
# Default is 20 retries with exponential backoff, which can hang for 30+ minutes
|
||||
# when there are persistent connection errors (e.g., SSL failures)
|
||||
TRY_COUNT_DOWNLOAD = 3
|
||||
|
||||
|
||||
class B2Http(BaseB2Http): # type: ignore[misc]
|
||||
"""B2Http with extended timeouts for backup operations."""
|
||||
|
||||
CONNECTION_TIMEOUT = CONNECTION_TIMEOUT
|
||||
TIMEOUT_FOR_UPLOAD = TIMEOUT_FOR_UPLOAD
|
||||
TRY_COUNT_DOWNLOAD = TRY_COUNT_DOWNLOAD
|
||||
|
||||
|
||||
class B2Session(BaseB2Session): # type: ignore[misc]
|
||||
|
||||
@@ -40,6 +40,10 @@ CACHE_TTL = 300
|
||||
# This prevents uploads from hanging indefinitely
|
||||
UPLOAD_TIMEOUT = 43200 # 12 hours (matches B2 HTTP timeout)
|
||||
|
||||
# Timeout for metadata download operations (in seconds)
|
||||
# This prevents the backup system from hanging when B2 connections fail
|
||||
METADATA_DOWNLOAD_TIMEOUT = 60
|
||||
|
||||
|
||||
def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
|
||||
"""Return the suggested filenames for the backup and metadata files."""
|
||||
@@ -413,12 +417,21 @@ class BackblazeBackupAgent(BackupAgent):
|
||||
backups = {}
|
||||
for file_name, file_version in all_files_in_prefix.items():
|
||||
if file_name.endswith(METADATA_FILE_SUFFIX):
|
||||
backup = await self._hass.async_add_executor_job(
|
||||
self._process_metadata_file_sync,
|
||||
file_name,
|
||||
file_version,
|
||||
all_files_in_prefix,
|
||||
)
|
||||
try:
|
||||
backup = await asyncio.wait_for(
|
||||
self._hass.async_add_executor_job(
|
||||
self._process_metadata_file_sync,
|
||||
file_name,
|
||||
file_version,
|
||||
all_files_in_prefix,
|
||||
),
|
||||
timeout=METADATA_DOWNLOAD_TIMEOUT,
|
||||
)
|
||||
except TimeoutError:
|
||||
_LOGGER.warning(
|
||||
"Timeout downloading metadata file %s", file_name
|
||||
)
|
||||
continue
|
||||
if backup:
|
||||
backups[backup.backup_id] = backup
|
||||
self._backup_list_cache = backups
|
||||
@@ -442,10 +455,18 @@ class BackblazeBackupAgent(BackupAgent):
|
||||
if not file or not metadata_file_version:
|
||||
raise BackupNotFound(f"Backup {backup_id} not found")
|
||||
|
||||
metadata_content = await self._hass.async_add_executor_job(
|
||||
self._download_and_parse_metadata_sync,
|
||||
metadata_file_version,
|
||||
)
|
||||
try:
|
||||
metadata_content = await asyncio.wait_for(
|
||||
self._hass.async_add_executor_job(
|
||||
self._download_and_parse_metadata_sync,
|
||||
metadata_file_version,
|
||||
),
|
||||
timeout=METADATA_DOWNLOAD_TIMEOUT,
|
||||
)
|
||||
except TimeoutError:
|
||||
raise BackupAgentError(
|
||||
f"Timeout downloading metadata for backup {backup_id}"
|
||||
) from None
|
||||
|
||||
_LOGGER.debug(
|
||||
"Successfully retrieved metadata for backup ID %s from file %s",
|
||||
@@ -468,16 +489,27 @@ class BackblazeBackupAgent(BackupAgent):
|
||||
# Process metadata files sequentially to avoid exhausting executor pool
|
||||
for file_name, file_version in all_files_in_prefix.items():
|
||||
if file_name.endswith(METADATA_FILE_SUFFIX):
|
||||
(
|
||||
result_backup_file,
|
||||
result_metadata_file_version,
|
||||
) = await self._hass.async_add_executor_job(
|
||||
self._process_metadata_file_for_id_sync,
|
||||
file_name,
|
||||
file_version,
|
||||
backup_id,
|
||||
all_files_in_prefix,
|
||||
)
|
||||
try:
|
||||
(
|
||||
result_backup_file,
|
||||
result_metadata_file_version,
|
||||
) = await asyncio.wait_for(
|
||||
self._hass.async_add_executor_job(
|
||||
self._process_metadata_file_for_id_sync,
|
||||
file_name,
|
||||
file_version,
|
||||
backup_id,
|
||||
all_files_in_prefix,
|
||||
),
|
||||
timeout=METADATA_DOWNLOAD_TIMEOUT,
|
||||
)
|
||||
except TimeoutError:
|
||||
_LOGGER.warning(
|
||||
"Timeout downloading metadata file %s while searching for backup %s",
|
||||
file_name,
|
||||
backup_id,
|
||||
)
|
||||
continue
|
||||
if result_backup_file and result_metadata_file_version:
|
||||
return result_backup_file, result_metadata_file_version
|
||||
|
||||
|
||||
@@ -8,11 +8,10 @@ from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import ATTR_CONFIG_ENTRY_ID
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import ATTR_DATETIME, DOMAIN, SERVICE_SET_DATE_TIME
|
||||
@@ -41,21 +40,10 @@ SET_DATE_TIME_SCHEMA = vol.Schema(
|
||||
|
||||
async def async_set_panel_date(call: ServiceCall) -> None:
|
||||
"""Set the date and time on a bosch alarm panel."""
|
||||
config_entry: BoschAlarmConfigEntry | None
|
||||
value: dt.datetime = call.data.get(ATTR_DATETIME, dt_util.now())
|
||||
entry_id = call.data[ATTR_CONFIG_ENTRY_ID]
|
||||
if not (config_entry := call.hass.config_entries.async_get_entry(entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="integration_not_found",
|
||||
translation_placeholders={"target": entry_id},
|
||||
)
|
||||
if config_entry.state is not ConfigEntryState.LOADED:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_loaded",
|
||||
translation_placeholders={"target": config_entry.title},
|
||||
)
|
||||
config_entry: BoschAlarmConfigEntry = service.async_get_config_entry(
|
||||
call.hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY_ID]
|
||||
)
|
||||
panel = config_entry.runtime_data
|
||||
try:
|
||||
await panel.set_panel_date(value)
|
||||
|
||||
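This integration and the Bring! hunks further down drop their hand-rolled lookups in favour of the shared service.async_get_config_entry helper, which resolves a config entry from a service call and raises user-facing errors when it is missing or not loaded. A simplified, self-contained sketch of that validation pattern; the exception class, entry class, and registry are stand-ins so the example runs without Home Assistant.

"""Sketch of the "get a loaded config entry or raise" service pattern."""

from dataclasses import dataclass, field
from enum import Enum


class ServiceValidationError(Exception):
    """User-facing error for a bad service target."""


class EntryState(Enum):
    LOADED = "loaded"
    NOT_LOADED = "not_loaded"


@dataclass
class ConfigEntry:
    entry_id: str
    title: str
    state: EntryState = EntryState.NOT_LOADED


@dataclass
class EntryRegistry:
    entries: dict[str, ConfigEntry] = field(default_factory=dict)

    def get_loaded_entry(self, entry_id: str) -> ConfigEntry:
        """Return the entry, raising when it is missing or not loaded."""
        entry = self.entries.get(entry_id)
        if entry is None:
            raise ServiceValidationError(
                f'Integration "{entry_id}" not found in registry'
            )
        if entry.state is not EntryState.LOADED:
            raise ServiceValidationError(f"{entry.title} is not loaded")
        return entry


registry = EntryRegistry({"abc": ConfigEntry("abc", "Bosch Alarm", EntryState.LOADED)})
print(registry.get_loaded_entry("abc").title)  # Bosch Alarm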
@@ -155,12 +155,6 @@
|
||||
"incorrect_door_state": {
|
||||
"message": "Door cannot be manipulated while it is momentarily unlocked."
|
||||
},
|
||||
"integration_not_found": {
|
||||
"message": "Integration \"{target}\" not found in registry."
|
||||
},
|
||||
"not_loaded": {
|
||||
"message": "{target} is not loaded."
|
||||
},
|
||||
"unknown_error": {
|
||||
"message": "An unknown error occurred while setting the date and time on \"{target}\"."
|
||||
}
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
"""Actions for Bring! integration."""
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from bring_api import (
|
||||
ActivityType,
|
||||
BringAuthException,
|
||||
@@ -13,7 +11,6 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components.event import ATTR_EVENT_TYPE
|
||||
from homeassistant.components.todo import DOMAIN as TODO_DOMAIN
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import ATTR_ENTITY_ID
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
@@ -46,19 +43,6 @@ SERVICE_ACTIVITY_STREAM_REACTION_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
def get_config_entry(hass: HomeAssistant, entry_id: str) -> BringConfigEntry:
|
||||
"""Return config entry or raise if not found or not loaded."""
|
||||
entry = hass.config_entries.async_get_entry(entry_id)
|
||||
if TYPE_CHECKING:
|
||||
assert entry
|
||||
if entry.state is not ConfigEntryState.LOADED:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_loaded",
|
||||
)
|
||||
return entry
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up services for Bring! integration."""
|
||||
@@ -78,7 +62,9 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
ATTR_ENTITY_ID: call.data[ATTR_ENTITY_ID],
|
||||
},
|
||||
)
|
||||
config_entry = get_config_entry(hass, entity.config_entry_id)
|
||||
config_entry: BringConfigEntry = service.async_get_config_entry(
|
||||
hass, DOMAIN, entity.config_entry_id
|
||||
)
|
||||
|
||||
coordinator = config_entry.runtime_data.data
|
||||
|
||||
|
||||
@@ -124,10 +124,6 @@
|
||||
"entity_not_found": {
|
||||
"message": "Failed to send reaction for Bring! — Unknown entity {entity_id}"
|
||||
},
|
||||
|
||||
"entry_not_loaded": {
|
||||
"message": "The account associated with this Bring! list is either not loaded or disabled in Home Assistant."
|
||||
},
|
||||
"notify_missing_argument": {
|
||||
"message": "This action requires field {field}, please enter a valid value for {field}"
|
||||
},
|
||||
|
||||
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any, Final
|
||||
|
||||
from bsblan import BSBLANError
|
||||
from bsblan import BSBLANError, get_hvac_action_category
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
ATTR_HVAC_MODE,
|
||||
@@ -13,6 +13,7 @@ from homeassistant.components.climate import (
|
||||
PRESET_NONE,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACAction,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE
|
||||
@@ -128,6 +129,15 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
|
||||
return BSBLAN_TO_HA_HVAC_MODE.get(hvac_mode_value)
|
||||
return try_parse_enum(HVACMode, hvac_mode_value)
|
||||
|
||||
@property
|
||||
def hvac_action(self) -> HVACAction | None:
|
||||
"""Return the current running hvac action."""
|
||||
action = self.coordinator.data.state.hvac_action
|
||||
if not action or not isinstance(action.value, int):
|
||||
return None
|
||||
category = get_hvac_action_category(action.value)
|
||||
return HVACAction(category.name.lower())
|
||||
|
||||
@property
|
||||
def preset_mode(self) -> str | None:
|
||||
"""Return the current preset mode."""
|
||||
|
||||
@@ -7,7 +7,7 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["bsblan"],
-  "requirements": ["python-bsblan==4.1.0"],
+  "requirements": ["python-bsblan==4.2.0"],
   "zeroconf": [
     {
       "name": "bsb-lan*",
|
||||
|
||||
@@ -9,10 +9,11 @@ from bsblan import BSBLANError, SetHotWaterParam
|
||||
from homeassistant.components.water_heater import (
|
||||
STATE_ECO,
|
||||
STATE_OFF,
|
||||
STATE_PERFORMANCE,
|
||||
WaterHeaterEntity,
|
||||
WaterHeaterEntityFeature,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, STATE_ON
|
||||
from homeassistant.const import ATTR_TEMPERATURE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
@@ -24,14 +25,16 @@ from .entity import BSBLanDualCoordinatorEntity
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
# Mapping between BSBLan and HA operation modes
|
||||
OPERATION_MODES = {
|
||||
"Eco": STATE_ECO, # Energy saving mode
|
||||
"Off": STATE_OFF, # Protection mode
|
||||
"On": STATE_ON, # Continuous comfort mode
|
||||
# Mapping between BSBLan operating mode values and HA operation modes
|
||||
BSBLAN_TO_HA_OPERATION_MODE: dict[int, str] = {
|
||||
0: STATE_OFF, # Protection mode
|
||||
1: STATE_PERFORMANCE, # Continuous comfort mode
|
||||
2: STATE_ECO, # Eco/automatic mode
|
||||
}
|
||||
|
||||
OPERATION_MODES_REVERSE = {v: k for k, v in OPERATION_MODES.items()}
|
||||
HA_TO_BSBLAN_OPERATION_MODE: dict[str, int] = {
|
||||
v: k for k, v in BSBLAN_TO_HA_OPERATION_MODE.items()
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -63,13 +66,14 @@ class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
|
||||
_attr_supported_features = (
|
||||
WaterHeaterEntityFeature.TARGET_TEMPERATURE
|
||||
| WaterHeaterEntityFeature.OPERATION_MODE
|
||||
| WaterHeaterEntityFeature.ON_OFF
|
||||
)
|
||||
|
||||
def __init__(self, data: BSBLanData) -> None:
|
||||
"""Initialize BSBLAN water heater."""
|
||||
super().__init__(data.fast_coordinator, data.slow_coordinator, data)
|
||||
self._attr_unique_id = format_mac(data.device.MAC)
|
||||
self._attr_operation_list = list(OPERATION_MODES_REVERSE.keys())
|
||||
self._attr_operation_list = list(HA_TO_BSBLAN_OPERATION_MODE.keys())
|
||||
|
||||
# Set temperature unit
|
||||
self._attr_temperature_unit = data.fast_coordinator.client.get_temperature_unit
|
||||
@@ -110,8 +114,11 @@ class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
|
||||
"""Return current operation."""
|
||||
if self.coordinator.data.dhw.operating_mode is None:
|
||||
return None
|
||||
current_mode = self.coordinator.data.dhw.operating_mode.desc
|
||||
return OPERATION_MODES.get(current_mode)
|
||||
# The operating_mode.value is an integer (0=Off, 1=On, 2=Eco)
|
||||
current_mode_value = self.coordinator.data.dhw.operating_mode.value
|
||||
if isinstance(current_mode_value, int):
|
||||
return BSBLAN_TO_HA_OPERATION_MODE.get(current_mode_value)
|
||||
return None
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
@@ -144,10 +151,12 @@ class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
|
||||
|
||||
async def async_set_operation_mode(self, operation_mode: str) -> None:
|
||||
"""Set new operation mode."""
|
||||
bsblan_mode = OPERATION_MODES_REVERSE.get(operation_mode)
|
||||
# Base class validates operation_mode is in operation_list before calling
|
||||
bsblan_mode = HA_TO_BSBLAN_OPERATION_MODE[operation_mode]
|
||||
try:
|
||||
# Send numeric value as string - BSB-LAN API expects numeric mode values
|
||||
await self.coordinator.client.set_hot_water(
|
||||
SetHotWaterParam(operating_mode=bsblan_mode)
|
||||
SetHotWaterParam(operating_mode=str(bsblan_mode))
|
||||
)
|
||||
except BSBLANError as err:
|
||||
raise HomeAssistantError(
|
||||
@@ -156,3 +165,11 @@ class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
|
||||
) from err
|
||||
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the water heater on."""
|
||||
await self.async_set_operation_mode(STATE_PERFORMANCE)
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the water heater off."""
|
||||
await self.async_set_operation_mode(STATE_OFF)
|
||||
|
||||
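The water-heater rewrite keys the operation-mode table on the device's numeric operating_mode.value instead of its free-form description, which keeps the mapping stable regardless of firmware wording. A small sketch of the two-way mapping; the numeric values mirror the hunk (0=Off, 1=On/comfort, 2=Eco) and the state strings stand in for Home Assistant's water-heater constants.

"""Sketch: map numeric BSB-LAN DHW modes to HA states and back."""

STATE_OFF = "off"
STATE_PERFORMANCE = "performance"
STATE_ECO = "eco"

BSBLAN_TO_HA_OPERATION_MODE: dict[int, str] = {
    0: STATE_OFF,          # Protection mode
    1: STATE_PERFORMANCE,  # Continuous comfort mode
    2: STATE_ECO,          # Eco/automatic mode
}
HA_TO_BSBLAN_OPERATION_MODE: dict[str, int] = {
    v: k for k, v in BSBLAN_TO_HA_OPERATION_MODE.items()
}


def current_operation(raw_value: object) -> str | None:
    """Translate the device value, tolerating missing or non-numeric payloads."""
    if isinstance(raw_value, int):
        return BSBLAN_TO_HA_OPERATION_MODE.get(raw_value)
    return None


print(current_operation(1))      # performance
print(current_operation("Eco"))  # None: descriptions are no longer used as keys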
@@ -29,6 +29,9 @@
|
||||
"state": {
|
||||
"off": "mdi:volume-low"
|
||||
}
|
||||
},
|
||||
"room_correction": {
|
||||
"default": "mdi:arrow-oscillating"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,6 +8,6 @@
   "iot_class": "local_push",
   "loggers": ["aiostreammagic"],
   "quality_scale": "platinum",
-  "requirements": ["aiostreammagic==2.11.0"],
+  "requirements": ["aiostreammagic==2.12.1"],
   "zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."]
 }
|
||||
|
||||
@@ -62,6 +62,9 @@
|
||||
},
|
||||
"pre_amp": {
|
||||
"name": "Pre-Amp"
|
||||
},
|
||||
"room_correction": {
|
||||
"name": "Room correction"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
"""Support for Cambridge Audio switch entities."""
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
from dataclasses import dataclass, field
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from aiostreammagic import StreamMagicClient
|
||||
|
||||
@@ -21,10 +21,18 @@ PARALLEL_UPDATES = 0
|
||||
class CambridgeAudioSwitchEntityDescription(SwitchEntityDescription):
|
||||
"""Describes Cambridge Audio switch entity."""
|
||||
|
||||
load_fn: Callable[[StreamMagicClient], bool] = field(default=lambda _: True)
|
||||
value_fn: Callable[[StreamMagicClient], bool]
|
||||
set_value_fn: Callable[[StreamMagicClient, bool], Awaitable[None]]
|
||||
|
||||
|
||||
def room_correction_enabled(client: StreamMagicClient) -> bool:
|
||||
"""Check if room correction is enabled."""
|
||||
if TYPE_CHECKING:
|
||||
assert client.audio.tilt_eq is not None
|
||||
return client.audio.tilt_eq.enabled
|
||||
|
||||
|
||||
CONTROL_ENTITIES: tuple[CambridgeAudioSwitchEntityDescription, ...] = (
|
||||
CambridgeAudioSwitchEntityDescription(
|
||||
key="pre_amp",
|
||||
@@ -40,6 +48,14 @@ CONTROL_ENTITIES: tuple[CambridgeAudioSwitchEntityDescription, ...] = (
|
||||
value_fn=lambda client: client.update.early_update,
|
||||
set_value_fn=lambda client, value: client.set_early_update(value),
|
||||
),
|
||||
CambridgeAudioSwitchEntityDescription(
|
||||
key="room_correction",
|
||||
translation_key="room_correction",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
load_fn=lambda client: client.audio.tilt_eq is not None,
|
||||
value_fn=room_correction_enabled,
|
||||
set_value_fn=lambda client, value: client.set_room_correction_mode(value),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -49,9 +65,11 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Cambridge Audio switch entities based on a config entry."""
|
||||
client: StreamMagicClient = entry.runtime_data
|
||||
async_add_entities(
|
||||
CambridgeAudioSwitch(entry.runtime_data, description)
|
||||
for description in CONTROL_ENTITIES
|
||||
if description.load_fn(client)
|
||||
)
|
||||
|
||||
|
||||
|
||||
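The switch hunk adds a load_fn to the entity description so entities are only created when the device actually exposes the underlying feature (here, a tilt EQ block). A sketch of that description-driven filtering, with a stand-in client object in place of aiostreammagic's StreamMagicClient.

"""Sketch: entity descriptions that decide whether they should be created."""

from collections.abc import Callable
from dataclasses import dataclass, field
from types import SimpleNamespace
from typing import Any


@dataclass(frozen=True, kw_only=True)
class SwitchDescription:
    key: str
    load_fn: Callable[[Any], bool] = field(default=lambda _: True)
    value_fn: Callable[[Any], bool] = lambda _: False


DESCRIPTIONS = (
    SwitchDescription(key="pre_amp", value_fn=lambda c: c.pre_amp),
    SwitchDescription(
        key="room_correction",
        # Only create the switch when the device reports a tilt EQ block.
        load_fn=lambda c: c.audio.tilt_eq is not None,
        value_fn=lambda c: c.audio.tilt_eq.enabled,
    ),
)

client = SimpleNamespace(pre_amp=True, audio=SimpleNamespace(tilt_eq=None))
created = [d.key for d in DESCRIPTIONS if d.load_fn(client)]
print(created)  # ['pre_amp'] - room_correction is skipped because tilt_eq is None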
@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==1.13.0", "openai==2.15.0"],
+  "requirements": ["hass-nabucasa==1.15.0", "openai==2.21.0"],
   "single_config_entry": true
 }
|
||||
|
||||
@@ -2,86 +2,23 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
import socket
|
||||
|
||||
import pycfdns
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_TOKEN, CONF_ZONE
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.exceptions import (
|
||||
ConfigEntryAuthFailed,
|
||||
ConfigEntryNotReady,
|
||||
HomeAssistantError,
|
||||
)
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
from homeassistant.util.location import async_detect_location_info
|
||||
from homeassistant.util.network import is_ipv4_address
|
||||
|
||||
from .const import CONF_RECORDS, DEFAULT_UPDATE_INTERVAL, DOMAIN, SERVICE_UPDATE_RECORDS
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type CloudflareConfigEntry = ConfigEntry[CloudflareRuntimeData]
|
||||
|
||||
|
||||
@dataclass
|
||||
class CloudflareRuntimeData:
|
||||
"""Runtime data for Cloudflare config entry."""
|
||||
|
||||
client: pycfdns.Client
|
||||
dns_zone: pycfdns.ZoneModel
|
||||
from .const import DOMAIN, SERVICE_UPDATE_RECORDS
|
||||
from .coordinator import CloudflareConfigEntry, CloudflareCoordinator
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: CloudflareConfigEntry) -> bool:
|
||||
"""Set up Cloudflare from a config entry."""
|
||||
session = async_get_clientsession(hass)
|
||||
client = pycfdns.Client(
|
||||
api_token=entry.data[CONF_API_TOKEN],
|
||||
client_session=session,
|
||||
)
|
||||
entry.runtime_data = CloudflareCoordinator(hass, entry)
|
||||
await entry.runtime_data.async_config_entry_first_refresh()
|
||||
|
||||
try:
|
||||
dns_zones = await client.list_zones()
|
||||
dns_zone = next(
|
||||
zone for zone in dns_zones if zone["name"] == entry.data[CONF_ZONE]
|
||||
)
|
||||
except pycfdns.AuthenticationException as error:
|
||||
raise ConfigEntryAuthFailed from error
|
||||
except pycfdns.ComunicationException as error:
|
||||
raise ConfigEntryNotReady from error
|
||||
# Since we are not using the coordinator for data reads, we need to add a dummy listener
|
||||
entry.async_on_unload(entry.runtime_data.async_add_listener(lambda: None))
|
||||
|
||||
entry.runtime_data = CloudflareRuntimeData(client, dns_zone)
|
||||
|
||||
async def update_records(now: datetime) -> None:
|
||||
"""Set up recurring update."""
|
||||
try:
|
||||
await _async_update_cloudflare(hass, entry)
|
||||
except (
|
||||
pycfdns.AuthenticationException,
|
||||
pycfdns.ComunicationException,
|
||||
) as error:
|
||||
_LOGGER.error("Error updating zone %s: %s", entry.data[CONF_ZONE], error)
|
||||
|
||||
async def update_records_service(call: ServiceCall) -> None:
|
||||
async def update_records_service(_: ServiceCall) -> None:
|
||||
"""Set up service for manual trigger."""
|
||||
try:
|
||||
await _async_update_cloudflare(hass, entry)
|
||||
except (
|
||||
pycfdns.AuthenticationException,
|
||||
pycfdns.ComunicationException,
|
||||
) as error:
|
||||
_LOGGER.error("Error updating zone %s: %s", entry.data[CONF_ZONE], error)
|
||||
|
||||
update_interval = timedelta(minutes=DEFAULT_UPDATE_INTERVAL)
|
||||
entry.async_on_unload(
|
||||
async_track_time_interval(hass, update_records, update_interval)
|
||||
)
|
||||
await entry.runtime_data.async_request_refresh()
|
||||
|
||||
hass.services.async_register(DOMAIN, SERVICE_UPDATE_RECORDS, update_records_service)
|
||||
|
||||
@@ -92,49 +29,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: CloudflareConfigEntry)
|
||||
"""Unload Cloudflare config entry."""
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def _async_update_cloudflare(
|
||||
hass: HomeAssistant,
|
||||
entry: CloudflareConfigEntry,
|
||||
) -> None:
|
||||
client = entry.runtime_data.client
|
||||
dns_zone = entry.runtime_data.dns_zone
|
||||
target_records: list[str] = entry.data[CONF_RECORDS]
|
||||
|
||||
_LOGGER.debug("Starting update for zone %s", dns_zone["name"])
|
||||
|
||||
records = await client.list_dns_records(zone_id=dns_zone["id"], type="A")
|
||||
_LOGGER.debug("Records: %s", records)
|
||||
|
||||
session = async_get_clientsession(hass, family=socket.AF_INET)
|
||||
location_info = await async_detect_location_info(session)
|
||||
|
||||
if not location_info or not is_ipv4_address(location_info.ip):
|
||||
raise HomeAssistantError("Could not get external IPv4 address")
|
||||
|
||||
filtered_records = [
|
||||
record
|
||||
for record in records
|
||||
if record["name"] in target_records and record["content"] != location_info.ip
|
||||
]
|
||||
|
||||
if len(filtered_records) == 0:
|
||||
_LOGGER.debug("All target records are up to date")
|
||||
return
|
||||
|
||||
await asyncio.gather(
|
||||
*[
|
||||
client.update_dns_record(
|
||||
zone_id=dns_zone["id"],
|
||||
record_id=record["id"],
|
||||
record_content=location_info.ip,
|
||||
record_name=record["name"],
|
||||
record_type=record["type"],
|
||||
record_proxied=record["proxied"],
|
||||
)
|
||||
for record in filtered_records
|
||||
]
|
||||
)
|
||||
|
||||
_LOGGER.debug("Update for zone %s is complete", dns_zone["name"])
|
||||
|
||||
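Both the removed __init__ code and the new coordinator keep the same core update step: detect the public IPv4 address, keep only the targeted A records whose content differs, and push the updates concurrently. A sketch of that step in isolation; the client is a stand-in for pycfdns, and the update_dns_record keyword arguments mirror the diff.

"""Sketch: concurrently update only the DNS A records that are out of date."""

import asyncio
from typing import Any


async def update_stale_records(
    client: Any,
    zone: dict[str, Any],
    records: list[dict[str, Any]],
    target_names: list[str],
    current_ip: str,
) -> int:
    """Push current_ip to every targeted record whose content differs."""
    stale = [
        record
        for record in records
        if record["name"] in target_names and record["content"] != current_ip
    ]
    if not stale:
        return 0
    await asyncio.gather(
        *(
            client.update_dns_record(
                zone_id=zone["id"],
                record_id=record["id"],
                record_content=current_ip,
                record_name=record["name"],
                record_type=record["type"],
                record_proxied=record["proxied"],
            )
            for record in stale
        )
    )
    return len(stale)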
homeassistant/components/cloudflare/coordinator.py (new file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
"""Contains the Coordinator for updating the IP addresses of your Cloudflare DNS records."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
from logging import getLogger
|
||||
import socket
|
||||
|
||||
import pycfdns
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_TOKEN, CONF_ZONE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util.location import async_detect_location_info
|
||||
from homeassistant.util.network import is_ipv4_address
|
||||
|
||||
from .const import CONF_RECORDS, DEFAULT_UPDATE_INTERVAL
|
||||
|
||||
_LOGGER = getLogger(__name__)
|
||||
|
||||
type CloudflareConfigEntry = ConfigEntry[CloudflareCoordinator]
|
||||
|
||||
|
||||
class CloudflareCoordinator(DataUpdateCoordinator[None]):
|
||||
"""Coordinates records updates."""
|
||||
|
||||
config_entry: CloudflareConfigEntry
|
||||
client: pycfdns.Client
|
||||
zone: pycfdns.ZoneModel
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, config_entry: CloudflareConfigEntry
|
||||
) -> None:
|
||||
"""Initialize an coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=config_entry.title,
|
||||
update_interval=timedelta(minutes=DEFAULT_UPDATE_INTERVAL),
|
||||
)
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
"""Set up the coordinator."""
|
||||
self.client = pycfdns.Client(
|
||||
api_token=self.config_entry.data[CONF_API_TOKEN],
|
||||
client_session=async_get_clientsession(self.hass),
|
||||
)
|
||||
|
||||
try:
|
||||
self.zone = next(
|
||||
zone
|
||||
for zone in await self.client.list_zones()
|
||||
if zone["name"] == self.config_entry.data[CONF_ZONE]
|
||||
)
|
||||
except pycfdns.AuthenticationException as e:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
except pycfdns.ComunicationException as e:
|
||||
raise UpdateFailed("Error communicating with API") from e
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Update records."""
|
||||
_LOGGER.debug("Starting update for zone %s", self.zone["name"])
|
||||
try:
|
||||
records = await self.client.list_dns_records(
|
||||
zone_id=self.zone["id"], type="A"
|
||||
)
|
||||
_LOGGER.debug("Records: %s", records)
|
||||
|
||||
target_records: list[str] = self.config_entry.data[CONF_RECORDS]
|
||||
|
||||
location_info = await async_detect_location_info(
|
||||
async_get_clientsession(self.hass, family=socket.AF_INET)
|
||||
)
|
||||
|
||||
if not location_info or not is_ipv4_address(location_info.ip):
|
||||
raise UpdateFailed("Could not get external IPv4 address")
|
||||
|
||||
filtered_records = [
|
||||
record
|
||||
for record in records
|
||||
if record["name"] in target_records
|
||||
and record["content"] != location_info.ip
|
||||
]
|
||||
|
||||
if len(filtered_records) == 0:
|
||||
_LOGGER.debug("All target records are up to date")
|
||||
return
|
||||
|
||||
await asyncio.gather(
|
||||
*[
|
||||
self.client.update_dns_record(
|
||||
zone_id=self.zone["id"],
|
||||
record_id=record["id"],
|
||||
record_content=location_info.ip,
|
||||
record_name=record["name"],
|
||||
record_type=record["type"],
|
||||
record_proxied=record["proxied"],
|
||||
)
|
||||
for record in filtered_records
|
||||
]
|
||||
)
|
||||
|
||||
_LOGGER.debug("Update for zone %s is complete", self.zone["name"])
|
||||
|
||||
except (
|
||||
pycfdns.AuthenticationException,
|
||||
pycfdns.ComunicationException,
|
||||
) as e:
|
||||
raise UpdateFailed(
|
||||
f"Error updating zone {self.config_entry.data[CONF_ZONE]}"
|
||||
) from e
|
||||
@@ -5,7 +5,7 @@ import functools
|
||||
import json
|
||||
import logging
|
||||
from time import time
|
||||
from typing import Any
|
||||
from typing import Any, cast
|
||||
|
||||
from botocore.exceptions import BotoCoreError
|
||||
|
||||
@@ -190,58 +190,77 @@ class R2BackupAgent(BackupAgent):
|
||||
:param open_stream: A function returning an async iterator that yields bytes.
|
||||
"""
|
||||
_LOGGER.debug("Starting multipart upload for %s", tar_filename)
|
||||
key = self._with_prefix(tar_filename)
|
||||
multipart_upload = await self._client.create_multipart_upload(
|
||||
Bucket=self._bucket,
|
||||
Key=self._with_prefix(tar_filename),
|
||||
Key=key,
|
||||
)
|
||||
upload_id = multipart_upload["UploadId"]
|
||||
try:
|
||||
parts: list[dict[str, Any]] = []
|
||||
part_number = 1
|
||||
buffer = bytearray() # bytes buffer to store the data
|
||||
offset = 0 # start index of unread data inside buffer
|
||||
|
||||
stream = await open_stream()
|
||||
async for chunk in stream:
|
||||
buffer.extend(chunk)
|
||||
|
||||
# upload parts of exactly MULTIPART_MIN_PART_SIZE_BYTES to ensure
|
||||
# all non-trailing parts have the same size (required by S3/R2)
|
||||
while len(buffer) >= MULTIPART_MIN_PART_SIZE_BYTES:
|
||||
part_data = bytes(buffer[:MULTIPART_MIN_PART_SIZE_BYTES])
|
||||
del buffer[:MULTIPART_MIN_PART_SIZE_BYTES]
|
||||
# Upload parts of exactly MULTIPART_MIN_PART_SIZE_BYTES to ensure
|
||||
# all non-trailing parts have the same size (defensive implementation)
|
||||
view = memoryview(buffer)
|
||||
try:
|
||||
while len(buffer) - offset >= MULTIPART_MIN_PART_SIZE_BYTES:
|
||||
start = offset
|
||||
end = offset + MULTIPART_MIN_PART_SIZE_BYTES
|
||||
part_data = view[start:end]
|
||||
offset = end
|
||||
|
||||
_LOGGER.debug(
|
||||
"Uploading part number %d, size %d",
|
||||
part_number,
|
||||
len(part_data),
|
||||
)
|
||||
part = await self._client.upload_part(
|
||||
Bucket=self._bucket,
|
||||
Key=self._with_prefix(tar_filename),
|
||||
PartNumber=part_number,
|
||||
UploadId=upload_id,
|
||||
Body=part_data,
|
||||
)
|
||||
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
|
||||
part_number += 1
|
||||
_LOGGER.debug(
|
||||
"Uploading part number %d, size %d",
|
||||
part_number,
|
||||
len(part_data),
|
||||
)
|
||||
part = await cast(Any, self._client).upload_part(
|
||||
Bucket=self._bucket,
|
||||
Key=key,
|
||||
PartNumber=part_number,
|
||||
UploadId=upload_id,
|
||||
Body=part_data.tobytes(),
|
||||
)
|
||||
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
|
||||
part_number += 1
|
||||
finally:
|
||||
view.release()
|
||||
|
||||
# Compact the buffer if the consumed offset has grown large enough. This
|
||||
# avoids unnecessary memory copies when compacting after every part upload.
|
||||
if offset and offset >= MULTIPART_MIN_PART_SIZE_BYTES:
|
||||
buffer = bytearray(buffer[offset:])
|
||||
offset = 0
|
||||
|
||||
# Upload the final buffer as the last part (no minimum size requirement)
|
||||
if buffer:
|
||||
# Offset should be 0 after the last compaction, but we use it as the start
|
||||
# index to be defensive in case the buffer was not compacted.
|
||||
if offset < len(buffer):
|
||||
remaining_data = memoryview(buffer)[offset:]
|
||||
_LOGGER.debug(
|
||||
"Uploading final part number %d, size %d", part_number, len(buffer)
|
||||
"Uploading final part number %d, size %d",
|
||||
part_number,
|
||||
len(remaining_data),
|
||||
)
|
||||
part = await self._client.upload_part(
|
||||
part = await cast(Any, self._client).upload_part(
|
||||
Bucket=self._bucket,
|
||||
Key=self._with_prefix(tar_filename),
|
||||
Key=key,
|
||||
PartNumber=part_number,
|
||||
UploadId=upload_id,
|
||||
Body=bytes(buffer),
|
||||
Body=remaining_data.tobytes(),
|
||||
)
|
||||
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
|
||||
|
||||
await self._client.complete_multipart_upload(
|
||||
await cast(Any, self._client).complete_multipart_upload(
|
||||
Bucket=self._bucket,
|
||||
Key=self._with_prefix(tar_filename),
|
||||
Key=key,
|
||||
UploadId=upload_id,
|
||||
MultipartUpload={"Parts": parts},
|
||||
)
|
||||
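The multipart-upload rewrite tracks an offset into a growing bytearray and compacts it lazily, so every non-trailing part is exactly the minimum part size without re-copying the buffer after each upload. A runnable sketch of that slicing scheme, separated from the S3 calls; the 5 MiB constant and the demo stream are illustrative.

"""Sketch: slice an async byte stream into fixed-size upload parts."""

import asyncio
from collections.abc import AsyncIterator

PART_SIZE = 5 * 1024 * 1024  # typical S3/R2 minimum non-trailing part size


async def iter_parts(
    stream: AsyncIterator[bytes], part_size: int = PART_SIZE
) -> AsyncIterator[bytes]:
    """Yield equal-sized parts, then one trailing part with whatever remains."""
    buffer = bytearray()
    offset = 0  # start index of unconsumed data inside buffer
    async for chunk in stream:
        buffer.extend(chunk)
        while len(buffer) - offset >= part_size:
            yield bytes(buffer[offset : offset + part_size])
            offset += part_size
        # Compact lazily so the tail is not copied after every single part.
        if offset >= part_size:
            del buffer[:offset]
            offset = 0
    if offset < len(buffer):
        yield bytes(buffer[offset:])


async def _demo() -> None:
    async def fake_stream() -> AsyncIterator[bytes]:
        for _ in range(3):
            yield b"x" * (4 * 1024 * 1024)

    sizes = [len(part) async for part in iter_parts(fake_stream())]
    print(sizes)  # [5242880, 5242880, 2097152]


if __name__ == "__main__":
    asyncio.run(_demo())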
@@ -250,7 +269,7 @@ class R2BackupAgent(BackupAgent):
|
||||
try:
|
||||
await self._client.abort_multipart_upload(
|
||||
Bucket=self._bucket,
|
||||
Key=self._with_prefix(tar_filename),
|
||||
Key=key,
|
||||
UploadId=upload_id,
|
||||
)
|
||||
except BotoCoreError:
|
||||
|
||||
@@ -19,11 +19,11 @@
|
||||
"secret_access_key": "Secret access key"
|
||||
},
|
||||
"data_description": {
|
||||
"access_key_id": "Access key ID to connect to Cloudflare R2 (this is your Account ID)",
|
||||
"access_key_id": "Access key ID to connect to Cloudflare R2",
|
||||
"bucket": "Bucket must already exist and be writable by the provided credentials.",
|
||||
"endpoint_url": "Cloudflare R2 S3-compatible endpoint.",
|
||||
"prefix": "Optional folder path inside the bucket. Example: backups/homeassistant",
|
||||
"secret_access_key": "Secret access key to connect to Cloudflare R2. See [Docs]({auth_docs_url})"
|
||||
"secret_access_key": "Secret access key to connect to Cloudflare R2. See [Cloudflare documentation]({auth_docs_url})"
|
||||
},
|
||||
"title": "Add Cloudflare R2 bucket"
|
||||
}
|
||||
|
||||
@@ -144,7 +144,7 @@ class ComelitAlarmEntity(
         """Update state after action."""
         self._area.human_status = area_state
         self._area.armed = armed
-        await self.async_update_ha_state()
+        self.async_write_ha_state()
|
||||
|
||||
async def async_alarm_disarm(self, code: str | None = None) -> None:
|
||||
"""Send disarm command."""
|
||||
|
||||
@@ -11,7 +11,9 @@ from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.CLIMATE,
|
||||
Platform.NUMBER,
|
||||
Platform.SELECT,
|
||||
Platform.WATER_HEATER,
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -1,5 +1,43 @@
|
||||
{
|
||||
"entity": {
|
||||
"number": {
|
||||
"boiler_target_temperature": {
|
||||
"default": "mdi:water-boiler"
|
||||
},
|
||||
"boiler_target_temperature_const": {
|
||||
"default": "mdi:water-boiler"
|
||||
},
|
||||
"heating_target_temperature_const": {
|
||||
"default": "mdi:radiator"
|
||||
},
|
||||
"mixer_target_temperature": {
|
||||
"default": "mdi:valve"
|
||||
},
|
||||
"mixer_target_temperature_zone": {
|
||||
"default": "mdi:valve"
|
||||
},
|
||||
"target_temperature_comfort": {
|
||||
"default": "mdi:thermometer"
|
||||
},
|
||||
"target_temperature_const": {
|
||||
"default": "mdi:thermometer-lines"
|
||||
},
|
||||
"target_temperature_eco": {
|
||||
"default": "mdi:leaf"
|
||||
},
|
||||
"target_temperature_eco_cooling": {
|
||||
"default": "mdi:snowflake-thermometer"
|
||||
},
|
||||
"target_temperature_eco_winter": {
|
||||
"default": "mdi:thermometer"
|
||||
},
|
||||
"target_temperature_holiday": {
|
||||
"default": "mdi:beach"
|
||||
},
|
||||
"target_temperature_out_of_home": {
|
||||
"default": "mdi:thermometer-off"
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"aero_by_pass": {
|
||||
"default": "mdi:valve",
|
||||
|
||||
homeassistant/components/compit/number.py (new file, 339 lines)
@@ -0,0 +1,339 @@
|
||||
"""Number platform for Compit integration."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from compit_inext_api.consts import CompitParameter
|
||||
|
||||
from homeassistant.components.number import (
|
||||
NumberDeviceClass,
|
||||
NumberEntity,
|
||||
NumberEntityDescription,
|
||||
NumberMode,
|
||||
)
|
||||
from homeassistant.const import EntityCategory, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, MANUFACTURER_NAME
|
||||
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class CompitDeviceDescription:
|
||||
"""Class to describe a Compit device."""
|
||||
|
||||
name: str
|
||||
"""Name of the device."""
|
||||
|
||||
parameters: list[NumberEntityDescription]
|
||||
"""Parameters of the device."""
|
||||
|
||||
|
||||
DESCRIPTIONS: dict[CompitParameter, NumberEntityDescription] = {
|
||||
CompitParameter.TARGET_TEMPERATURE_COMFORT: NumberEntityDescription(
|
||||
key=CompitParameter.TARGET_TEMPERATURE_COMFORT.value,
|
||||
translation_key="target_temperature_comfort",
|
||||
native_min_value=0,
|
||||
native_max_value=40,
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
mode=NumberMode.SLIDER,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
CompitParameter.TARGET_TEMPERATURE_ECO_WINTER: NumberEntityDescription(
|
||||
key=CompitParameter.TARGET_TEMPERATURE_ECO_WINTER.value,
|
||||
translation_key="target_temperature_eco_winter",
|
||||
native_min_value=0,
|
||||
native_max_value=40,
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
mode=NumberMode.SLIDER,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
CompitParameter.TARGET_TEMPERATURE_ECO_COOLING: NumberEntityDescription(
|
||||
key=CompitParameter.TARGET_TEMPERATURE_ECO_COOLING.value,
|
||||
translation_key="target_temperature_eco_cooling",
|
||||
native_min_value=0,
|
||||
native_max_value=40,
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
mode=NumberMode.SLIDER,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
CompitParameter.TARGET_TEMPERATURE_OUT_OF_HOME: NumberEntityDescription(
|
||||
key=CompitParameter.TARGET_TEMPERATURE_OUT_OF_HOME.value,
|
||||
translation_key="target_temperature_out_of_home",
|
||||
native_min_value=0,
|
||||
native_max_value=40,
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
mode=NumberMode.SLIDER,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
CompitParameter.TARGET_TEMPERATURE_ECO: NumberEntityDescription(
|
||||
key=CompitParameter.TARGET_TEMPERATURE_ECO.value,
|
||||
translation_key="target_temperature_eco",
|
||||
native_min_value=0,
|
||||
native_max_value=40,
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
mode=NumberMode.SLIDER,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
CompitParameter.TARGET_TEMPERATURE_HOLIDAY: NumberEntityDescription(
|
||||
key=CompitParameter.TARGET_TEMPERATURE_HOLIDAY.value,
|
||||
translation_key="target_temperature_holiday",
|
||||
native_min_value=0,
|
||||
native_max_value=40,
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
mode=NumberMode.SLIDER,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
CompitParameter.TARGET_TEMPERATURE_CONST: NumberEntityDescription(
|
||||
key=CompitParameter.TARGET_TEMPERATURE_CONST.value,
|
||||
translation_key="target_temperature_const",
|
||||
native_min_value=0,
|
||||
native_max_value=95,
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
mode=NumberMode.SLIDER,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
CompitParameter.HEATING_TARGET_TEMPERATURE_CONST: NumberEntityDescription(
|
||||
key=CompitParameter.HEATING_TARGET_TEMPERATURE_CONST.value,
|
||||
translation_key="heating_target_temperature_const",
|
||||
native_min_value=0,
|
||||
native_max_value=95,
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
mode=NumberMode.SLIDER,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
CompitParameter.MIXER_TARGET_TEMPERATURE: NumberEntityDescription(
|
||||
key=CompitParameter.MIXER_TARGET_TEMPERATURE.value,
|
||||
translation_key="mixer_target_temperature",
|
||||
native_min_value=0,
|
||||
native_max_value=90,
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
mode=NumberMode.SLIDER,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
CompitParameter.MIXER1_TARGET_TEMPERATURE: NumberEntityDescription(
|
||||
key=CompitParameter.MIXER1_TARGET_TEMPERATURE.value,
|
||||
translation_key="mixer_target_temperature_zone",
|
||||
native_min_value=0,
|
||||
native_max_value=95,
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
mode=NumberMode.SLIDER,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
translation_placeholders={"zone": "1"},
|
||||
),
|
||||
CompitParameter.MIXER2_TARGET_TEMPERATURE: NumberEntityDescription(
|
||||
key=CompitParameter.MIXER2_TARGET_TEMPERATURE.value,
|
||||
translation_key="mixer_target_temperature_zone",
|
||||
native_min_value=0,
|
||||
native_max_value=95,
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
mode=NumberMode.SLIDER,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
translation_placeholders={"zone": "2"},
|
||||
),
|
||||
CompitParameter.BOILER_TARGET_TEMPERATURE: NumberEntityDescription(
|
||||
key=CompitParameter.BOILER_TARGET_TEMPERATURE.value,
|
||||
translation_key="boiler_target_temperature",
|
||||
native_min_value=0,
|
||||
native_max_value=95,
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
mode=NumberMode.SLIDER,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
CompitParameter.BOILER_TARGET_TEMPERATURE_CONST: NumberEntityDescription(
|
||||
key=CompitParameter.BOILER_TARGET_TEMPERATURE_CONST.value,
|
||||
translation_key="boiler_target_temperature_const",
|
||||
native_min_value=0,
|
||||
native_max_value=90,
|
||||
native_step=0.1,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
mode=NumberMode.SLIDER,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
DEVICE_DEFINITIONS: dict[int, CompitDeviceDescription] = {
|
||||
7: CompitDeviceDescription(
|
||||
name="Nano One",
|
||||
parameters=[
|
||||
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_COMFORT],
|
||||
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_ECO],
|
||||
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_HOLIDAY],
|
||||
],
|
||||
),
|
||||
12: CompitDeviceDescription(
|
||||
name="Nano Color",
|
||||
parameters=[
|
||||
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_COMFORT],
|
||||
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_ECO_WINTER],
|
||||
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_ECO_COOLING],
|
||||
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_OUT_OF_HOME],
|
||||
],
|
||||
),
|
||||
223: CompitDeviceDescription(
|
||||
name="Nano Color 2",
|
||||
parameters=[
|
||||
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_COMFORT],
|
||||
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_ECO_WINTER],
|
||||
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_ECO_COOLING],
|
||||
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_OUT_OF_HOME],
|
||||
],
|
||||
),
|
||||
3: CompitDeviceDescription(
|
||||
name="R810",
|
||||
parameters=[
|
||||
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_CONST],
|
||||
],
|
||||
),
|
||||
34: CompitDeviceDescription(
|
||||
name="r470",
|
||||
parameters=[
|
||||
DESCRIPTIONS[CompitParameter.HEATING_TARGET_TEMPERATURE_CONST],
|
||||
],
|
||||
),
|
||||
221: CompitDeviceDescription(
|
||||
name="R350.M",
|
||||
parameters=[
|
||||
DESCRIPTIONS[CompitParameter.MIXER_TARGET_TEMPERATURE],
|
||||
],
|
||||
),
|
||||
91: CompitDeviceDescription(
|
||||
name="R770RS / R771RS",
|
||||
parameters=[
|
||||
DESCRIPTIONS[CompitParameter.MIXER1_TARGET_TEMPERATURE],
|
||||
DESCRIPTIONS[CompitParameter.MIXER2_TARGET_TEMPERATURE],
|
||||
],
|
||||
),
|
||||
212: CompitDeviceDescription(
|
||||
name="BioMax742",
|
||||
parameters=[
|
||||
DESCRIPTIONS[CompitParameter.BOILER_TARGET_TEMPERATURE],
|
||||
],
|
||||
),
|
||||
210: CompitDeviceDescription(
|
||||
name="EL750",
|
||||
parameters=[
|
||||
DESCRIPTIONS[CompitParameter.BOILER_TARGET_TEMPERATURE],
|
||||
],
|
||||
),
|
||||
36: CompitDeviceDescription(
|
||||
name="BioMax742",
|
||||
parameters=[
|
||||
DESCRIPTIONS[CompitParameter.BOILER_TARGET_TEMPERATURE_CONST],
|
||||
],
|
||||
),
|
||||
75: CompitDeviceDescription(
|
||||
name="BioMax772",
|
||||
parameters=[
|
||||
DESCRIPTIONS[CompitParameter.BOILER_TARGET_TEMPERATURE_CONST],
|
||||
],
|
||||
),
|
||||
201: CompitDeviceDescription(
|
||||
name="BioMax775",
|
||||
parameters=[
|
||||
DESCRIPTIONS[CompitParameter.BOILER_TARGET_TEMPERATURE_CONST],
|
||||
],
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: CompitConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Compit number entities from a config entry."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
CompitNumber(
|
||||
coordinator,
|
||||
device_id,
|
||||
device_definition.name,
|
||||
entity_description,
|
||||
)
|
||||
for device_id, device in coordinator.connector.all_devices.items()
|
||||
if (device_definition := DEVICE_DEFINITIONS.get(device.definition.code))
|
||||
for entity_description in device_definition.parameters
|
||||
)
|
||||
|
||||
|
||||
class CompitNumber(CoordinatorEntity[CompitDataUpdateCoordinator], NumberEntity):
|
||||
"""Representation of a Compit number entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: CompitDataUpdateCoordinator,
|
||||
device_id: int,
|
||||
device_name: str,
|
||||
entity_description: NumberEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the number entity."""
|
||||
super().__init__(coordinator)
|
||||
self.device_id = device_id
|
||||
self.entity_description = entity_description
|
||||
self._attr_unique_id = f"{device_id}_{entity_description.key}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, str(device_id))},
|
||||
name=device_name,
|
||||
manufacturer=MANUFACTURER_NAME,
|
||||
model=device_name,
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return (
|
||||
super().available
|
||||
and self.coordinator.connector.get_device(self.device_id) is not None
|
||||
)
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the current value."""
|
||||
value = self.coordinator.connector.get_current_value(
|
||||
self.device_id, CompitParameter(self.entity_description.key)
|
||||
)
|
||||
if value is None or isinstance(value, str):
|
||||
return None
|
||||
return value
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Set new value."""
|
||||
await self.coordinator.connector.set_device_parameter(
|
||||
self.device_id, CompitParameter(self.entity_description.key), value
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
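async_setup_entry above expands a device-code catalogue into one entity per listed parameter, skipping devices without a catalogue entry via a walrus filter inside the comprehension. The same pattern in isolation, with illustrative device codes and a plain tuple standing in for the entity class.

"""Sketch: expand a per-device-code catalogue into one entity per parameter."""

from dataclasses import dataclass


@dataclass(frozen=True)
class DeviceDescription:
    name: str
    parameters: list[str]


CATALOGUE: dict[int, DeviceDescription] = {
    7: DeviceDescription("Nano One", ["comfort", "eco"]),
    3: DeviceDescription("R810", ["const"]),
}

# Devices discovered at runtime: device id -> device code.
discovered = {101: 7, 102: 3, 103: 999}  # 999 has no catalogue entry, so it is skipped

entities = [
    (device_id, description.name, parameter)
    for device_id, code in discovered.items()
    if (description := CATALOGUE.get(code))
    for parameter in description.parameters
]
print(entities)
# [(101, 'Nano One', 'comfort'), (101, 'Nano One', 'eco'), (102, 'R810', 'const')]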
@@ -33,6 +33,44 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"number": {
|
||||
"boiler_target_temperature": {
|
||||
"name": "Boiler target temperature"
|
||||
},
|
||||
"boiler_target_temperature_const": {
|
||||
"name": "Constant boiler target temperature"
|
||||
},
|
||||
"heating_target_temperature_const": {
|
||||
"name": "Constant heating target temperature"
|
||||
},
|
||||
"mixer_target_temperature": {
|
||||
"name": "Mixer target temperature"
|
||||
},
|
||||
"mixer_target_temperature_zone": {
|
||||
"name": "Mixer {zone} target temperature"
|
||||
},
|
||||
"target_temperature_comfort": {
|
||||
"name": "Target comfort temperature"
|
||||
},
|
||||
"target_temperature_const": {
|
||||
"name": "Constant target temperature"
|
||||
},
|
||||
"target_temperature_eco": {
|
||||
"name": "Target eco temperature"
|
||||
},
|
||||
"target_temperature_eco_cooling": {
|
||||
"name": "Target eco cooling temperature"
|
||||
},
|
||||
"target_temperature_eco_winter": {
|
||||
"name": "Target eco winter temperature"
|
||||
},
|
||||
"target_temperature_holiday": {
|
||||
"name": "Target holiday temperature"
|
||||
},
|
||||
"target_temperature_out_of_home": {
|
||||
"name": "Target out of home temperature"
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"aero_by_pass": {
|
||||
"name": "Bypass",
|
||||
|
||||
homeassistant/components/compit/water_heater.py (new file, 315 lines)
@@ -0,0 +1,315 @@
|
||||
"""Water heater platform for Compit integration."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from compit_inext_api.consts import CompitParameter
|
||||
from propcache.api import cached_property
|
||||
|
||||
from homeassistant.components.water_heater import (
|
||||
STATE_ECO,
|
||||
STATE_OFF,
|
||||
STATE_ON,
|
||||
STATE_PERFORMANCE,
|
||||
WaterHeaterEntity,
|
||||
WaterHeaterEntityDescription,
|
||||
WaterHeaterEntityFeature,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, MANUFACTURER_NAME
|
||||
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
STATE_SCHEDULE = "schedule"
|
||||
COMPIT_STATE_TO_HA = {
|
||||
STATE_OFF: STATE_OFF,
|
||||
STATE_ON: STATE_PERFORMANCE,
|
||||
STATE_SCHEDULE: STATE_ECO,
|
||||
}
|
||||
HA_STATE_TO_COMPIT = {value: key for key, value in COMPIT_STATE_TO_HA.items()}
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class CompitWaterHeaterEntityDescription(WaterHeaterEntityDescription):
|
||||
"""Class to describe a Compit water heater device."""
|
||||
|
||||
min_temp: float
|
||||
max_temp: float
|
||||
supported_features: WaterHeaterEntityFeature
|
||||
supports_current_temperature: bool = True
|
||||
|
||||
|
||||
DEVICE_DEFINITIONS: dict[int, CompitWaterHeaterEntityDescription] = {
|
||||
34: CompitWaterHeaterEntityDescription(
|
||||
key="r470",
|
||||
min_temp=0.0,
|
||||
max_temp=75.0,
|
||||
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
|
||||
| WaterHeaterEntityFeature.ON_OFF
|
||||
| WaterHeaterEntityFeature.OPERATION_MODE,
|
||||
),
|
||||
91: CompitWaterHeaterEntityDescription(
|
||||
key="R770RS / R771RS",
|
||||
min_temp=30.0,
|
||||
max_temp=80.0,
|
||||
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
|
||||
| WaterHeaterEntityFeature.ON_OFF
|
||||
| WaterHeaterEntityFeature.OPERATION_MODE,
|
||||
),
|
||||
92: CompitWaterHeaterEntityDescription(
|
||||
key="r490",
|
||||
min_temp=30.0,
|
||||
max_temp=80.0,
|
||||
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
|
||||
| WaterHeaterEntityFeature.ON_OFF
|
||||
| WaterHeaterEntityFeature.OPERATION_MODE,
|
||||
),
|
||||
215: CompitWaterHeaterEntityDescription(
|
||||
key="R480",
|
||||
min_temp=30.0,
|
||||
max_temp=80.0,
|
||||
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
|
||||
| WaterHeaterEntityFeature.ON_OFF
|
||||
| WaterHeaterEntityFeature.OPERATION_MODE,
|
||||
),
|
||||
222: CompitWaterHeaterEntityDescription(
|
||||
key="R377B",
|
||||
min_temp=30.0,
|
||||
max_temp=75.0,
|
||||
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
|
||||
| WaterHeaterEntityFeature.ON_OFF
|
||||
| WaterHeaterEntityFeature.OPERATION_MODE,
|
||||
),
|
||||
224: CompitWaterHeaterEntityDescription(
|
||||
key="R 900",
|
||||
min_temp=0.0,
|
||||
max_temp=70.0,
|
||||
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
|
||||
| WaterHeaterEntityFeature.ON_OFF
|
||||
| WaterHeaterEntityFeature.OPERATION_MODE,
|
||||
),
|
||||
36: CompitWaterHeaterEntityDescription(
|
||||
key="BioMax742",
|
||||
min_temp=0.0,
|
||||
max_temp=75.0,
|
||||
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
|
||||
| WaterHeaterEntityFeature.ON_OFF
|
||||
| WaterHeaterEntityFeature.OPERATION_MODE,
|
||||
),
|
||||
75: CompitWaterHeaterEntityDescription(
|
||||
key="BioMax772",
|
||||
min_temp=0.0,
|
||||
max_temp=75.0,
|
||||
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
|
||||
| WaterHeaterEntityFeature.ON_OFF
|
||||
| WaterHeaterEntityFeature.OPERATION_MODE,
|
||||
),
|
||||
201: CompitWaterHeaterEntityDescription(
|
||||
key="BioMax775",
|
||||
min_temp=0.0,
|
||||
max_temp=75.0,
|
||||
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
|
||||
| WaterHeaterEntityFeature.ON_OFF
|
||||
| WaterHeaterEntityFeature.OPERATION_MODE,
|
||||
),
|
||||
210: CompitWaterHeaterEntityDescription(
|
||||
key="EL750",
|
||||
min_temp=30.0,
|
||||
max_temp=80.0,
|
||||
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
|
||||
| WaterHeaterEntityFeature.ON_OFF
|
||||
| WaterHeaterEntityFeature.OPERATION_MODE,
|
||||
),
|
||||
44: CompitWaterHeaterEntityDescription(
|
||||
key="SolarComp 951",
|
||||
min_temp=0.0,
|
||||
max_temp=85.0,
|
||||
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE,
|
||||
supports_current_temperature=False,
|
||||
),
|
||||
45: CompitWaterHeaterEntityDescription(
|
||||
key="SolarComp971",
|
||||
min_temp=0.0,
|
||||
max_temp=75.0,
|
||||
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE,
|
||||
supports_current_temperature=False,
|
||||
),
|
||||
99: CompitWaterHeaterEntityDescription(
|
||||
key="SolarComp971C",
|
||||
min_temp=0.0,
|
||||
max_temp=75.0,
|
||||
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE,
|
||||
supports_current_temperature=False,
|
||||
),
|
||||
53: CompitWaterHeaterEntityDescription(
|
||||
key="R350.CWU",
|
||||
min_temp=0.0,
|
||||
max_temp=80.0,
|
||||
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE,
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: CompitConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Compit water heater entities from a config entry."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities(
|
||||
CompitWaterHeater(coordinator, device_id, entity_description)
|
||||
for device_id, device in coordinator.connector.all_devices.items()
|
||||
if (entity_description := DEVICE_DEFINITIONS.get(device.definition.code))
|
||||
)
|
||||
|
||||
|
||||
class CompitWaterHeater(
|
||||
CoordinatorEntity[CompitDataUpdateCoordinator], WaterHeaterEntity
|
||||
):
|
||||
"""Representation of a Compit Water Heater."""
|
||||
|
||||
_attr_target_temperature_step = PRECISION_WHOLE
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
entity_description: CompitWaterHeaterEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: CompitDataUpdateCoordinator,
|
||||
device_id: int,
|
||||
entity_description: CompitWaterHeaterEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the water heater."""
|
||||
super().__init__(coordinator)
|
||||
self.device_id = device_id
|
||||
self.entity_description = entity_description
|
||||
self._attr_unique_id = f"{device_id}_{entity_description.key}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, str(device_id))},
|
||||
name=entity_description.key,
|
||||
manufacturer=MANUFACTURER_NAME,
|
||||
model=entity_description.key,
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return (
|
||||
super().available
|
||||
and self.coordinator.connector.get_device(self.device_id) is not None
|
||||
)
|
||||
|
||||
@cached_property
|
||||
def min_temp(self) -> float:
|
||||
"""Return the minimum temperature."""
|
||||
return self.entity_description.min_temp
|
||||
|
||||
@cached_property
|
||||
def max_temp(self) -> float:
|
||||
"""Return the maximum temperature."""
|
||||
return self.entity_description.max_temp
|
||||
|
||||
@cached_property
|
||||
def supported_features(self) -> WaterHeaterEntityFeature:
|
||||
"""Return the supported features."""
|
||||
return self.entity_description.supported_features
|
||||
|
||||
@cached_property
|
||||
def operation_list(self) -> list[str] | None:
|
||||
"""Return the list of available operation modes."""
|
||||
if (
|
||||
self.entity_description.supported_features
|
||||
& WaterHeaterEntityFeature.OPERATION_MODE
|
||||
):
|
||||
return [STATE_OFF, STATE_PERFORMANCE, STATE_ECO]
|
||||
return None
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the set target temperature."""
|
||||
value = self.coordinator.connector.get_current_value(
|
||||
self.device_id, CompitParameter.DHW_TARGET_TEMPERATURE
|
||||
)
|
||||
|
||||
if isinstance(value, float):
|
||||
return value
|
||||
|
||||
return None
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
if self.entity_description.supports_current_temperature is False:
|
||||
return None
|
||||
|
||||
value = self.coordinator.connector.get_current_value(
|
||||
self.device_id, CompitParameter.DHW_CURRENT_TEMPERATURE
|
||||
)
|
||||
|
||||
if isinstance(value, float):
|
||||
return value
|
||||
|
||||
return None
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
temperature = kwargs.get(ATTR_TEMPERATURE)
|
||||
|
||||
if temperature is None:
|
||||
return
|
||||
|
||||
self._attr_target_temperature = temperature
|
||||
|
||||
await self.coordinator.connector.set_device_parameter(
|
||||
self.device_id,
|
||||
CompitParameter.DHW_TARGET_TEMPERATURE,
|
||||
float(temperature),
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the water heater on."""
|
||||
await self.coordinator.connector.select_device_option(
|
||||
self.device_id,
|
||||
CompitParameter.DHW_ON_OFF,
|
||||
HA_STATE_TO_COMPIT[STATE_PERFORMANCE],
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the water heater off."""
|
||||
await self.coordinator.connector.select_device_option(
|
||||
self.device_id,
|
||||
CompitParameter.DHW_ON_OFF,
|
||||
HA_STATE_TO_COMPIT[STATE_OFF],
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_set_operation_mode(self, operation_mode: str) -> None:
|
||||
"""Set new operation mode."""
|
||||
await self.coordinator.connector.select_device_option(
|
||||
self.device_id,
|
||||
CompitParameter.DHW_ON_OFF,
|
||||
HA_STATE_TO_COMPIT[operation_mode],
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
@property
|
||||
def current_operation(self) -> str | None:
|
||||
"""Return the current operation mode."""
|
||||
on_off = self.coordinator.connector.get_current_option(
|
||||
self.device_id, CompitParameter.DHW_ON_OFF
|
||||
)
|
||||
|
||||
if on_off is None:
|
||||
return None
|
||||
|
||||
return COMPIT_STATE_TO_HA.get(on_off)
|
||||
@@ -34,20 +34,33 @@ CONTROL4_CATEGORY = "comfort"
|
||||
# Control4 variable names
CONTROL4_HVAC_STATE = "HVAC_STATE"
CONTROL4_HVAC_MODE = "HVAC_MODE"
CONTROL4_CURRENT_TEMPERATURE = "TEMPERATURE_F"
CONTROL4_HUMIDITY = "HUMIDITY"
CONTROL4_COOL_SETPOINT = "COOL_SETPOINT_F"
CONTROL4_HEAT_SETPOINT = "HEAT_SETPOINT_F"
CONTROL4_SCALE = "SCALE"  # "FAHRENHEIT" or "CELSIUS"

# Temperature variables - Fahrenheit
CONTROL4_CURRENT_TEMPERATURE_F = "TEMPERATURE_F"
CONTROL4_COOL_SETPOINT_F = "COOL_SETPOINT_F"
CONTROL4_HEAT_SETPOINT_F = "HEAT_SETPOINT_F"

# Temperature variables - Celsius
CONTROL4_CURRENT_TEMPERATURE_C = "TEMPERATURE_C"
CONTROL4_COOL_SETPOINT_C = "COOL_SETPOINT_C"
CONTROL4_HEAT_SETPOINT_C = "HEAT_SETPOINT_C"

CONTROL4_FAN_MODE = "FAN_MODE"
CONTROL4_FAN_MODES_LIST = "FAN_MODES_LIST"

VARIABLES_OF_INTEREST = {
    CONTROL4_HVAC_STATE,
    CONTROL4_HVAC_MODE,
    CONTROL4_CURRENT_TEMPERATURE,
    CONTROL4_HUMIDITY,
    CONTROL4_COOL_SETPOINT,
    CONTROL4_HEAT_SETPOINT,
    CONTROL4_CURRENT_TEMPERATURE_F,
    CONTROL4_CURRENT_TEMPERATURE_C,
    CONTROL4_COOL_SETPOINT_F,
    CONTROL4_HEAT_SETPOINT_F,
    CONTROL4_COOL_SETPOINT_C,
    CONTROL4_HEAT_SETPOINT_C,
    CONTROL4_SCALE,
    CONTROL4_FAN_MODE,
    CONTROL4_FAN_MODES_LIST,
}
|
||||
@@ -156,7 +169,6 @@ class Control4Climate(Control4Entity, ClimateEntity):
|
||||
"""Control4 climate entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_temperature_unit = UnitOfTemperature.FAHRENHEIT
|
||||
_attr_translation_key = "thermostat"
|
||||
_attr_hvac_modes = [HVACMode.OFF, HVACMode.HEAT, HVACMode.COOL, HVACMode.HEAT_COOL]
|
||||
|
||||
@@ -213,13 +225,45 @@ class Control4Climate(Control4Entity, ClimateEntity):
|
||||
features |= ClimateEntityFeature.FAN_MODE
|
||||
return features
|
||||
|
||||
@property
|
||||
def temperature_unit(self) -> str:
|
||||
"""Return the temperature unit based on the thermostat's SCALE setting."""
|
||||
data = self._thermostat_data
|
||||
if data is None:
|
||||
return UnitOfTemperature.CELSIUS # Default per HA conventions
|
||||
if data.get(CONTROL4_SCALE) == "FAHRENHEIT":
|
||||
return UnitOfTemperature.FAHRENHEIT
|
||||
return UnitOfTemperature.CELSIUS
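    # Editorial note: Home Assistant converts displayed temperatures based on the unit
    # reported here, so the setpoint helpers below only need to return raw values in
    # the thermostat's native scale (the *_C or *_F variables) without converting them.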
|
||||
|
||||
@property
|
||||
def _cool_setpoint(self) -> float | None:
|
||||
"""Return the cooling setpoint from the appropriate variable."""
|
||||
data = self._thermostat_data
|
||||
if data is None:
|
||||
return None
|
||||
if self.temperature_unit == UnitOfTemperature.CELSIUS:
|
||||
return data.get(CONTROL4_COOL_SETPOINT_C)
|
||||
return data.get(CONTROL4_COOL_SETPOINT_F)
|
||||
|
||||
@property
|
||||
def _heat_setpoint(self) -> float | None:
|
||||
"""Return the heating setpoint from the appropriate variable."""
|
||||
data = self._thermostat_data
|
||||
if data is None:
|
||||
return None
|
||||
if self.temperature_unit == UnitOfTemperature.CELSIUS:
|
||||
return data.get(CONTROL4_HEAT_SETPOINT_C)
|
||||
return data.get(CONTROL4_HEAT_SETPOINT_F)
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
data = self._thermostat_data
|
||||
if data is None:
|
||||
return None
|
||||
return data.get(CONTROL4_CURRENT_TEMPERATURE)
|
||||
if self.temperature_unit == UnitOfTemperature.CELSIUS:
|
||||
return data.get(CONTROL4_CURRENT_TEMPERATURE_C)
|
||||
return data.get(CONTROL4_CURRENT_TEMPERATURE_F)
|
||||
|
||||
@property
|
||||
def current_humidity(self) -> int | None:
|
||||
@@ -257,34 +301,25 @@ class Control4Climate(Control4Entity, ClimateEntity):
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the target temperature."""
|
||||
data = self._thermostat_data
|
||||
if data is None:
|
||||
return None
|
||||
hvac_mode = self.hvac_mode
|
||||
if hvac_mode == HVACMode.COOL:
|
||||
return data.get(CONTROL4_COOL_SETPOINT)
|
||||
return self._cool_setpoint
|
||||
if hvac_mode == HVACMode.HEAT:
|
||||
return data.get(CONTROL4_HEAT_SETPOINT)
|
||||
return self._heat_setpoint
|
||||
return None
|
||||
|
||||
@property
|
||||
def target_temperature_high(self) -> float | None:
|
||||
"""Return the high target temperature for auto mode."""
|
||||
data = self._thermostat_data
|
||||
if data is None:
|
||||
return None
|
||||
if self.hvac_mode == HVACMode.HEAT_COOL:
|
||||
return data.get(CONTROL4_COOL_SETPOINT)
|
||||
return self._cool_setpoint
|
||||
return None
|
||||
|
||||
@property
|
||||
def target_temperature_low(self) -> float | None:
|
||||
"""Return the low target temperature for auto mode."""
|
||||
data = self._thermostat_data
|
||||
if data is None:
|
||||
return None
|
||||
if self.hvac_mode == HVACMode.HEAT_COOL:
|
||||
return data.get(CONTROL4_HEAT_SETPOINT)
|
||||
return self._heat_setpoint
|
||||
return None
|
||||
|
||||
@property
|
||||
@@ -326,15 +361,27 @@ class Control4Climate(Control4Entity, ClimateEntity):
|
||||
# Handle temperature range for auto mode
|
||||
if self.hvac_mode == HVACMode.HEAT_COOL:
|
||||
if low_temp is not None:
|
||||
await c4_climate.setHeatSetpointF(low_temp)
|
||||
if self.temperature_unit == UnitOfTemperature.CELSIUS:
|
||||
await c4_climate.setHeatSetpointC(low_temp)
|
||||
else:
|
||||
await c4_climate.setHeatSetpointF(low_temp)
|
||||
if high_temp is not None:
|
||||
await c4_climate.setCoolSetpointF(high_temp)
|
||||
if self.temperature_unit == UnitOfTemperature.CELSIUS:
|
||||
await c4_climate.setCoolSetpointC(high_temp)
|
||||
else:
|
||||
await c4_climate.setCoolSetpointF(high_temp)
|
||||
# Handle single temperature setpoint
|
||||
elif temp is not None:
|
||||
if self.hvac_mode == HVACMode.COOL:
|
||||
await c4_climate.setCoolSetpointF(temp)
|
||||
if self.temperature_unit == UnitOfTemperature.CELSIUS:
|
||||
await c4_climate.setCoolSetpointC(temp)
|
||||
else:
|
||||
await c4_climate.setCoolSetpointF(temp)
|
||||
elif self.hvac_mode == HVACMode.HEAT:
|
||||
await c4_climate.setHeatSetpointF(temp)
|
||||
if self.temperature_unit == UnitOfTemperature.CELSIUS:
|
||||
await c4_climate.setHeatSetpointC(temp)
|
||||
else:
|
||||
await c4_climate.setHeatSetpointF(temp)
|
||||
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
|
||||
@@ -598,7 +598,7 @@ class DefaultAgent(ConversationEntity):
|
||||
error_response_type, error_response_args = _get_match_error_response(
|
||||
self.hass, match_error
|
||||
)
|
||||
return _make_error_result(
|
||||
intent_response = _make_error_result(
|
||||
language,
|
||||
intent.IntentResponseErrorCode.NO_VALID_TARGETS,
|
||||
self._get_error_text(
|
||||
@@ -609,7 +609,7 @@ class DefaultAgent(ConversationEntity):
|
||||
# Intent was valid and entities matched constraints, but an error
|
||||
# occurred during handling.
|
||||
_LOGGER.exception("Intent handling error")
|
||||
return _make_error_result(
|
||||
intent_response = _make_error_result(
|
||||
language,
|
||||
intent.IntentResponseErrorCode.FAILED_TO_HANDLE,
|
||||
self._get_error_text(
|
||||
@@ -618,7 +618,7 @@ class DefaultAgent(ConversationEntity):
|
||||
)
|
||||
except intent.IntentUnexpectedError:
|
||||
_LOGGER.exception("Unexpected intent error")
|
||||
return _make_error_result(
|
||||
intent_response = _make_error_result(
|
||||
language,
|
||||
intent.IntentResponseErrorCode.UNKNOWN,
|
||||
self._get_error_text(ErrorKey.HANDLE_ERROR, lang_intents),
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/conversation",
|
||||
"integration_type": "entity",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.28"]
|
||||
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.2.13"]
|
||||
}
|
||||
|
||||
@@ -70,6 +70,10 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
|
||||
|
||||
_attr_name = None
|
||||
|
||||
# TODO(2026.7.0): When support for unknown fan speeds is removed, delete this variable.
|
||||
# Holds unknown fan speeds we have already warned about.
|
||||
warned_unknown_fan_speeds: set[str] = set()
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: CoolmasterDataUpdateCoordinator,
|
||||
@@ -125,8 +129,20 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
|
||||
def fan_mode(self):
|
||||
"""Return the fan setting."""
|
||||
|
||||
# Normalize to lowercase for lookup, and pass unknown values through.
|
||||
return CM_TO_HA_FAN.get(self._unit.fan_speed.lower(), self._unit.fan_speed)
|
||||
# Normalize to lowercase for lookup, and pass unknown lowercase values through.
|
||||
fan_speed_lower = self._unit.fan_speed.lower()
|
||||
if fan_speed_lower not in CM_TO_HA_FAN:
|
||||
# TODO(2026.7.0): Stop supporting unknown fan speeds.
|
||||
if fan_speed_lower not in CoolmasterClimate.warned_unknown_fan_speeds:
|
||||
CoolmasterClimate.warned_unknown_fan_speeds.add(fan_speed_lower)
|
||||
_LOGGER.warning(
|
||||
"Detected unknown fan speed value from HVAC unit: %s. "
|
||||
"Support for unknown fan speeds will be removed in 2026.7.0",
|
||||
fan_speed_lower,
|
||||
)
|
||||
return fan_speed_lower
|
||||
|
||||
return CM_TO_HA_FAN[fan_speed_lower]
|
||||
|
||||
@property
|
||||
def fan_modes(self):
|
||||
|
||||
@@ -131,23 +131,29 @@ class CyncLightEntity(CyncBaseEntity, LightEntity):
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Process an action on the light."""
|
||||
if not kwargs:
|
||||
await self._device.turn_on()
|
||||
converted_brightness: int | None = None
|
||||
converted_color_temp: int | None = None
|
||||
rgb: tuple[int, int, int] | None = None
|
||||
|
||||
elif kwargs.get(ATTR_COLOR_TEMP_KELVIN) is not None:
|
||||
if kwargs.get(ATTR_COLOR_TEMP_KELVIN) is not None:
|
||||
color_temp = kwargs.get(ATTR_COLOR_TEMP_KELVIN)
|
||||
converted_color_temp = self._normalize_color_temp(color_temp)
|
||||
|
||||
await self._device.set_color_temp(converted_color_temp)
|
||||
elif kwargs.get(ATTR_RGB_COLOR) is not None:
|
||||
rgb = kwargs.get(ATTR_RGB_COLOR)
|
||||
elif self.color_mode == ColorMode.RGB:
|
||||
rgb = self._device.rgb
|
||||
elif self.color_mode == ColorMode.COLOR_TEMP:
|
||||
converted_color_temp = self._device.color_temp
|
||||
|
||||
await self._device.set_rgb(rgb)
|
||||
elif kwargs.get(ATTR_BRIGHTNESS) is not None:
|
||||
if kwargs.get(ATTR_BRIGHTNESS) is not None:
|
||||
brightness = kwargs.get(ATTR_BRIGHTNESS)
|
||||
converted_brightness = self._normalize_brightness(brightness)
|
||||
elif self.color_mode != ColorMode.ONOFF:
|
||||
converted_brightness = self._device.brightness
|
||||
|
||||
await self._device.set_brightness(converted_brightness)
|
||||
await self._device.set_combo(
|
||||
True, converted_brightness, converted_color_temp, rgb
|
||||
)
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn off the light."""
|
||||
|
||||
@@ -2,9 +2,12 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pydaikin.daikin_base import Appliance
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
ATTR_FAN_MODE,
|
||||
ATTR_HVAC_MODE,
|
||||
@@ -21,6 +24,7 @@ from homeassistant.components.climate import (
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import (
|
||||
@@ -29,12 +33,19 @@ from .const import (
|
||||
ATTR_STATE_OFF,
|
||||
ATTR_STATE_ON,
|
||||
ATTR_TARGET_TEMPERATURE,
|
||||
DOMAIN,
|
||||
ZONE_NAME_UNCONFIGURED,
|
||||
)
|
||||
from .coordinator import DaikinConfigEntry, DaikinCoordinator
|
||||
from .entity import DaikinEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type DaikinZone = Sequence[str | int]
|
||||
|
||||
DAIKIN_ZONE_TEMP_HEAT = "lztemp_h"
|
||||
DAIKIN_ZONE_TEMP_COOL = "lztemp_c"
|
||||
|
||||
|
||||
HA_STATE_TO_DAIKIN = {
|
||||
HVACMode.FAN_ONLY: "fan",
|
||||
@@ -78,6 +89,70 @@ HA_ATTR_TO_DAIKIN = {
|
||||
}
|
||||
|
||||
DAIKIN_ATTR_ADVANCED = "adv"
ZONE_TEMPERATURE_WINDOW = 2


def _zone_error(
    translation_key: str, placeholders: dict[str, str] | None = None
) -> HomeAssistantError:
    """Return a Home Assistant error with Daikin translation info."""
    return HomeAssistantError(
        translation_domain=DOMAIN,
        translation_key=translation_key,
        translation_placeholders=placeholders,
    )


def _zone_is_configured(zone: DaikinZone) -> bool:
    """Return True if the Daikin zone represents a configured zone."""
    if not zone:
        return False
    return zone[0] != ZONE_NAME_UNCONFIGURED


def _zone_temperature_lists(device: Appliance) -> tuple[list[str], list[str]]:
    """Return the decoded zone temperature lists."""
    try:
        heating = device.represent(DAIKIN_ZONE_TEMP_HEAT)[1]
        cooling = device.represent(DAIKIN_ZONE_TEMP_COOL)[1]
    except AttributeError:
        return ([], [])
    return (list(heating or []), list(cooling or []))


def _supports_zone_temperature_control(device: Appliance) -> bool:
    """Return True if the device exposes zone temperature settings."""
    zones = device.zones
    if not zones:
        return False
    heating, cooling = _zone_temperature_lists(device)
    return bool(
        heating
        and cooling
        and len(heating) >= len(zones)
        and len(cooling) >= len(zones)
    )


def _system_target_temperature(device: Appliance) -> float | None:
    """Return the system target temperature when available."""
    target = device.target_temperature
    if target is None:
        return None
    try:
        return float(target)
    except (TypeError, ValueError):
        return None


def _zone_temperature_from_list(values: list[str], zone_id: int) -> float | None:
    """Return the parsed temperature for a zone from a Daikin list."""
    if zone_id >= len(values):
        return None
    try:
        return float(values[zone_id])
    except (TypeError, ValueError):
        return None
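

# Illustrative example (editorial, not part of the integration): the zone temperature
# lists come back from pydaikin as strings, so parsing is defensive. For a hypothetical
# decoded heating list ["20", "22", "-"]:
#
#     _zone_temperature_from_list(["20", "22", "-"], 0)  -> 20.0
#     _zone_temperature_from_list(["20", "22", "-"], 2)  -> None  (not a number)
#     _zone_temperature_from_list(["20", "22", "-"], 5)  -> None  (index out of range)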
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -86,8 +161,16 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Daikin climate based on config_entry."""
|
||||
daikin_api = entry.runtime_data
|
||||
async_add_entities([DaikinClimate(daikin_api)])
|
||||
coordinator = entry.runtime_data
|
||||
entities: list[ClimateEntity] = [DaikinClimate(coordinator)]
|
||||
if _supports_zone_temperature_control(coordinator.device):
|
||||
zones = coordinator.device.zones or []
|
||||
entities.extend(
|
||||
DaikinZoneClimate(coordinator, zone_id)
|
||||
for zone_id, zone in enumerate(zones)
|
||||
if _zone_is_configured(zone)
|
||||
)
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
def format_target_temperature(target_temperature: float) -> str:
|
||||
@@ -284,3 +367,130 @@ class DaikinClimate(DaikinEntity, ClimateEntity):
|
||||
{HA_ATTR_TO_DAIKIN[ATTR_HVAC_MODE]: HA_STATE_TO_DAIKIN[HVACMode.OFF]}
|
||||
)
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
|
||||
class DaikinZoneClimate(DaikinEntity, ClimateEntity):
|
||||
"""Representation of a Daikin zone temperature controller."""
|
||||
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_has_entity_name = True
|
||||
_attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
_attr_target_temperature_step = 1
|
||||
|
||||
def __init__(self, coordinator: DaikinCoordinator, zone_id: int) -> None:
|
||||
"""Initialize the zone climate entity."""
|
||||
super().__init__(coordinator)
|
||||
self._zone_id = zone_id
|
||||
self._attr_unique_id = f"{self.device.mac}-zone{zone_id}-temperature"
|
||||
zone_name = self.device.zones[self._zone_id][0]
|
||||
self._attr_name = f"{zone_name} temperature"
|
||||
|
||||
@property
|
||||
def hvac_modes(self) -> list[HVACMode]:
|
||||
"""Return the hvac modes (mirrors the main unit)."""
|
||||
return [self.hvac_mode]
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
"""Return the current HVAC mode."""
|
||||
daikin_mode = self.device.represent(HA_ATTR_TO_DAIKIN[ATTR_HVAC_MODE])[1]
|
||||
return DAIKIN_TO_HA_STATE.get(daikin_mode, HVACMode.HEAT_COOL)
|
||||
|
||||
@property
|
||||
def hvac_action(self) -> HVACAction | None:
|
||||
"""Return the current HVAC action."""
|
||||
return HA_STATE_TO_CURRENT_HVAC.get(self.hvac_mode)
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the zone target temperature for the active mode."""
|
||||
heating, cooling = _zone_temperature_lists(self.device)
|
||||
mode = self.hvac_mode
|
||||
if mode == HVACMode.HEAT:
|
||||
return _zone_temperature_from_list(heating, self._zone_id)
|
||||
if mode == HVACMode.COOL:
|
||||
return _zone_temperature_from_list(cooling, self._zone_id)
|
||||
return None
|
||||
|
||||
@property
|
||||
def min_temp(self) -> float:
|
||||
"""Return the minimum selectable temperature."""
|
||||
target = _system_target_temperature(self.device)
|
||||
if target is None:
|
||||
return super().min_temp
|
||||
return target - ZONE_TEMPERATURE_WINDOW
|
||||
|
||||
@property
|
||||
def max_temp(self) -> float:
|
||||
"""Return the maximum selectable temperature."""
|
||||
target = _system_target_temperature(self.device)
|
||||
if target is None:
|
||||
return super().max_temp
|
||||
return target + ZONE_TEMPERATURE_WINDOW
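    # Worked example (editorial): with the main unit's target temperature at 22.0 °C
    # and ZONE_TEMPERATURE_WINDOW = 2, each zone can be set between 20.0 and 24.0 °C.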
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if the entity is available."""
|
||||
return (
|
||||
super().available
|
||||
and _supports_zone_temperature_control(self.device)
|
||||
and _system_target_temperature(self.device) is not None
|
||||
)
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
"""Return additional metadata."""
|
||||
return {"zone_id": self._zone_id}
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set the zone temperature."""
|
||||
if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="zone_temperature_missing",
|
||||
)
|
||||
zones = self.device.zones
|
||||
if not zones or not _supports_zone_temperature_control(self.device):
|
||||
raise _zone_error("zone_parameters_unavailable")
|
||||
|
||||
try:
|
||||
zone = zones[self._zone_id]
|
||||
except (IndexError, TypeError) as err:
|
||||
raise _zone_error(
|
||||
"zone_missing",
|
||||
{
|
||||
"zone_id": str(self._zone_id),
|
||||
"max_zone": str(len(zones) - 1),
|
||||
},
|
||||
) from err
|
||||
|
||||
if not _zone_is_configured(zone):
|
||||
raise _zone_error("zone_inactive", {"zone_id": str(self._zone_id)})
|
||||
|
||||
temperature_value = float(temperature)
|
||||
target = _system_target_temperature(self.device)
|
||||
if target is None:
|
||||
raise _zone_error("zone_parameters_unavailable")
|
||||
|
||||
mode = self.hvac_mode
|
||||
if mode == HVACMode.HEAT:
|
||||
zone_key = DAIKIN_ZONE_TEMP_HEAT
|
||||
elif mode == HVACMode.COOL:
|
||||
zone_key = DAIKIN_ZONE_TEMP_COOL
|
||||
else:
|
||||
raise _zone_error("zone_hvac_mode_unsupported")
|
||||
|
||||
zone_value = str(round(temperature_value))
|
||||
try:
|
||||
await self.device.set_zone(self._zone_id, zone_key, zone_value)
|
||||
except (AttributeError, KeyError, NotImplementedError, TypeError) as err:
|
||||
raise _zone_error("zone_set_failed") from err
|
||||
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Disallow changing HVAC mode via zone climate."""
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="zone_hvac_read_only",
|
||||
)
|
||||
|
||||
@@ -24,4 +24,6 @@ ATTR_STATE_OFF = "off"
|
||||
KEY_MAC = "mac"
|
||||
KEY_IP = "ip"
|
||||
|
||||
ZONE_NAME_UNCONFIGURED = "-"
|
||||
|
||||
TIMEOUT_SEC = 120
|
||||
|
||||
@@ -7,6 +7,6 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pydaikin"],
|
||||
"requirements": ["pydaikin==2.17.1"],
|
||||
"requirements": ["pydaikin==2.17.2"],
|
||||
"zeroconf": ["_dkapi._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -57,5 +57,28 @@
|
||||
"name": "Power"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"zone_hvac_mode_unsupported": {
|
||||
"message": "Zone temperature can only be changed when the main climate mode is heat or cool."
|
||||
},
|
||||
"zone_hvac_read_only": {
|
||||
"message": "Zone HVAC mode is controlled by the main climate entity."
|
||||
},
|
||||
"zone_inactive": {
|
||||
"message": "Zone {zone_id} is not active. Enable the zone on your Daikin device first."
|
||||
},
|
||||
"zone_missing": {
|
||||
"message": "Zone {zone_id} does not exist. Available zones are 0-{max_zone}."
|
||||
},
|
||||
"zone_parameters_unavailable": {
|
||||
"message": "This device does not expose the required zone temperature parameters."
|
||||
},
|
||||
"zone_set_failed": {
|
||||
"message": "Failed to set zone temperature. The device may not support this operation."
|
||||
},
|
||||
"zone_temperature_missing": {
|
||||
"message": "Provide a temperature value when adjusting a zone."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ from homeassistant.components.switch import SwitchEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import ZONE_NAME_UNCONFIGURED
|
||||
from .coordinator import DaikinConfigEntry, DaikinCoordinator
|
||||
from .entity import DaikinEntity
|
||||
|
||||
@@ -28,7 +29,7 @@ async def async_setup_entry(
|
||||
switches.extend(
|
||||
DaikinZoneSwitch(daikin_api, zone_id)
|
||||
for zone_id, zone in enumerate(zones)
|
||||
if zone[0] != "-"
|
||||
if zone[0] != ZONE_NAME_UNCONFIGURED
|
||||
)
|
||||
if daikin_api.device.support_advanced_modes:
|
||||
# It isn't possible to find out from the API responses if a specific
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["HomeControl", "Mydevolo", "MprmRest", "MprmWebsocket", "Mprm"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["devolo-home-control-api==0.19.0"],
|
||||
"zeroconf": ["_dvl-deviceapi._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
appropriate-polling:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not poll.
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not have an options flow.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not poll.
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: |
|
||||
The information provided by the discovery is not used for more than displaying the integration in the UI.
|
||||
discovery: done
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not define custom icons. All entities use device class icons.
|
||||
reconfiguration-flow:
|
||||
status: exempt
|
||||
comment: |
|
||||
No configuration besides credentials.
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration doesn't have any cases where raising an issue is needed.
|
||||
stale-devices: done
|
||||
# Platinum
|
||||
async-dependency: todo
|
||||
inject-websession:
|
||||
status: exempt
|
||||
comment: |
|
||||
Integration does not use a web session.
|
||||
strict-typing: done
|
||||
@@ -8,7 +8,7 @@ import voluptuous as vol
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DOMAIN
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import ConfigEntrySelector
|
||||
|
||||
@@ -47,13 +47,9 @@ def get_config_entry(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_selected",
|
||||
)
|
||||
return entries[0]
|
||||
if not (entry := hass.config_entries.async_get_entry(entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_found",
|
||||
)
|
||||
return entry
|
||||
entry_id = entries[0].entry_id
|
||||
|
||||
return service.async_get_config_entry(hass, DOMAIN, entry_id)
|
||||
|
||||
|
||||
async def update_domain_service(call: ServiceCall) -> None:
|
||||
|
||||
@@ -10,7 +10,6 @@ from typing import Final
|
||||
from easyenergy import Electricity, Gas, VatOption
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
ServiceCall,
|
||||
@@ -19,7 +18,7 @@ from homeassistant.core import (
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import selector
|
||||
from homeassistant.helpers import selector, service
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import DOMAIN
|
||||
@@ -88,28 +87,9 @@ def __serialize_prices(prices: list[dict[str, float | datetime]]) -> ServiceResp
|
||||
|
||||
def __get_coordinator(call: ServiceCall) -> EasyEnergyDataUpdateCoordinator:
|
||||
"""Get the coordinator from the entry."""
|
||||
entry_id: str = call.data[ATTR_CONFIG_ENTRY]
|
||||
entry: EasyEnergyConfigEntry | None = call.hass.config_entries.async_get_entry(
|
||||
entry_id
|
||||
entry: EasyEnergyConfigEntry = service.async_get_config_entry(
|
||||
call.hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY]
|
||||
)
|
||||
|
||||
if not entry:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_config_entry",
|
||||
translation_placeholders={
|
||||
"config_entry": entry_id,
|
||||
},
|
||||
)
|
||||
if entry.state != ConfigEntryState.LOADED:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="unloaded_config_entry",
|
||||
translation_placeholders={
|
||||
"config_entry": entry.title,
|
||||
},
|
||||
)
|
||||
|
||||
return entry.runtime_data
|
||||
|
||||
|
||||
|
||||
@@ -44,14 +44,8 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"invalid_config_entry": {
|
||||
"message": "Invalid config entry provided. Got {config_entry}"
|
||||
},
|
||||
"invalid_date": {
|
||||
"message": "Invalid date provided. Got {date}"
|
||||
},
|
||||
"unloaded_config_entry": {
|
||||
"message": "Invalid config entry provided. {config_entry} is not loaded."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
|
||||
@@ -136,12 +136,12 @@ class EcoNetWaterHeater(EcoNetEntity[WaterHeater], WaterHeaterEntity):
|
||||
return self.water_heater.set_point
|
||||
|
||||
@property
|
||||
def min_temp(self):
|
||||
def min_temp(self) -> float:
|
||||
"""Return the minimum temperature."""
|
||||
return self.water_heater.set_point_limits[0]
|
||||
|
||||
@property
|
||||
def max_temp(self):
|
||||
def max_temp(self) -> float:
|
||||
"""Return the maximum temperature."""
|
||||
return self.water_heater.set_point_limits[1]
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@ from homeassistant.helpers import config_validation as cv, singleton, storage
|
||||
from .const import DOMAIN
|
||||
|
||||
STORAGE_VERSION = 1
|
||||
STORAGE_MINOR_VERSION = 2
|
||||
STORAGE_MINOR_VERSION = 3
|
||||
STORAGE_KEY = DOMAIN
|
||||
|
||||
|
||||
@@ -92,8 +92,11 @@ class GridPowerSourceType(TypedDict, total=False):
|
||||
power_config: PowerConfig
|
||||
|
||||
|
||||
class GridSourceType(TypedDict):
|
||||
"""Dictionary holding the source of grid energy consumption."""
|
||||
class LegacyGridSourceType(TypedDict):
|
||||
"""Legacy dictionary holding the source of grid energy consumption.
|
||||
|
||||
This format is deprecated and will be migrated to GridSourceType.
|
||||
"""
|
||||
|
||||
type: Literal["grid"]
|
||||
|
||||
@@ -104,6 +107,40 @@ class GridSourceType(TypedDict):
|
||||
cost_adjustment_day: float
|
||||
|
||||
|
||||
class GridSourceType(TypedDict):
|
||||
"""Dictionary holding a unified grid connection (like batteries).
|
||||
|
||||
Each grid connection represents a single import/export pair with
|
||||
optional power tracking. Multiple grid sources are allowed.
|
||||
"""
|
||||
|
||||
type: Literal["grid"]
|
||||
|
||||
# Import meter - kWh consumed from grid
|
||||
# Can be None for export-only or power-only grids migrated from legacy format
|
||||
stat_energy_from: str | None
|
||||
|
||||
# Export meter (optional) - kWh returned to grid (solar/battery export)
|
||||
stat_energy_to: str | None
|
||||
|
||||
# Cost tracking for import
|
||||
stat_cost: str | None # statistic_id of costs ($) incurred
|
||||
entity_energy_price: str | None # entity_id providing price ($/kWh)
|
||||
number_energy_price: float | None # Fixed price ($/kWh)
|
||||
|
||||
# Compensation tracking for export
|
||||
stat_compensation: str | None # statistic_id of compensation ($) received
|
||||
entity_energy_price_export: str | None # entity_id providing export price ($/kWh)
|
||||
number_energy_price_export: float | None # Fixed export price ($/kWh)
|
||||
|
||||
# Power measurement (optional)
|
||||
# positive when consuming from grid, negative when exporting
|
||||
stat_rate: NotRequired[str]
|
||||
power_config: NotRequired[PowerConfig]
|
||||
|
||||
cost_adjustment_day: float
|
||||
|
||||
|
||||
class SolarSourceType(TypedDict):
|
||||
"""Dictionary holding the source of energy production."""
|
||||
|
||||
@@ -308,23 +345,77 @@ def _generate_unique_value_validator(key: str) -> Callable[[list[dict]], list[di
|
||||
return validate_uniqueness
|
||||
|
||||
|
||||
GRID_SOURCE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required("type"): "grid",
|
||||
vol.Required("flow_from"): vol.All(
|
||||
[FLOW_FROM_GRID_SOURCE_SCHEMA],
|
||||
_generate_unique_value_validator("stat_energy_from"),
|
||||
),
|
||||
vol.Required("flow_to"): vol.All(
|
||||
[FLOW_TO_GRID_SOURCE_SCHEMA],
|
||||
_generate_unique_value_validator("stat_energy_to"),
|
||||
),
|
||||
vol.Optional("power"): vol.All(
|
||||
[GRID_POWER_SOURCE_SCHEMA],
|
||||
_generate_unique_value_validator("stat_rate"),
|
||||
),
|
||||
vol.Required("cost_adjustment_day"): vol.Coerce(float),
|
||||
}
|
||||
def _grid_ensure_single_price_import(
|
||||
val: dict[str, Any],
|
||||
) -> dict[str, Any]:
|
||||
"""Ensure we use a single price source for import."""
|
||||
if (
|
||||
val.get("entity_energy_price") is not None
|
||||
and val.get("number_energy_price") is not None
|
||||
):
|
||||
raise vol.Invalid("Define either an entity or a fixed number for import price")
|
||||
return val
|
||||
|
||||
|
||||
def _grid_ensure_single_price_export(
|
||||
val: dict[str, Any],
|
||||
) -> dict[str, Any]:
|
||||
"""Ensure we use a single price source for export."""
|
||||
if (
|
||||
val.get("entity_energy_price_export") is not None
|
||||
and val.get("number_energy_price_export") is not None
|
||||
):
|
||||
raise vol.Invalid("Define either an entity or a fixed number for export price")
|
||||
return val
|
||||
|
||||
|
||||
def _grid_ensure_at_least_one_stat(
|
||||
val: dict[str, Any],
|
||||
) -> dict[str, Any]:
|
||||
"""Ensure at least one of import, export, or power is configured."""
|
||||
if (
|
||||
val.get("stat_energy_from") is None
|
||||
and val.get("stat_energy_to") is None
|
||||
and val.get("stat_rate") is None
|
||||
and val.get("power_config") is None
|
||||
):
|
||||
raise vol.Invalid(
|
||||
"Grid must have at least one of: import meter, export meter, or power sensor"
|
||||
)
|
||||
return val
|
||||
|
||||
|
||||
GRID_SOURCE_SCHEMA = vol.All(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required("type"): "grid",
|
||||
# Import meter (can be None for export-only grids from legacy migration)
|
||||
vol.Optional("stat_energy_from", default=None): vol.Any(str, None),
|
||||
# Export meter (optional)
|
||||
vol.Optional("stat_energy_to", default=None): vol.Any(str, None),
|
||||
# Import cost tracking
|
||||
vol.Optional("stat_cost", default=None): vol.Any(str, None),
|
||||
vol.Optional("entity_energy_price", default=None): vol.Any(str, None),
|
||||
vol.Optional("number_energy_price", default=None): vol.Any(
|
||||
vol.Coerce(float), None
|
||||
),
|
||||
# Export compensation tracking
|
||||
vol.Optional("stat_compensation", default=None): vol.Any(str, None),
|
||||
vol.Optional("entity_energy_price_export", default=None): vol.Any(
|
||||
str, None
|
||||
),
|
||||
vol.Optional("number_energy_price_export", default=None): vol.Any(
|
||||
vol.Coerce(float), None
|
||||
),
|
||||
# Power measurement (optional)
|
||||
vol.Optional("stat_rate"): str,
|
||||
vol.Optional("power_config"): POWER_CONFIG_SCHEMA,
|
||||
vol.Required("cost_adjustment_day"): vol.Coerce(float),
|
||||
}
|
||||
),
|
||||
_grid_ensure_single_price_import,
|
||||
_grid_ensure_single_price_export,
|
||||
_grid_ensure_at_least_one_stat,
|
||||
)
|
||||
SOLAR_SOURCE_SCHEMA = vol.Schema(
|
||||
{
|
||||
@@ -369,10 +460,46 @@ WATER_SOURCE_SCHEMA = vol.Schema(
|
||||
|
||||
def check_type_limits(value: list[SourceType]) -> list[SourceType]:
|
||||
"""Validate that we don't have too many of certain types."""
|
||||
types = Counter([val["type"] for val in value])
|
||||
# Currently no type limits - multiple grid sources are allowed (like batteries)
|
||||
return value
|
||||
|
||||
if types.get("grid", 0) > 1:
|
||||
raise vol.Invalid("You cannot have more than 1 grid source")
|
||||
|
||||
def _validate_grid_stat_uniqueness(value: list[SourceType]) -> list[SourceType]:
|
||||
"""Validate that grid statistics are unique across all sources."""
|
||||
seen_import: set[str] = set()
|
||||
seen_export: set[str] = set()
|
||||
seen_rate: set[str] = set()
|
||||
|
||||
for source in value:
|
||||
if source.get("type") != "grid":
|
||||
continue
|
||||
|
||||
# Cast to GridSourceType since we've filtered for grid type
|
||||
grid_source: GridSourceType = source # type: ignore[assignment]
|
||||
|
||||
# Check import meter uniqueness
|
||||
if (stat_from := grid_source.get("stat_energy_from")) is not None:
|
||||
if stat_from in seen_import:
|
||||
raise vol.Invalid(
|
||||
f"Import meter {stat_from} is used in multiple grid connections"
|
||||
)
|
||||
seen_import.add(stat_from)
|
||||
|
||||
# Check export meter uniqueness
|
||||
if (stat_to := grid_source.get("stat_energy_to")) is not None:
|
||||
if stat_to in seen_export:
|
||||
raise vol.Invalid(
|
||||
f"Export meter {stat_to} is used in multiple grid connections"
|
||||
)
|
||||
seen_export.add(stat_to)
|
||||
|
||||
# Check power stat uniqueness
|
||||
if (stat_rate := grid_source.get("stat_rate")) is not None:
|
||||
if stat_rate in seen_rate:
|
||||
raise vol.Invalid(
|
||||
f"Power stat {stat_rate} is used in multiple grid connections"
|
||||
)
|
||||
seen_rate.add(stat_rate)
|
||||
|
||||
return value
|
||||
|
||||
@@ -393,6 +520,7 @@ ENERGY_SOURCE_SCHEMA = vol.All(
|
||||
]
|
||||
),
|
||||
check_type_limits,
|
||||
_validate_grid_stat_uniqueness,
|
||||
)
|
||||
|
||||
DEVICE_CONSUMPTION_SCHEMA = vol.Schema(
|
||||
@@ -405,6 +533,82 @@ DEVICE_CONSUMPTION_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
def _migrate_legacy_grid_to_unified(
|
||||
old_grid: dict[str, Any],
|
||||
) -> list[dict[str, Any]]:
|
||||
"""Migrate legacy grid format (flow_from/flow_to/power arrays) to unified format.
|
||||
|
||||
Each grid connection can have any combination of import, export, and power -
|
||||
all are optional as long as at least one is configured.
|
||||
|
||||
Migration pairs arrays by index position:
|
||||
- flow_from[i], flow_to[i], and power[i] combine into grid connection i
|
||||
- If arrays have different lengths, missing entries get None for that field
|
||||
- The number of grid connections equals max(len(flow_from), len(flow_to), len(power))
|
||||
"""
|
||||
flow_from = old_grid.get("flow_from", [])
|
||||
flow_to = old_grid.get("flow_to", [])
|
||||
power_list = old_grid.get("power", [])
|
||||
cost_adj = old_grid.get("cost_adjustment_day", 0.0)
|
||||
|
||||
new_sources: list[dict[str, Any]] = []
|
||||
# Number of grid connections = max length across all three arrays
|
||||
# If all arrays are empty, don't create any grid sources
|
||||
max_len = max(len(flow_from), len(flow_to), len(power_list))
|
||||
if max_len == 0:
|
||||
return []
|
||||
|
||||
for i in range(max_len):
|
||||
source: dict[str, Any] = {
|
||||
"type": "grid",
|
||||
"cost_adjustment_day": cost_adj,
|
||||
}
|
||||
|
||||
# Import fields from flow_from
|
||||
if i < len(flow_from):
|
||||
ff = flow_from[i]
|
||||
source["stat_energy_from"] = ff.get("stat_energy_from") or None
|
||||
source["stat_cost"] = ff.get("stat_cost")
|
||||
source["entity_energy_price"] = ff.get("entity_energy_price")
|
||||
source["number_energy_price"] = ff.get("number_energy_price")
|
||||
else:
|
||||
# Export-only entry - set import to None (validation will flag this)
|
||||
source["stat_energy_from"] = None
|
||||
source["stat_cost"] = None
|
||||
source["entity_energy_price"] = None
|
||||
source["number_energy_price"] = None
|
||||
|
||||
# Export fields from flow_to
|
||||
if i < len(flow_to):
|
||||
ft = flow_to[i]
|
||||
source["stat_energy_to"] = ft.get("stat_energy_to")
|
||||
source["stat_compensation"] = ft.get("stat_compensation")
|
||||
source["entity_energy_price_export"] = ft.get("entity_energy_price")
|
||||
source["number_energy_price_export"] = ft.get("number_energy_price")
|
||||
else:
|
||||
source["stat_energy_to"] = None
|
||||
source["stat_compensation"] = None
|
||||
source["entity_energy_price_export"] = None
|
||||
source["number_energy_price_export"] = None
|
||||
|
||||
# Power config at index i goes to grid connection at index i
|
||||
if i < len(power_list):
|
||||
power = power_list[i]
|
||||
if "power_config" in power:
|
||||
source["power_config"] = power["power_config"]
|
||||
if "stat_rate" in power:
|
||||
source["stat_rate"] = power["stat_rate"]
|
||||
|
||||
new_sources.append(source)
|
||||
|
||||
return new_sources
|
||||
|
||||
|
||||
def _is_legacy_grid_format(source: dict[str, Any]) -> bool:
|
||||
"""Check if a grid source is in the legacy format."""
|
||||
return source.get("type") == "grid" and "flow_from" in source
|
||||
|
||||
|
||||
class _EnergyPreferencesStore(storage.Store[EnergyPreferences]):
|
||||
"""Energy preferences store with migration support."""
|
||||
|
||||
@@ -419,6 +623,18 @@ class _EnergyPreferencesStore(storage.Store[EnergyPreferences]):
|
||||
if old_major_version == 1 and old_minor_version < 2:
|
||||
# Add device_consumption_water field if it doesn't exist
|
||||
data.setdefault("device_consumption_water", [])
|
||||
|
||||
if old_major_version == 1 and old_minor_version < 3:
|
||||
# Migrate legacy grid format to unified format
|
||||
new_sources: list[dict[str, Any]] = []
|
||||
for source in data.get("energy_sources", []):
|
||||
if _is_legacy_grid_format(source):
|
||||
# Convert legacy grid to multiple unified grid sources
|
||||
new_sources.extend(_migrate_legacy_grid_to_unified(source))
|
||||
else:
|
||||
new_sources.append(source)
|
||||
data["energy_sources"] = new_sources
|
||||
|
||||
return data
|
||||
|
||||
|
||||
@@ -516,27 +732,18 @@ class EnergyManager:
|
||||
source: GridSourceType,
|
||||
generate_entity_id: Callable[[str, PowerConfig], str],
|
||||
) -> GridSourceType:
|
||||
"""Set stat_rate for grid power sources if power_config is specified."""
|
||||
if "power" not in source:
|
||||
"""Set stat_rate for grid if power_config is specified."""
|
||||
if "power_config" not in source:
|
||||
return source
|
||||
|
||||
processed_power: list[GridPowerSourceType] = []
|
||||
for power in source["power"]:
|
||||
if "power_config" in power:
|
||||
config = power["power_config"]
|
||||
config = source["power_config"]
|
||||
|
||||
# If power_config has stat_rate (standard), just use it directly
|
||||
if "stat_rate" in config:
|
||||
processed_power.append({**power, "stat_rate": config["stat_rate"]})
|
||||
else:
|
||||
# For inverted or two-sensor config, set stat_rate to generated entity_id
|
||||
processed_power.append(
|
||||
{**power, "stat_rate": generate_entity_id("grid", config)}
|
||||
)
|
||||
else:
|
||||
processed_power.append(power)
|
||||
# If power_config has stat_rate (standard), just use it directly
|
||||
if "stat_rate" in config:
|
||||
return {**source, "stat_rate": config["stat_rate"]}
|
||||
|
||||
return {**source, "power": processed_power}
|
||||
# For inverted or two-sensor config, set stat_rate to the generated entity_id
|
||||
return {**source, "stat_rate": generate_entity_id("grid", config)}
|
||||
|
||||
@callback
|
||||
def async_listen_updates(self, update_listener: Callable[[], Awaitable]) -> None:
|
||||
|
||||
@@ -94,22 +94,15 @@ class SourceAdapter:
|
||||
|
||||
|
||||
SOURCE_ADAPTERS: Final = (
|
||||
# Grid import cost (unified format)
|
||||
SourceAdapter(
|
||||
"grid",
|
||||
"flow_from",
|
||||
None, # No flow_type - unified format
|
||||
"stat_energy_from",
|
||||
"stat_cost",
|
||||
"Cost",
|
||||
"cost",
|
||||
),
|
||||
SourceAdapter(
|
||||
"grid",
|
||||
"flow_to",
|
||||
"stat_energy_to",
|
||||
"stat_compensation",
|
||||
"Compensation",
|
||||
"compensation",
|
||||
),
|
||||
SourceAdapter(
|
||||
"gas",
|
||||
None,
|
||||
@@ -128,6 +121,16 @@ SOURCE_ADAPTERS: Final = (
|
||||
),
|
||||
)
|
||||
|
||||
# Separate adapter for grid export compensation (needs different price field)
|
||||
GRID_EXPORT_ADAPTER: Final = SourceAdapter(
|
||||
"grid",
|
||||
None, # No flow_type - unified format
|
||||
"stat_energy_to",
|
||||
"stat_compensation",
|
||||
"Compensation",
|
||||
"compensation",
|
||||
)
|
||||
|
||||
|
||||
class EntityNotFoundError(HomeAssistantError):
|
||||
"""When a referenced entity was not found."""
|
||||
@@ -183,22 +186,20 @@ class SensorManager:
|
||||
if adapter.source_type != energy_source["type"]:
|
||||
continue
|
||||
|
||||
if adapter.flow_type is None:
|
||||
self._process_sensor_data(
|
||||
adapter,
|
||||
energy_source,
|
||||
to_add,
|
||||
to_remove,
|
||||
)
|
||||
continue
|
||||
self._process_sensor_data(
|
||||
adapter,
|
||||
energy_source,
|
||||
to_add,
|
||||
to_remove,
|
||||
)
|
||||
|
||||
for flow in energy_source[adapter.flow_type]: # type: ignore[typeddict-item]
|
||||
self._process_sensor_data(
|
||||
adapter,
|
||||
flow,
|
||||
to_add,
|
||||
to_remove,
|
||||
)
|
||||
# Handle grid export compensation (unified format uses different price fields)
|
||||
if energy_source["type"] == "grid":
|
||||
self._process_grid_export_sensor(
|
||||
energy_source,
|
||||
to_add,
|
||||
to_remove,
|
||||
)
|
||||
|
||||
# Process power sensors for battery and grid sources
|
||||
self._process_power_sensor_data(
|
||||
@@ -222,11 +223,16 @@ class SensorManager:
|
||||
if config.get(adapter.total_money_key) is not None:
|
||||
return
|
||||
|
||||
key = (adapter.source_type, adapter.flow_type, config[adapter.stat_energy_key])
|
||||
# Skip if the energy stat is not configured (e.g., export-only or power-only grids)
|
||||
stat_energy = config.get(adapter.stat_energy_key)
|
||||
if not stat_energy:
|
||||
return
|
||||
|
||||
key = (adapter.source_type, adapter.flow_type, stat_energy)
|
||||
|
||||
# Make sure the right data is there
|
||||
# If the entity existed, we don't pop it from to_remove so it's removed
|
||||
if not valid_entity_id(config[adapter.stat_energy_key]) or (
|
||||
if not valid_entity_id(stat_energy) or (
|
||||
config.get("entity_energy_price") is None
|
||||
and config.get("number_energy_price") is None
|
||||
):
|
||||
@@ -242,6 +248,56 @@ class SensorManager:
|
||||
)
|
||||
to_add.append(self.current_entities[key])
|
||||
|
||||
@callback
|
||||
def _process_grid_export_sensor(
|
||||
self,
|
||||
config: Mapping[str, Any],
|
||||
to_add: list[EnergyCostSensor | EnergyPowerSensor],
|
||||
to_remove: dict[tuple[str, str | None, str], EnergyCostSensor],
|
||||
) -> None:
|
||||
"""Process grid export compensation sensor (unified format).
|
||||
|
||||
The unified grid format uses different field names for export pricing:
|
||||
- entity_energy_price_export instead of entity_energy_price
|
||||
- number_energy_price_export instead of number_energy_price
|
||||
"""
|
||||
# No export meter configured
|
||||
stat_energy_to = config.get("stat_energy_to")
|
||||
if stat_energy_to is None:
|
||||
return
|
||||
|
||||
# Already have a compensation stat
|
||||
if config.get("stat_compensation") is not None:
|
||||
return
|
||||
|
||||
key = ("grid", None, stat_energy_to)
|
||||
|
||||
# Check for export pricing fields (different names in unified format)
|
||||
if not valid_entity_id(stat_energy_to) or (
|
||||
config.get("entity_energy_price_export") is None
|
||||
and config.get("number_energy_price_export") is None
|
||||
):
|
||||
return
|
||||
|
||||
# Create a config wrapper that maps the sell price fields to standard names
|
||||
# so EnergyCostSensor can use them
|
||||
export_config: dict[str, Any] = {
|
||||
"stat_energy_to": stat_energy_to,
|
||||
"stat_compensation": config.get("stat_compensation"),
|
||||
"entity_energy_price": config.get("entity_energy_price_export"),
|
||||
"number_energy_price": config.get("number_energy_price_export"),
|
||||
}
|
||||
|
||||
if current_entity := to_remove.pop(key, None):
|
||||
current_entity.update_config(export_config)
|
||||
return
|
||||
|
||||
self.current_entities[key] = EnergyCostSensor(
|
||||
GRID_EXPORT_ADAPTER,
|
||||
export_config,
|
||||
)
|
||||
to_add.append(self.current_entities[key])
|
||||
|
||||
@callback
|
||||
def _process_power_sensor_data(
|
||||
self,
|
||||
@@ -252,21 +308,14 @@ class SensorManager:
|
||||
"""Process power sensor data for battery and grid sources."""
|
||||
source_type = energy_source.get("type")
|
||||
|
||||
if source_type == "battery":
|
||||
if source_type in ("battery", "grid"):
|
||||
# Both battery and grid now use unified format with power_config at top level
|
||||
power_config = energy_source.get("power_config")
|
||||
if power_config and self._needs_power_sensor(power_config):
|
||||
self._create_or_keep_power_sensor(
|
||||
source_type, power_config, to_add, to_remove
|
||||
)
|
||||
|
||||
elif source_type == "grid":
|
||||
for power in energy_source.get("power", []):
|
||||
power_config = power.get("power_config")
|
||||
if power_config and self._needs_power_sensor(power_config):
|
||||
self._create_or_keep_power_sensor(
|
||||
source_type, power_config, to_add, to_remove
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _needs_power_sensor(power_config: PowerConfig) -> bool:
|
||||
"""Check if power_config needs a transform sensor."""
|
||||
@@ -312,6 +361,17 @@ class EnergyCostSensor(SensorEntity):
|
||||
|
||||
This is intended as a fallback for when no specific cost sensor is available for the
|
||||
utility.
|
||||
|
||||
Expected config fields (from adapter or export_config wrapper):
|
||||
- stat_energy_key (via adapter): Key to get the energy statistic ID
|
||||
- total_money_key (via adapter): Key to get the existing cost/compensation stat
|
||||
- entity_energy_price: Entity ID providing price per unit (e.g., $/kWh)
|
||||
- number_energy_price: Fixed price per unit
|
||||
|
||||
Note: For grid export compensation, the unified format uses different field names
|
||||
(entity_energy_price_export, number_energy_price_export). The _process_grid_export_sensor
|
||||
method in SensorManager creates a wrapper config that maps these to the standard
|
||||
field names (entity_energy_price, number_energy_price) so this class can use them.
|
||||
"""
|
||||
|
||||
_attr_entity_registry_visible_default = False
|
||||
|
||||
@@ -401,16 +401,20 @@ def _validate_grid_source(
|
||||
source_result: ValidationIssues,
|
||||
validate_calls: list[functools.partial[None]],
|
||||
) -> None:
|
||||
"""Validate grid energy source."""
|
||||
flow_from: data.FlowFromGridSourceType
|
||||
for flow_from in source["flow_from"]:
|
||||
wanted_statistics_metadata.add(flow_from["stat_energy_from"])
|
||||
"""Validate grid energy source (unified format)."""
|
||||
stat_energy_from = source.get("stat_energy_from")
|
||||
stat_energy_to = source.get("stat_energy_to")
|
||||
stat_rate = source.get("stat_rate")
|
||||
|
||||
# Validate import meter (optional)
|
||||
if stat_energy_from:
|
||||
wanted_statistics_metadata.add(stat_energy_from)
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_usage_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
flow_from["stat_energy_from"],
|
||||
stat_energy_from,
|
||||
ENERGY_USAGE_DEVICE_CLASSES,
|
||||
ENERGY_USAGE_UNITS,
|
||||
ENERGY_UNIT_ERROR,
|
||||
@@ -418,7 +422,8 @@ def _validate_grid_source(
|
||||
)
|
||||
)
|
||||
|
||||
if (stat_cost := flow_from.get("stat_cost")) is not None:
|
||||
# Validate import cost tracking (only if import meter exists)
|
||||
if (stat_cost := source.get("stat_cost")) is not None:
|
||||
wanted_statistics_metadata.add(stat_cost)
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
@@ -429,7 +434,7 @@ def _validate_grid_source(
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
elif (entity_energy_price := flow_from.get("entity_energy_price")) is not None:
|
||||
elif (entity_energy_price := source.get("entity_energy_price")) is not None:
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_price_entity,
|
||||
@@ -442,27 +447,27 @@ def _validate_grid_source(
|
||||
)
|
||||
|
||||
if (
|
||||
flow_from.get("entity_energy_price") is not None
|
||||
or flow_from.get("number_energy_price") is not None
|
||||
source.get("entity_energy_price") is not None
|
||||
or source.get("number_energy_price") is not None
|
||||
):
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_auto_generated_cost_entity,
|
||||
hass,
|
||||
flow_from["stat_energy_from"],
|
||||
stat_energy_from,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
flow_to: data.FlowToGridSourceType
|
||||
for flow_to in source["flow_to"]:
|
||||
wanted_statistics_metadata.add(flow_to["stat_energy_to"])
|
||||
# Validate export meter (optional)
|
||||
if stat_energy_to:
|
||||
wanted_statistics_metadata.add(stat_energy_to)
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_usage_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
flow_to["stat_energy_to"],
|
||||
stat_energy_to,
|
||||
ENERGY_USAGE_DEVICE_CLASSES,
|
||||
ENERGY_USAGE_UNITS,
|
||||
ENERGY_UNIT_ERROR,
|
||||
@@ -470,7 +475,8 @@ def _validate_grid_source(
|
||||
)
|
||||
)
|
||||
|
||||
if (stat_compensation := flow_to.get("stat_compensation")) is not None:
|
||||
# Validate export compensation tracking
|
||||
if (stat_compensation := source.get("stat_compensation")) is not None:
|
||||
wanted_statistics_metadata.add(stat_compensation)
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
@@ -481,12 +487,14 @@ def _validate_grid_source(
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
elif (entity_energy_price := flow_to.get("entity_energy_price")) is not None:
|
||||
elif (
|
||||
entity_price_export := source.get("entity_energy_price_export")
|
||||
) is not None:
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_price_entity,
|
||||
hass,
|
||||
entity_energy_price,
|
||||
entity_price_export,
|
||||
source_result,
|
||||
ENERGY_PRICE_UNITS,
|
||||
ENERGY_PRICE_UNIT_ERROR,
|
||||
@@ -494,26 +502,27 @@ def _validate_grid_source(
|
||||
)
|
||||
|
||||
if (
|
||||
flow_to.get("entity_energy_price") is not None
|
||||
or flow_to.get("number_energy_price") is not None
|
||||
source.get("entity_energy_price_export") is not None
|
||||
or source.get("number_energy_price_export") is not None
|
||||
):
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_auto_generated_cost_entity,
|
||||
hass,
|
||||
flow_to["stat_energy_to"],
|
||||
stat_energy_to,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
for power_stat in source.get("power", []):
|
||||
wanted_statistics_metadata.add(power_stat["stat_rate"])
|
||||
# Validate power sensor (optional)
|
||||
if stat_rate:
|
||||
wanted_statistics_metadata.add(stat_rate)
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_power_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
power_stat["stat_rate"],
|
||||
stat_rate,
|
||||
POWER_USAGE_DEVICE_CLASSES,
|
||||
POWER_USAGE_UNITS,
|
||||
POWER_UNIT_ERROR,
|
||||
|
||||
@@ -6,6 +6,7 @@ import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_DEVICE
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.selector import (
SelectSelector,
SelectSelectorConfig,
@@ -15,6 +16,12 @@ from homeassistant.helpers.selector import (
from . import dongle
from .const import DOMAIN, ERROR_INVALID_DONGLE_PATH, LOGGER

MANUAL_SCHEMA = vol.Schema(
{
vol.Required(CONF_DEVICE): cv.string,
}
)


class EnOceanFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle the enOcean config flows."""
@@ -49,17 +56,14 @@ class EnOceanFlowHandler(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Propose a list of detected dongles."""
errors = {}
if user_input is not None:
if user_input[CONF_DEVICE] == self.MANUAL_PATH_VALUE:
return await self.async_step_manual()
if await self.validate_enocean_conf(user_input):
return self.create_enocean_entry(user_input)
errors = {CONF_DEVICE: ERROR_INVALID_DONGLE_PATH}
return await self.async_step_manual(user_input)

devices = await self.hass.async_add_executor_job(dongle.detect)
if len(devices) == 0:
return await self.async_step_manual(user_input)
return await self.async_step_manual()
devices.append(self.MANUAL_PATH_VALUE)

return self.async_show_form(
@@ -75,26 +79,21 @@ class EnOceanFlowHandler(ConfigFlow, domain=DOMAIN):
)
}
),
errors=errors,
)

async def async_step_manual(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Request manual USB dongle path."""
default_value = None
errors = {}
if user_input is not None:
if await self.validate_enocean_conf(user_input):
return self.create_enocean_entry(user_input)
default_value = user_input[CONF_DEVICE]
errors = {CONF_DEVICE: ERROR_INVALID_DONGLE_PATH}

return self.async_show_form(
step_id="manual",
data_schema=vol.Schema(
{vol.Required(CONF_DEVICE, default=default_value): str}
),
data_schema=self.add_suggested_values_to_schema(MANUAL_SCHEMA, user_input),
errors=errors,
)

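In the manual step above, the per-attempt vol.Schema with default=default_value is replaced by the module-level MANUAL_SCHEMA combined with add_suggested_values_to_schema(), so a rejected path is offered back to the user as a suggested value instead of a hard default. A rough standalone sketch of that idea using only voluptuous (add_suggested_values below is a simplified stand-in for the config-flow helper, not its actual implementation):

import voluptuous as vol

CONF_DEVICE = "device"

MANUAL_SCHEMA = vol.Schema({vol.Required(CONF_DEVICE): str})


def add_suggested_values(schema: vol.Schema, suggested: dict | None) -> vol.Schema:
    # Simplified stand-in: re-key the schema so previous answers become defaults.
    if not suggested:
        return schema
    new_keys = {}
    for key, validator in schema.schema.items():
        if isinstance(key, vol.Marker) and key.schema in suggested:
            # Keep the marker type (Required/Optional) but attach the old answer.
            new_keys[type(key)(key.schema, default=suggested[key.schema])] = validator
        else:
            new_keys[key] = validator
    return vol.Schema(new_keys)


# A failed attempt is shown again with the previously entered path pre-filled.
retry_schema = add_suggested_values(MANUAL_SCHEMA, {CONF_DEVICE: "/dev/ttyUSB0"})
print(retry_schema({}))  # {'device': '/dev/ttyUSB0'}
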
@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==43.14.0",
"aioesphomeapi==44.0.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.6.0"
],

@@ -11,6 +11,7 @@ from homeassistant.components.sensor import (
RestoreSensor,
SensorDeviceClass,
SensorEntity,
SensorStateClass,
)
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN, UnitOfMass
from homeassistant.core import HomeAssistant, callback
@@ -47,6 +48,7 @@ class EufyLifeSensorEntity(SensorEntity):
"""Representation of an EufyLife sensor."""

_attr_has_entity_name = True
_attr_state_class = SensorStateClass.MEASUREMENT

def __init__(self, data: EufyLifeData) -> None:
"""Initialize the weight sensor entity."""

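The EufyLife sensor base class above gains _attr_state_class = SensorStateClass.MEASUREMENT, which marks each reading as a point-in-time measurement so the recorder can build long-term statistics for it. A minimal sensor entity sketch along the same lines (the class and returned value are illustrative, not the integration's actual code):

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorStateClass,
)
from homeassistant.const import UnitOfMass


class ExampleWeightSensor(SensorEntity):
    """Illustrative weight sensor reporting point-in-time measurements."""

    _attr_has_entity_name = True
    _attr_device_class = SensorDeviceClass.WEIGHT
    _attr_native_unit_of_measurement = UnitOfMass.KILOGRAMS
    _attr_state_class = SensorStateClass.MEASUREMENT  # enables long-term statistics

    @property
    def native_value(self) -> float | None:
        # A real integration would return the latest parsed reading here.
        return 72.4
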
@@ -9,49 +9,34 @@ Note that the API used by this integration's client does not support cooling.
from __future__ import annotations

from dataclasses import dataclass
from datetime import timedelta
import logging
from typing import Final

import evohomeasync as ec1
import evohomeasync2 as ec2
from evohomeasync2.const import SZ_CAN_BE_TEMPORARY, SZ_SYSTEM_MODE, SZ_TIMING_MODE
from evohomeasync2.schemas.const import (
S2_DURATION as SZ_DURATION,
S2_PERIOD as SZ_PERIOD,
SystemMode as EvoSystemMode,
)
import voluptuous as vol

from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_MODE,
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
CONF_USERNAME,
Platform,
)
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.service import verify_domain_control
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey

from .const import (
ATTR_DURATION,
ATTR_DURATION_UNTIL,
ATTR_PERIOD,
ATTR_SETPOINT,
CONF_LOCATION_IDX,
DOMAIN,
EVOHOME_DATA,
SCAN_INTERVAL_DEFAULT,
SCAN_INTERVAL_MINIMUM,
EvoService,
)
from .coordinator import EvoDataUpdateCoordinator
from .services import setup_service_functions
from .storage import TokenManager

_LOGGER = logging.getLogger(__name__)
@@ -72,26 +57,6 @@ CONFIG_SCHEMA: Final = vol.Schema(
extra=vol.ALLOW_EXTRA,
)

# system mode schemas are built dynamically when the services are registered
# because supported modes can vary for edge-case systems

RESET_ZONE_OVERRIDE_SCHEMA: Final = vol.Schema(
{vol.Required(ATTR_ENTITY_ID): cv.entity_id}
)
SET_ZONE_OVERRIDE_SCHEMA: Final = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_SETPOINT): vol.All(
vol.Coerce(float), vol.Range(min=4.0, max=35.0)
),
vol.Optional(ATTR_DURATION_UNTIL): vol.All(
cv.time_period, vol.Range(min=timedelta(days=0), max=timedelta(days=1))
),
}
)

EVOHOME_KEY: HassKey[EvoData] = HassKey(DOMAIN)


@dataclass
class EvoData:
@@ -130,7 +95,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

assert coordinator.tcs is not None # mypy

hass.data[EVOHOME_KEY] = EvoData(
hass.data[EVOHOME_DATA] = EvoData(
coordinator=coordinator,
loc_idx=coordinator.loc_idx,
tcs=coordinator.tcs,
@@ -147,132 +112,3 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
setup_service_functions(hass, coordinator)

return True


@callback
def setup_service_functions(
hass: HomeAssistant, coordinator: EvoDataUpdateCoordinator
) -> None:
"""Set up the service handlers for the system/zone operating modes.

Not all Honeywell TCC-compatible systems support all operating modes. In addition,
each mode will require any of four distinct service schemas. This has to be
enumerated before registering the appropriate handlers.

It appears that all TCC-compatible systems support the same three zones modes.
"""

@verify_domain_control(DOMAIN)
async def force_refresh(call: ServiceCall) -> None:
"""Obtain the latest state data via the vendor's RESTful API."""
await coordinator.async_refresh()

@verify_domain_control(DOMAIN)
async def set_system_mode(call: ServiceCall) -> None:
"""Set the system mode."""
assert coordinator.tcs is not None # mypy

payload = {
"unique_id": coordinator.tcs.id,
"service": call.service,
"data": call.data,
}
async_dispatcher_send(hass, DOMAIN, payload)

@verify_domain_control(DOMAIN)
async def set_zone_override(call: ServiceCall) -> None:
"""Set the zone override (setpoint)."""
entity_id = call.data[ATTR_ENTITY_ID]

registry = er.async_get(hass)
registry_entry = registry.async_get(entity_id)

if registry_entry is None or registry_entry.platform != DOMAIN:
raise ValueError(f"'{entity_id}' is not a known {DOMAIN} entity")

if registry_entry.domain != "climate":
raise ValueError(f"'{entity_id}' is not an {DOMAIN} controller/zone")

payload = {
"unique_id": registry_entry.unique_id,
"service": call.service,
"data": call.data,
}

async_dispatcher_send(hass, DOMAIN, payload)

assert coordinator.tcs is not None # mypy

hass.services.async_register(DOMAIN, EvoService.REFRESH_SYSTEM, force_refresh)

# Enumerate which operating modes are supported by this system
modes = list(coordinator.tcs.allowed_system_modes)

# Not all systems support "AutoWithReset": register this handler only if required
if any(
m[SZ_SYSTEM_MODE]
for m in modes
if m[SZ_SYSTEM_MODE] == EvoSystemMode.AUTO_WITH_RESET
):
hass.services.async_register(DOMAIN, EvoService.RESET_SYSTEM, set_system_mode)

system_mode_schemas = []
modes = [m for m in modes if m[SZ_SYSTEM_MODE] != EvoSystemMode.AUTO_WITH_RESET]

# Permanent-only modes will use this schema
perm_modes = [m[SZ_SYSTEM_MODE] for m in modes if not m[SZ_CAN_BE_TEMPORARY]]
if perm_modes: # any of: "Auto", "HeatingOff": permanent only
schema = vol.Schema({vol.Required(ATTR_MODE): vol.In(perm_modes)})
system_mode_schemas.append(schema)

modes = [m for m in modes if m[SZ_CAN_BE_TEMPORARY]]

# These modes are set for a number of hours (or indefinitely): use this schema
temp_modes = [m[SZ_SYSTEM_MODE] for m in modes if m[SZ_TIMING_MODE] == SZ_DURATION]
if temp_modes: # any of: "AutoWithEco", permanent or for 0-24 hours
schema = vol.Schema(
{
vol.Required(ATTR_MODE): vol.In(temp_modes),
vol.Optional(ATTR_DURATION): vol.All(
cv.time_period,
vol.Range(min=timedelta(hours=0), max=timedelta(hours=24)),
),
}
)
system_mode_schemas.append(schema)

# These modes are set for a number of days (or indefinitely): use this schema
temp_modes = [m[SZ_SYSTEM_MODE] for m in modes if m[SZ_TIMING_MODE] == SZ_PERIOD]
if temp_modes: # any of: "Away", "Custom", "DayOff", permanent or for 1-99 days
schema = vol.Schema(
{
vol.Required(ATTR_MODE): vol.In(temp_modes),
vol.Optional(ATTR_PERIOD): vol.All(
cv.time_period,
vol.Range(min=timedelta(days=1), max=timedelta(days=99)),
),
}
)
system_mode_schemas.append(schema)

if system_mode_schemas:
hass.services.async_register(
DOMAIN,
EvoService.SET_SYSTEM_MODE,
set_system_mode,
schema=vol.Schema(vol.Any(*system_mode_schemas)),
)

# The zone modes are consistent across all systems and use the same schema
hass.services.async_register(
DOMAIN,
EvoService.RESET_ZONE_OVERRIDE,
set_zone_override,
schema=RESET_ZONE_OVERRIDE_SCHEMA,
)
hass.services.async_register(
DOMAIN,
EvoService.SET_ZONE_OVERRIDE,
set_zone_override,
schema=SET_ZONE_OVERRIDE_SCHEMA,
)

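setup_service_functions above registers the set_system_mode service with schema=vol.Schema(vol.Any(*system_mode_schemas)), so a service call is accepted if it matches any one of the mode-specific shapes that were built for this particular system. A small standalone illustration of that composition with voluptuous (the mode names and ranges are made up for the example):

import voluptuous as vol

# One schema per call shape: permanent-only modes vs. modes with a duration.
PERM_SCHEMA = vol.Schema({vol.Required("mode"): vol.In(["Auto", "HeatingOff"])})
TIMED_SCHEMA = vol.Schema(
    {
        vol.Required("mode"): vol.In(["AutoWithEco"]),
        vol.Optional("duration"): vol.All(vol.Coerce(int), vol.Range(min=0, max=24)),
    }
)

# The service accepts a payload matching either shape.
SERVICE_SCHEMA = vol.Schema(vol.Any(PERM_SCHEMA, TIMED_SCHEMA))

print(SERVICE_SCHEMA({"mode": "Auto"}))
print(SERVICE_SCHEMA({"mode": "AutoWithEco", "duration": 3}))
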
@@ -41,12 +41,12 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util

from . import EVOHOME_KEY
from .const import (
ATTR_DURATION,
ATTR_DURATION_UNTIL,
ATTR_PERIOD,
ATTR_SETPOINT,
EVOHOME_DATA,
EvoService,
)
from .coordinator import EvoDataUpdateCoordinator
@@ -85,9 +85,9 @@ async def async_setup_platform(
if discovery_info is None:
return

coordinator = hass.data[EVOHOME_KEY].coordinator
loc_idx = hass.data[EVOHOME_KEY].loc_idx
tcs = hass.data[EVOHOME_KEY].tcs
coordinator = hass.data[EVOHOME_DATA].coordinator
loc_idx = hass.data[EVOHOME_DATA].loc_idx
tcs = hass.data[EVOHOME_DATA].tcs

_LOGGER.debug(
"Found the Location/Controller (%s), id=%s, name=%s (location_idx=%s)",

@@ -4,9 +4,15 @@ from __future__ import annotations

from datetime import timedelta
from enum import StrEnum, unique
from typing import Final
from typing import TYPE_CHECKING, Final

from homeassistant.util.hass_dict import HassKey

if TYPE_CHECKING:
from . import EvoData

DOMAIN: Final = "evohome"
EVOHOME_DATA: HassKey[EvoData] = HassKey(DOMAIN)

STORAGE_VER: Final = 1
STORAGE_KEY: Final = DOMAIN

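const.py now owns the EVOHOME_DATA key as a HassKey[EvoData], with EvoData imported only under TYPE_CHECKING to avoid a runtime circular import from __init__.py; the typed key lets type checkers infer what hass.data[EVOHOME_DATA] holds. A rough sketch of how such a typed key behaves, using only the standard library (HassKey itself lives in homeassistant.util.hass_dict; TypedKey below is a simplified stand-in):

from __future__ import annotations

from dataclasses import dataclass
from typing import Generic, TypeVar, cast

_T = TypeVar("_T")


class TypedKey(str, Generic[_T]):
    """A str subclass that carries the type of the value stored under it."""


@dataclass
class ExampleData:
    loc_idx: int


EXAMPLE_KEY: TypedKey[ExampleData] = TypedKey("example_domain")

store: dict[str, object] = {}  # plays the role of hass.data
store[EXAMPLE_KEY] = ExampleData(loc_idx=0)

# With the real HassDict, a type checker infers ExampleData here without the cast.
data = cast(ExampleData, store[EXAMPLE_KEY])
print(data.loc_idx)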