Mirror of https://github.com/home-assistant/core.git, synced 2026-02-14 02:49:32 +01:00
Compare commits: claude/are ... frontend-d (544 commits)
Commit list: b42d008e79 … 3369459d41 (544 entries, SHA1 only; the author and date columns are empty).
@@ -22,6 +22,7 @@ base_platforms: &base_platforms
- homeassistant/components/calendar/**
- homeassistant/components/camera/**
- homeassistant/components/climate/**
- homeassistant/components/conversation/**
- homeassistant/components/cover/**
- homeassistant/components/date/**
- homeassistant/components/datetime/**
@@ -53,6 +54,7 @@ base_platforms: &base_platforms
- homeassistant/components/update/**
- homeassistant/components/vacuum/**
- homeassistant/components/valve/**
- homeassistant/components/wake_word/**
- homeassistant/components/water_heater/**
- homeassistant/components/weather/**

@@ -70,7 +72,6 @@ components: &components
- homeassistant/components/cloud/**
- homeassistant/components/config/**
- homeassistant/components/configurator/**
- homeassistant/components/conversation/**
- homeassistant/components/demo/**
- homeassistant/components/device_automation/**
- homeassistant/components/dhcp/**

@@ -60,7 +60,13 @@
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
},
"[json][jsonc][yaml]": {
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[yaml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"json.schemas": [
11 .github/workflows/builder.yml (vendored)
@@ -100,7 +100,7 @@ jobs:
|
||||
|
||||
- name: Download nightly wheels of frontend
|
||||
if: needs.init.outputs.channel == 'dev'
|
||||
uses: dawidd6/action-download-artifact@0bd50d53a6d7fb5cb921e607957e9cc12b4ce392 # v12
|
||||
uses: dawidd6/action-download-artifact@5c98f0b039f36ef966fdb7dfa9779262785ecb05 # v14
|
||||
with:
|
||||
github_token: ${{secrets.GITHUB_TOKEN}}
|
||||
repo: home-assistant/frontend
|
||||
@@ -111,7 +111,7 @@ jobs:
|
||||
|
||||
- name: Download nightly wheels of intents
|
||||
if: needs.init.outputs.channel == 'dev'
|
||||
uses: dawidd6/action-download-artifact@0bd50d53a6d7fb5cb921e607957e9cc12b4ce392 # v12
|
||||
uses: dawidd6/action-download-artifact@5c98f0b039f36ef966fdb7dfa9779262785ecb05 # v14
|
||||
with:
|
||||
github_token: ${{secrets.GITHUB_TOKEN}}
|
||||
repo: OHF-Voice/intents-package
|
||||
@@ -225,7 +225,7 @@ jobs:
|
||||
|
||||
- name: Build base image
|
||||
id: build
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile
|
||||
@@ -235,6 +235,7 @@ jobs:
|
||||
build-args: |
|
||||
BUILD_FROM=${{ steps.vars.outputs.base_image }}
|
||||
tags: ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}
|
||||
outputs: type=image,push=true,compression=zstd,compression-level=9,force-compression=true,oci-mediatypes=true
|
||||
labels: |
|
||||
io.hass.arch=${{ matrix.arch }}
|
||||
io.hass.version=${{ needs.init.outputs.version }}
|
||||
@@ -529,7 +530,7 @@ jobs:
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build Docker image
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
|
||||
with:
|
||||
context: . # So action will not pull the repository again
|
||||
file: ./script/hassfest/docker/Dockerfile
|
||||
@@ -542,7 +543,7 @@ jobs:
|
||||
- name: Push Docker image
|
||||
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
|
||||
id: push
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
|
||||
with:
|
||||
context: . # So action will not pull the repository again
|
||||
file: ./script/hassfest/docker/Dockerfile
|
||||
|
||||
2 .github/workflows/ci.yaml (vendored)
@@ -254,7 +254,7 @@ jobs:
|
||||
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/codespell.json"
|
||||
- name: Run prek
|
||||
uses: j178/prek-action@564dda4cfa5e96aafdc4a5696c4bf7b46baae5ac # v1.1.0
|
||||
uses: j178/prek-action@0bb87d7f00b0c99306c8bcb8b8beba1eb581c037 # v1.1.1
|
||||
env:
|
||||
PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config
|
||||
RUFF_OUTPUT_FORMAT: github
|
||||
|
||||
4 .github/workflows/codeql.yml (vendored)
@@ -24,11 +24,11 @@ jobs:
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@b20883b0cd1f46c72ae0ba6d1090936928f9fa30 # v4.32.0
|
||||
uses: github/codeql-action/init@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2
|
||||
with:
|
||||
languages: python
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@b20883b0cd1f46c72ae0ba6d1090936928f9fa30 # v4.32.0
|
||||
uses: github/codeql-action/analyze@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2
|
||||
with:
|
||||
category: "/language:python"
|
||||
|
||||
@@ -231,7 +231,7 @@ jobs:
|
||||
- name: Detect duplicates using AI
|
||||
id: ai_detection
|
||||
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
|
||||
uses: actions/ai-inference@a6101c89c6feaecc585efdd8d461f18bb7896f20 # v2.0.5
|
||||
uses: actions/ai-inference@a380166897b5408b8fb7dddd148142794cb5624a # v2.0.6
|
||||
with:
|
||||
model: openai/gpt-4o
|
||||
system-prompt: |
|
||||
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
- name: Detect language using AI
|
||||
id: ai_language_detection
|
||||
if: steps.detect_language.outputs.should_continue == 'true'
|
||||
uses: actions/ai-inference@a6101c89c6feaecc585efdd8d461f18bb7896f20 # v2.0.5
|
||||
uses: actions/ai-inference@a380166897b5408b8fb7dddd148142794cb5624a # v2.0.6
|
||||
with:
|
||||
model: openai/gpt-4o-mini
|
||||
system-prompt: |
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.14.13
|
||||
rev: v0.15.0
|
||||
hooks:
|
||||
- id: ruff-check
|
||||
args:
|
||||
|
||||
@@ -221,6 +221,7 @@ homeassistant.components.generic_hygrostat.*
|
||||
homeassistant.components.generic_thermostat.*
|
||||
homeassistant.components.geo_location.*
|
||||
homeassistant.components.geocaching.*
|
||||
homeassistant.components.ghost.*
|
||||
homeassistant.components.gios.*
|
||||
homeassistant.components.github.*
|
||||
homeassistant.components.glances.*
|
||||
@@ -286,6 +287,7 @@ homeassistant.components.input_button.*
|
||||
homeassistant.components.input_select.*
|
||||
homeassistant.components.input_text.*
|
||||
homeassistant.components.integration.*
|
||||
homeassistant.components.intelliclima.*
|
||||
homeassistant.components.intent.*
|
||||
homeassistant.components.intent_script.*
|
||||
homeassistant.components.ios.*
|
||||
@@ -362,7 +364,6 @@ homeassistant.components.my.*
|
||||
homeassistant.components.mysensors.*
|
||||
homeassistant.components.myuplink.*
|
||||
homeassistant.components.nam.*
|
||||
homeassistant.components.nanoleaf.*
|
||||
homeassistant.components.nasweb.*
|
||||
homeassistant.components.neato.*
|
||||
homeassistant.components.nest.*
|
||||
@@ -389,6 +390,7 @@ homeassistant.components.onkyo.*
|
||||
homeassistant.components.open_meteo.*
|
||||
homeassistant.components.open_router.*
|
||||
homeassistant.components.openai_conversation.*
|
||||
homeassistant.components.openevse.*
|
||||
homeassistant.components.openexchangerates.*
|
||||
homeassistant.components.opensky.*
|
||||
homeassistant.components.openuv.*
|
||||
@@ -434,6 +436,7 @@ homeassistant.components.raspberry_pi.*
|
||||
homeassistant.components.rdw.*
|
||||
homeassistant.components.recollect_waste.*
|
||||
homeassistant.components.recorder.*
|
||||
homeassistant.components.redgtech.*
|
||||
homeassistant.components.remember_the_milk.*
|
||||
homeassistant.components.remote.*
|
||||
homeassistant.components.remote_calendar.*
|
||||
|
||||
23 CODEOWNERS (generated)
@@ -15,7 +15,7 @@
|
||||
.yamllint @home-assistant/core
|
||||
pyproject.toml @home-assistant/core
|
||||
requirements_test.txt @home-assistant/core
|
||||
/.devcontainer/ @home-assistant/core
|
||||
/.devcontainer/ @home-assistant/core @edenhaus
|
||||
/.github/ @home-assistant/core
|
||||
/.vscode/ @home-assistant/core
|
||||
/homeassistant/*.py @home-assistant/core
|
||||
@@ -595,6 +595,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/geonetnz_quakes/ @exxamalte
|
||||
/homeassistant/components/geonetnz_volcano/ @exxamalte
|
||||
/tests/components/geonetnz_volcano/ @exxamalte
|
||||
/homeassistant/components/ghost/ @johnonolan
|
||||
/tests/components/ghost/ @johnonolan
|
||||
/homeassistant/components/gios/ @bieniu
|
||||
/tests/components/gios/ @bieniu
|
||||
/homeassistant/components/github/ @timmo001 @ludeeus
|
||||
@@ -670,6 +672,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/hdmi_cec/ @inytar
|
||||
/tests/components/hdmi_cec/ @inytar
|
||||
/homeassistant/components/heatmiser/ @andylockran
|
||||
/homeassistant/components/hegel/ @boazca
|
||||
/tests/components/hegel/ @boazca
|
||||
/homeassistant/components/heos/ @andrewsayre
|
||||
/tests/components/heos/ @andrewsayre
|
||||
/homeassistant/components/here_travel_time/ @eifinger
|
||||
@@ -713,8 +717,10 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/homekit_controller/ @Jc2k @bdraco
|
||||
/homeassistant/components/homematic/ @pvizeli
|
||||
/tests/components/homematic/ @pvizeli
|
||||
/homeassistant/components/homematicip_cloud/ @hahn-th
|
||||
/tests/components/homematicip_cloud/ @hahn-th
|
||||
/homeassistant/components/homematicip_cloud/ @hahn-th @lackas
|
||||
/tests/components/homematicip_cloud/ @hahn-th @lackas
|
||||
/homeassistant/components/homevolt/ @danielhiversen
|
||||
/tests/components/homevolt/ @danielhiversen
|
||||
/homeassistant/components/homewizard/ @DCSBL
|
||||
/tests/components/homewizard/ @DCSBL
|
||||
/homeassistant/components/honeywell/ @rdfurman @mkmer
|
||||
@@ -756,6 +762,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/icloud/ @Quentame @nzapponi
|
||||
/homeassistant/components/idasen_desk/ @abmantis
|
||||
/tests/components/idasen_desk/ @abmantis
|
||||
/homeassistant/components/idrive_e2/ @patrickvorgers
|
||||
/tests/components/idrive_e2/ @patrickvorgers
|
||||
/homeassistant/components/igloohome/ @keithle888
|
||||
/tests/components/igloohome/ @keithle888
|
||||
/homeassistant/components/ign_sismologia/ @exxamalte
|
||||
@@ -800,6 +808,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/insteon/ @teharris1
|
||||
/homeassistant/components/integration/ @dgomes
|
||||
/tests/components/integration/ @dgomes
|
||||
/homeassistant/components/intelliclima/ @dvdinth
|
||||
/tests/components/intelliclima/ @dvdinth
|
||||
/homeassistant/components/intellifire/ @jeeftor
|
||||
/tests/components/intellifire/ @jeeftor
|
||||
/homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
@@ -1076,8 +1086,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/nam/ @bieniu
|
||||
/homeassistant/components/namecheapdns/ @tr4nt0r
|
||||
/tests/components/namecheapdns/ @tr4nt0r
|
||||
/homeassistant/components/nanoleaf/ @milanmeu @joostlek
|
||||
/tests/components/nanoleaf/ @milanmeu @joostlek
|
||||
/homeassistant/components/nanoleaf/ @milanmeu @joostlek @loebi-ch @JaspervRijbroek @jonathanrobichaud4
|
||||
/tests/components/nanoleaf/ @milanmeu @joostlek @loebi-ch @JaspervRijbroek @jonathanrobichaud4
|
||||
/homeassistant/components/nasweb/ @nasWebio
|
||||
/tests/components/nasweb/ @nasWebio
|
||||
/homeassistant/components/nederlandse_spoorwegen/ @YarmoM @heindrichpaul
|
||||
@@ -1355,6 +1365,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/recorder/ @home-assistant/core
|
||||
/homeassistant/components/recovery_mode/ @home-assistant/core
|
||||
/tests/components/recovery_mode/ @home-assistant/core
|
||||
/homeassistant/components/redgtech/ @jonhsady @luan-nvg
|
||||
/tests/components/redgtech/ @jonhsady @luan-nvg
|
||||
/homeassistant/components/refoss/ @ashionky
|
||||
/tests/components/refoss/ @ashionky
|
||||
/homeassistant/components/rehlko/ @bdraco @peterager
|
||||
@@ -1561,6 +1573,7 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/speedtestdotnet/ @rohankapoorcom @engrbm87
|
||||
/tests/components/speedtestdotnet/ @rohankapoorcom @engrbm87
|
||||
/homeassistant/components/splunk/ @Bre77
|
||||
/tests/components/splunk/ @Bre77
|
||||
/homeassistant/components/spotify/ @frenck @joostlek
|
||||
/tests/components/spotify/ @frenck @joostlek
|
||||
/homeassistant/components/sql/ @gjohansson-ST @dougiteixeira
|
||||
|
||||
5 homeassistant/brands/heatit.json (new file)
@@ -0,0 +1,5 @@
{
"domain": "heatit",
"name": "Heatit",
"iot_standards": ["zwave"]
}
5 homeassistant/brands/heiman.json (new file)
@@ -0,0 +1,5 @@
{
"domain": "heiman",
"name": "Heiman",
"iot_standards": ["matter", "zigbee"]
}
@@ -64,7 +64,7 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
else:
errors = {"base": "cannot_connect"}

except (ConnectTimeout, HTTPError):
except ConnectTimeout, HTTPError:
errors = {"base": "cannot_connect"}

if errors:
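One detail worth checking in pairs like the one above (the same pattern recurs in the AccuWeather, Airobot, airOS, AirVisual, Airzone Cloud, and Alexa hunks below): Python 3 only accepts multiple exception types on a single `except` line when they are grouped in a tuple; the bare comma form (`except ConnectTimeout, HTTPError:`) is Python 2 syntax and is a SyntaxError today. A minimal, self-contained sketch of the valid form; the stub function is illustrative, not Abode code:

```python
from requests.exceptions import ConnectTimeout, HTTPError


def do_request() -> None:
    """Stand-in for the real client call."""
    raise ConnectTimeout("simulated timeout")


errors: dict[str, str] = {}
try:
    do_request()
except (ConnectTimeout, HTTPError):  # tuple form; `except A, B:` is invalid in Python 3
    errors = {"base": "cannot_connect"}

print(errors)  # {'base': 'cannot_connect'}
```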
@@ -99,7 +99,7 @@ class AbodeLight(AbodeDevice, LightEntity):
return _hs

@property
def color_mode(self) -> ColorMode | None:
def color_mode(self) -> ColorMode:
"""Return the color mode of the light."""
if self._device.is_dimmable and self._device.is_color_capable:
if self.hs_color is not None:
@@ -110,7 +110,7 @@ class AbodeLight(AbodeDevice, LightEntity):
return ColorMode.ONOFF

@property
def supported_color_modes(self) -> set[ColorMode] | None:
def supported_color_modes(self) -> set[ColorMode]:
"""Flag supported color modes."""
if self._device.is_dimmable and self._device.is_color_capable:
return {ColorMode.COLOR_TEMP, ColorMode.HS}
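The two property signatures above are narrowed from `ColorMode | None` and `set[ColorMode] | None` to types that always yield a concrete value. A minimal sketch of a light entity written against the narrowed signatures; the `ExampleLight` class and its `_dimmable` flag are illustrative, not part of the Abode integration:

```python
from homeassistant.components.light import ColorMode, LightEntity


class ExampleLight(LightEntity):
    """Illustrative light that always reports a concrete color mode."""

    _dimmable = True  # assumption: stands in for the device capability checks

    @property
    def color_mode(self) -> ColorMode:
        """Return the active color mode, never None."""
        return ColorMode.BRIGHTNESS if self._dimmable else ColorMode.ONOFF

    @property
    def supported_color_modes(self) -> set[ColorMode]:
        """Return the supported modes as a non-optional set."""
        return {ColorMode.BRIGHTNESS} if self._dimmable else {ColorMode.ONOFF}
```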
@@ -43,7 +43,7 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
longitude=user_input[CONF_LONGITUDE],
)
await accuweather.async_get_location()
except (ApiError, ClientConnectorError, TimeoutError, ClientError):
except ApiError, ClientConnectorError, TimeoutError, ClientError:
errors["base"] = "cannot_connect"
except InvalidApiKeyError:
errors[CONF_API_KEY] = "invalid_api_key"
@@ -104,7 +104,7 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
longitude=self._longitude,
)
await accuweather.async_get_location()
except (ApiError, ClientConnectorError, TimeoutError, ClientError):
except ApiError, ClientConnectorError, TimeoutError, ClientError:
errors["base"] = "cannot_connect"
except InvalidApiKeyError:
errors["base"] = "invalid_api_key"
@@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator
|
||||
from .entity import ActronAirAcEntity, ActronAirZoneEntity
|
||||
from .entity import ActronAirAcEntity, ActronAirZoneEntity, handle_actron_api_errors
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
@@ -136,16 +136,19 @@ class ActronSystemClimate(ActronAirAcEntity, ActronAirClimateEntity):
|
||||
"""Return the target temperature."""
|
||||
return self._status.user_aircon_settings.temperature_setpoint_cool_c
|
||||
|
||||
@handle_actron_api_errors
|
||||
async def async_set_fan_mode(self, fan_mode: str) -> None:
|
||||
"""Set a new fan mode."""
|
||||
api_fan_mode = FAN_MODE_MAPPING_HA_TO_ACTRONAIR.get(fan_mode)
|
||||
await self._status.user_aircon_settings.set_fan_mode(api_fan_mode)
|
||||
|
||||
@handle_actron_api_errors
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set the HVAC mode."""
|
||||
ac_mode = HVAC_MODE_MAPPING_HA_TO_ACTRONAIR.get(hvac_mode)
|
||||
await self._status.ac_system.set_system_mode(ac_mode)
|
||||
|
||||
@handle_actron_api_errors
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set the temperature."""
|
||||
temp = kwargs.get(ATTR_TEMPERATURE)
|
||||
@@ -209,11 +212,13 @@ class ActronZoneClimate(ActronAirZoneEntity, ActronAirClimateEntity):
|
||||
"""Return the target temperature."""
|
||||
return self._zone.temperature_setpoint_cool_c
|
||||
|
||||
@handle_actron_api_errors
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set the HVAC mode."""
|
||||
is_enabled = hvac_mode != HVACMode.OFF
|
||||
await self._zone.enable(is_enabled)
|
||||
|
||||
@handle_actron_api_errors
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set the temperature."""
|
||||
await self._zone.set_temperature(temperature=kwargs.get(ATTR_TEMPERATURE))
|
||||
|
||||
@@ -1,7 +1,12 @@
"""Base entity classes for Actron Air integration."""

from actron_neo_api import ActronAirZone
from collections.abc import Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate

from actron_neo_api import ActronAirAPIError, ActronAirZone

from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

@@ -9,6 +14,26 @@ from .const import DOMAIN
from .coordinator import ActronAirSystemCoordinator


def handle_actron_api_errors[_EntityT: ActronAirEntity, **_P](
func: Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]:
"""Decorate Actron Air API calls to handle ActronAirAPIError exceptions."""

@wraps(func)
async def wrapper(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None:
"""Wrap API calls with exception handling."""
try:
await func(self, *args, **kwargs)
except ActronAirAPIError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="api_error",
translation_placeholders={"error": str(err)},
) from err

return wrapper


class ActronAirEntity(CoordinatorEntity[ActronAirSystemCoordinator]):
"""Base class for Actron Air entities."""

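For context, this is how a decorator shaped like `handle_actron_api_errors` is consumed by the climate and switch platforms elsewhere in this diff: the wrapped coroutine keeps its signature, and any `ActronAirAPIError` surfaces as a translated `HomeAssistantError` with the `api_error` key. The class and the client call below are a sketch under assumed names, not the integration's actual code:

```python
from typing import Any

from homeassistant.components.climate import ClimateEntity

# Module path assumed; ActronAirEntity and handle_actron_api_errors are the
# names defined in the entity.py hunk above.
from homeassistant.components.actron_air.entity import (
    ActronAirEntity,
    handle_actron_api_errors,
)


class SketchActronClimate(ActronAirEntity, ClimateEntity):
    """Illustrative entity; the real classes live in climate.py."""

    @handle_actron_api_errors
    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Any ActronAirAPIError raised below becomes a HomeAssistantError."""
        # The client setter is assumed; the diff uses similar setters on
        # user_aircon_settings and zone objects.
        await self._status.user_aircon_settings.set_temperature(kwargs["temperature"])
```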
@@ -26,7 +26,7 @@ rules:
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
|
||||
@@ -49,6 +49,9 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"api_error": {
|
||||
"message": "Failed to communicate with Actron Air device: {error}"
|
||||
},
|
||||
"auth_error": {
|
||||
"message": "Authentication failed, please reauthenticate"
|
||||
},
|
||||
|
||||
@@ -10,7 +10,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator
|
||||
from .entity import ActronAirAcEntity
|
||||
from .entity import ActronAirAcEntity, handle_actron_api_errors
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
@@ -29,30 +29,42 @@ SWITCHES: tuple[ActronAirSwitchEntityDescription, ...] = (
|
||||
key="away_mode",
|
||||
translation_key="away_mode",
|
||||
is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.away_mode,
|
||||
set_fn=lambda coordinator,
|
||||
enabled: coordinator.data.user_aircon_settings.set_away_mode(enabled),
|
||||
set_fn=lambda coordinator, enabled: (
|
||||
coordinator.data.user_aircon_settings.set_away_mode(enabled)
|
||||
),
|
||||
),
|
||||
ActronAirSwitchEntityDescription(
|
||||
key="continuous_fan",
|
||||
translation_key="continuous_fan",
|
||||
is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.continuous_fan_enabled,
|
||||
set_fn=lambda coordinator,
|
||||
enabled: coordinator.data.user_aircon_settings.set_continuous_mode(enabled),
|
||||
is_on_fn=lambda coordinator: (
|
||||
coordinator.data.user_aircon_settings.continuous_fan_enabled
|
||||
),
|
||||
set_fn=lambda coordinator, enabled: (
|
||||
coordinator.data.user_aircon_settings.set_continuous_mode(enabled)
|
||||
),
|
||||
),
|
||||
ActronAirSwitchEntityDescription(
|
||||
key="quiet_mode",
|
||||
translation_key="quiet_mode",
|
||||
is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.quiet_mode_enabled,
|
||||
set_fn=lambda coordinator,
|
||||
enabled: coordinator.data.user_aircon_settings.set_quiet_mode(enabled),
|
||||
is_on_fn=lambda coordinator: (
|
||||
coordinator.data.user_aircon_settings.quiet_mode_enabled
|
||||
),
|
||||
set_fn=lambda coordinator, enabled: (
|
||||
coordinator.data.user_aircon_settings.set_quiet_mode(enabled)
|
||||
),
|
||||
),
|
||||
ActronAirSwitchEntityDescription(
|
||||
key="turbo_mode",
|
||||
translation_key="turbo_mode",
|
||||
is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.turbo_enabled,
|
||||
set_fn=lambda coordinator,
|
||||
enabled: coordinator.data.user_aircon_settings.set_turbo_mode(enabled),
|
||||
is_supported_fn=lambda coordinator: coordinator.data.user_aircon_settings.turbo_supported,
|
||||
is_on_fn=lambda coordinator: (
|
||||
coordinator.data.user_aircon_settings.turbo_enabled
|
||||
),
|
||||
set_fn=lambda coordinator, enabled: (
|
||||
coordinator.data.user_aircon_settings.set_turbo_mode(enabled)
|
||||
),
|
||||
is_supported_fn=lambda coordinator: (
|
||||
coordinator.data.user_aircon_settings.turbo_supported
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
@@ -93,10 +105,12 @@ class ActronAirSwitch(ActronAirAcEntity, SwitchEntity):
|
||||
"""Return true if the switch is on."""
|
||||
return self.entity_description.is_on_fn(self.coordinator)
|
||||
|
||||
@handle_actron_api_errors
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch on."""
|
||||
await self.entity_description.set_fn(self.coordinator, True)
|
||||
|
||||
@handle_actron_api_errors
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch off."""
|
||||
await self.entity_description.set_fn(self.coordinator, False)
|
||||
|
||||
@@ -20,9 +20,10 @@ from homeassistant.const import (
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.exceptions import ConfigEntryNotReady, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import (
|
||||
CONF_FORCE,
|
||||
@@ -45,6 +46,7 @@ SERVICE_REFRESH_SCHEMA = vol.Schema(
|
||||
{vol.Optional(CONF_FORCE, default=False): cv.boolean}
|
||||
)
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
PLATFORMS = [Platform.SENSOR, Platform.SWITCH, Platform.UPDATE]
|
||||
type AdGuardConfigEntry = ConfigEntry[AdGuardData]
|
||||
|
||||
@@ -57,6 +59,69 @@ class AdGuardData:
version: str


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the component."""

def _get_adguard_instances(hass: HomeAssistant) -> list[AdGuardHome]:
"""Get the AdGuardHome instances."""
entries: list[AdGuardConfigEntry] = hass.config_entries.async_loaded_entries(
DOMAIN
)
if not entries:
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="config_entry_not_loaded"
)
return [entry.runtime_data.client for entry in entries]

async def add_url(call: ServiceCall) -> None:
"""Service call to add a new filter subscription to AdGuard Home."""
for adguard in _get_adguard_instances(call.hass):
await adguard.filtering.add_url(
allowlist=False, name=call.data[CONF_NAME], url=call.data[CONF_URL]
)

async def remove_url(call: ServiceCall) -> None:
"""Service call to remove a filter subscription from AdGuard Home."""
for adguard in _get_adguard_instances(call.hass):
await adguard.filtering.remove_url(allowlist=False, url=call.data[CONF_URL])

async def enable_url(call: ServiceCall) -> None:
"""Service call to enable a filter subscription in AdGuard Home."""
for adguard in _get_adguard_instances(call.hass):
await adguard.filtering.enable_url(allowlist=False, url=call.data[CONF_URL])

async def disable_url(call: ServiceCall) -> None:
"""Service call to disable a filter subscription in AdGuard Home."""
for adguard in _get_adguard_instances(call.hass):
await adguard.filtering.disable_url(
allowlist=False, url=call.data[CONF_URL]
)

async def refresh(call: ServiceCall) -> None:
"""Service call to refresh the filter subscriptions in AdGuard Home."""
for adguard in _get_adguard_instances(call.hass):
await adguard.filtering.refresh(
allowlist=False, force=call.data[CONF_FORCE]
)

hass.services.async_register(
DOMAIN, SERVICE_ADD_URL, add_url, schema=SERVICE_ADD_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_REMOVE_URL, remove_url, schema=SERVICE_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_ENABLE_URL, enable_url, schema=SERVICE_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_DISABLE_URL, disable_url, schema=SERVICE_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_REFRESH, refresh, schema=SERVICE_REFRESH_SCHEMA
)
return True

async def async_setup_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> bool:
|
||||
"""Set up AdGuard Home from a config entry."""
|
||||
session = async_get_clientsession(hass, entry.data[CONF_VERIFY_SSL])
|
||||
@@ -79,56 +144,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> b
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
async def add_url(call: ServiceCall) -> None:
|
||||
"""Service call to add a new filter subscription to AdGuard Home."""
|
||||
await adguard.filtering.add_url(
|
||||
allowlist=False, name=call.data[CONF_NAME], url=call.data[CONF_URL]
|
||||
)
|
||||
|
||||
async def remove_url(call: ServiceCall) -> None:
|
||||
"""Service call to remove a filter subscription from AdGuard Home."""
|
||||
await adguard.filtering.remove_url(allowlist=False, url=call.data[CONF_URL])
|
||||
|
||||
async def enable_url(call: ServiceCall) -> None:
|
||||
"""Service call to enable a filter subscription in AdGuard Home."""
|
||||
await adguard.filtering.enable_url(allowlist=False, url=call.data[CONF_URL])
|
||||
|
||||
async def disable_url(call: ServiceCall) -> None:
|
||||
"""Service call to disable a filter subscription in AdGuard Home."""
|
||||
await adguard.filtering.disable_url(allowlist=False, url=call.data[CONF_URL])
|
||||
|
||||
async def refresh(call: ServiceCall) -> None:
|
||||
"""Service call to refresh the filter subscriptions in AdGuard Home."""
|
||||
await adguard.filtering.refresh(allowlist=False, force=call.data[CONF_FORCE])
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_ADD_URL, add_url, schema=SERVICE_ADD_URL_SCHEMA
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_REMOVE_URL, remove_url, schema=SERVICE_URL_SCHEMA
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_ENABLE_URL, enable_url, schema=SERVICE_URL_SCHEMA
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_DISABLE_URL, disable_url, schema=SERVICE_URL_SCHEMA
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_REFRESH, refresh, schema=SERVICE_REFRESH_SCHEMA
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> bool:
|
||||
"""Unload AdGuard Home config entry."""
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
if not hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
# This is the last loaded instance of AdGuard, deregister any services
|
||||
hass.services.async_remove(DOMAIN, SERVICE_ADD_URL)
|
||||
hass.services.async_remove(DOMAIN, SERVICE_REMOVE_URL)
|
||||
hass.services.async_remove(DOMAIN, SERVICE_ENABLE_URL)
|
||||
hass.services.async_remove(DOMAIN, SERVICE_DISABLE_URL)
|
||||
hass.services.async_remove(DOMAIN, SERVICE_REFRESH)
|
||||
|
||||
return unload_ok
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
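With registration moved to `async_setup` (above) and the per-entry copies removed here, a single call now fans out over every loaded AdGuard Home entry via `_get_adguard_instances`. A usage sketch from inside any coroutine that has a `hass` reference; the filter name and URL are made up, and the domain string is assumed to be `adguard`:

```python
# Invoke the domain-level service, e.g. from a script action or an
# integration test with a running HomeAssistant instance.
await hass.services.async_call(
    "adguard",   # integration DOMAIN (assumed string)
    "add_url",   # SERVICE_ADD_URL
    {
        "name": "Example blocklist",              # CONF_NAME
        "url": "https://example.com/filter.txt",  # CONF_URL (illustrative)
    },
    blocking=True,
)
```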
@@ -76,6 +76,11 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"config_entry_not_loaded": {
|
||||
"message": "Config entry not loaded."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"add_url": {
|
||||
"description": "Adds a new filter subscription to AdGuard Home.",
|
||||
|
||||
@@ -7,10 +7,12 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN, SERVER_URL
|
||||
from .services import async_setup_services
|
||||
|
||||
ATTRIBUTION = "ispyconnect.com"
|
||||
DEFAULT_BRAND = "Agent DVR by ispyconnect.com"
|
||||
@@ -19,6 +21,14 @@ PLATFORMS = [Platform.ALARM_CONTROL_PANEL, Platform.CAMERA]
|
||||
|
||||
AgentDVRConfigEntry = ConfigEntry[Agent]
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the component."""
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, config_entry: AgentDVRConfigEntry
|
||||
|
||||
@@ -9,10 +9,7 @@ from homeassistant.components.camera import CameraEntityFeature
|
||||
from homeassistant.components.mjpeg import MjpegCamera, filter_urllib3_logging
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
async_get_current_platform,
|
||||
)
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AgentDVRConfigEntry
|
||||
from .const import ATTRIBUTION, CAMERA_SCAN_INTERVAL_SECS, DOMAIN
|
||||
@@ -21,20 +18,6 @@ SCAN_INTERVAL = timedelta(seconds=CAMERA_SCAN_INTERVAL_SECS)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_DEV_EN_ALT = "enable_alerts"
|
||||
_DEV_DS_ALT = "disable_alerts"
|
||||
_DEV_EN_REC = "start_recording"
|
||||
_DEV_DS_REC = "stop_recording"
|
||||
_DEV_SNAP = "snapshot"
|
||||
|
||||
CAMERA_SERVICES = {
|
||||
_DEV_EN_ALT: "async_enable_alerts",
|
||||
_DEV_DS_ALT: "async_disable_alerts",
|
||||
_DEV_EN_REC: "async_start_recording",
|
||||
_DEV_DS_REC: "async_stop_recording",
|
||||
_DEV_SNAP: "async_snapshot",
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -57,10 +40,6 @@ async def async_setup_entry(
|
||||
|
||||
async_add_entities(cameras)
|
||||
|
||||
platform = async_get_current_platform()
|
||||
for service, method in CAMERA_SERVICES.items():
|
||||
platform.async_register_entity_service(service, None, method)
|
||||
|
||||
|
||||
class AgentCamera(MjpegCamera):
|
||||
"""Representation of an Agent Device Stream."""
|
||||
|
||||
38 homeassistant/components/agent_dvr/services.py (new file)
@@ -0,0 +1,38 @@
"""Services for Agent DVR."""

from __future__ import annotations

from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import service

from .const import DOMAIN

_DEV_EN_ALT = "enable_alerts"
_DEV_DS_ALT = "disable_alerts"
_DEV_EN_REC = "start_recording"
_DEV_DS_REC = "stop_recording"
_DEV_SNAP = "snapshot"

CAMERA_SERVICES = {
_DEV_EN_ALT: "async_enable_alerts",
_DEV_DS_ALT: "async_disable_alerts",
_DEV_EN_REC: "async_start_recording",
_DEV_DS_REC: "async_stop_recording",
_DEV_SNAP: "async_snapshot",
}


@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Home Assistant services."""

for service_name, method in CAMERA_SERVICES.items():
service.async_register_platform_entity_service(
hass,
DOMAIN,
service_name,
entity_domain=CAMERA_DOMAIN,
schema=None,
func=method,
)
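`async_register_platform_entity_service` exposes these as entity services scoped to this integration's camera entities, so a call targets one or more cameras rather than the whole domain. A hedged usage sketch; the entity ID is illustrative, and `hass` is assumed to be a running `HomeAssistant` instance:

```python
# Trigger a snapshot on a specific Agent DVR camera entity.
await hass.services.async_call(
    "agent_dvr",   # integration domain
    "snapshot",    # _DEV_SNAP, dispatched to the entity's async_snapshot
    target={"entity_id": "camera.driveway_agent_stream"},  # illustrative entity
    blocking=True,
)
```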
@@ -133,8 +133,9 @@ CONTROL_ENTITIES: tuple[AirGradientSelectEntityDescription, ...] = (
|
||||
value_fn=lambda config: _get_value(
|
||||
config.co2_automatic_baseline_calibration_days, ABC_DAYS
|
||||
),
|
||||
set_value_fn=lambda client,
|
||||
value: client.set_co2_automatic_baseline_calibration(int(value)),
|
||||
set_value_fn=lambda client, value: (
|
||||
client.set_co2_automatic_baseline_calibration(int(value))
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -85,7 +85,7 @@ class AirobotButton(AirobotEntity, ButtonEntity):
|
||||
"""Handle the button press."""
|
||||
try:
|
||||
await self.entity_description.press_fn(self.coordinator)
|
||||
except (AirobotConnectionError, AirobotTimeoutError):
|
||||
except AirobotConnectionError, AirobotTimeoutError:
|
||||
# Connection errors during reboot are expected as device restarts
|
||||
pass
|
||||
except AirobotError as err:
|
||||
|
||||
@@ -114,7 +114,7 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
AirOSDeviceConnectionError,
|
||||
):
|
||||
self.errors["base"] = "cannot_connect"
|
||||
except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
|
||||
except AirOSConnectionAuthenticationError, AirOSDataMissingError:
|
||||
self.errors["base"] = "invalid_auth"
|
||||
except AirOSKeyDataMissingError:
|
||||
self.errors["base"] = "key_data_missing"
|
||||
|
||||
@@ -130,7 +130,7 @@ class AirVisualFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
try:
|
||||
await coro
|
||||
except (InvalidKeyError, KeyExpiredError, UnauthorizedError):
|
||||
except InvalidKeyError, KeyExpiredError, UnauthorizedError:
|
||||
errors[CONF_API_KEY] = "invalid_api_key"
|
||||
except NotFoundError:
|
||||
errors[CONF_CITY] = "location_not_found"
|
||||
|
||||
@@ -100,7 +100,7 @@ class AirZoneCloudConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
try:
|
||||
await self.airzone.login()
|
||||
except (AirzoneCloudError, LoginError):
|
||||
except AirzoneCloudError, LoginError:
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
return await self.async_step_inst_pick()
|
||||
|
||||
@@ -18,12 +18,15 @@ from homeassistant.const import (
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.dispatcher import dispatcher_send
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import (
|
||||
CONF_DEVICE_BAUD,
|
||||
CONF_DEVICE_PATH,
|
||||
DOMAIN,
|
||||
PROTOCOL_SERIAL,
|
||||
PROTOCOL_SOCKET,
|
||||
SIGNAL_PANEL_MESSAGE,
|
||||
@@ -32,9 +35,11 @@ from .const import (
|
||||
SIGNAL_ZONE_FAULT,
|
||||
SIGNAL_ZONE_RESTORE,
|
||||
)
|
||||
from .services import async_setup_services
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
PLATFORMS = [
|
||||
Platform.ALARM_CONTROL_PANEL,
|
||||
Platform.BINARY_SENSOR,
|
||||
@@ -54,6 +59,12 @@ class AlarmDecoderData:
|
||||
restart: bool
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the component."""
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: AlarmDecoderConfigEntry
|
||||
) -> bool:
|
||||
|
||||
@@ -2,17 +2,13 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.alarm_control_panel import (
|
||||
AlarmControlPanelEntity,
|
||||
AlarmControlPanelEntityFeature,
|
||||
AlarmControlPanelState,
|
||||
CodeFormat,
|
||||
)
|
||||
from homeassistant.const import ATTR_CODE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
@@ -27,11 +23,6 @@ from .const import (
|
||||
)
|
||||
from .entity import AlarmDecoderEntity
|
||||
|
||||
SERVICE_ALARM_TOGGLE_CHIME = "alarm_toggle_chime"
|
||||
|
||||
SERVICE_ALARM_KEYPRESS = "alarm_keypress"
|
||||
ATTR_KEYPRESS = "keypress"
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -50,23 +41,6 @@ async def async_setup_entry(
|
||||
)
|
||||
async_add_entities([entity])
|
||||
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_ALARM_TOGGLE_CHIME,
|
||||
{
|
||||
vol.Required(ATTR_CODE): cv.string,
|
||||
},
|
||||
"alarm_toggle_chime",
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_ALARM_KEYPRESS,
|
||||
{
|
||||
vol.Required(ATTR_KEYPRESS): cv.string,
|
||||
},
|
||||
"alarm_keypress",
|
||||
)
|
||||
|
||||
|
||||
class AlarmDecoderAlarmPanel(AlarmDecoderEntity, AlarmControlPanelEntity):
|
||||
"""Representation of an AlarmDecoder-based alarm panel."""
|
||||
|
||||
46 homeassistant/components/alarmdecoder/services.py (new file)
@@ -0,0 +1,46 @@
"""Support for AlarmDecoder-based alarm control panels (Honeywell/DSC)."""

from __future__ import annotations

import voluptuous as vol

from homeassistant.components.alarm_control_panel import (
DOMAIN as ALARM_CONTROL_PANEL_DOMAIN,
)
from homeassistant.const import ATTR_CODE
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, service

from .const import DOMAIN

SERVICE_ALARM_TOGGLE_CHIME = "alarm_toggle_chime"

SERVICE_ALARM_KEYPRESS = "alarm_keypress"
ATTR_KEYPRESS = "keypress"


@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Home Assistant services."""

service.async_register_platform_entity_service(
hass,
DOMAIN,
SERVICE_ALARM_TOGGLE_CHIME,
entity_domain=ALARM_CONTROL_PANEL_DOMAIN,
schema={
vol.Required(ATTR_CODE): cv.string,
},
func="alarm_toggle_chime",
)

service.async_register_platform_entity_service(
hass,
DOMAIN,
SERVICE_ALARM_KEYPRESS,
entity_domain=ALARM_CONTROL_PANEL_DOMAIN,
schema={
vol.Required(ATTR_KEYPRESS): cv.string,
},
func="alarm_keypress",
)
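Same pattern as the Agent DVR change, but these entity services carry a validated schema, so the call must supply the declared field alongside the target. A sketch of sending keypresses to the panel; the entity ID and keys are illustrative:

```python
# Send keypresses to an AlarmDecoder panel; `keypress` is required by the schema.
await hass.services.async_call(
    "alarmdecoder",      # integration domain
    "alarm_keypress",    # SERVICE_ALARM_KEYPRESS
    {"keypress": "*71"},  # ATTR_KEYPRESS, validated as cv.string
    target={"entity_id": "alarm_control_panel.alarm_panel"},  # illustrative
    blocking=True,
)
```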
@@ -123,7 +123,7 @@ class Auth:
|
||||
allow_redirects=True,
|
||||
)
|
||||
|
||||
except (TimeoutError, aiohttp.ClientError):
|
||||
except TimeoutError, aiohttp.ClientError:
|
||||
_LOGGER.error("Timeout calling LWA to get auth token")
|
||||
return None
|
||||
|
||||
|
||||
@@ -358,7 +358,7 @@ async def async_send_changereport_message(
|
||||
"""
|
||||
try:
|
||||
token = await config.async_get_access_token()
|
||||
except (RequireRelink, NoTokenAvailable):
|
||||
except RequireRelink, NoTokenAvailable:
|
||||
await config.set_authorized(False)
|
||||
_LOGGER.error(
|
||||
"Error when sending ChangeReport to Alexa, could not get access token"
|
||||
@@ -392,7 +392,7 @@ async def async_send_changereport_message(
|
||||
allow_redirects=True,
|
||||
)
|
||||
|
||||
except (TimeoutError, aiohttp.ClientError):
|
||||
except TimeoutError, aiohttp.ClientError:
|
||||
_LOGGER.error("Timeout sending report to Alexa for %s", alexa_entity.entity_id)
|
||||
return
|
||||
|
||||
@@ -549,7 +549,7 @@ async def async_send_doorbell_event_message(
|
||||
allow_redirects=True,
|
||||
)
|
||||
|
||||
except (TimeoutError, aiohttp.ClientError):
|
||||
except TimeoutError, aiohttp.ClientError:
|
||||
_LOGGER.error("Timeout sending report to Alexa for %s", alexa_entity.entity_id)
|
||||
return
|
||||
|
||||
|
||||
@@ -29,3 +29,24 @@ COUNTRY_DOMAINS = {

CATEGORY_SENSORS = "sensors"
CATEGORY_NOTIFICATIONS = "notifications"

# Map service translation keys to Alexa API
INFO_SKILLS_MAPPING = {
"calendar_today": "Alexa.Calendar.PlayToday",
"calendar_tomorrow": "Alexa.Calendar.PlayTomorrow",
"calendar_next": "Alexa.Calendar.PlayNext",
"date": "Alexa.Date.Play",
"time": "Alexa.Time.Play",
"national_news": "Alexa.News.NationalNews",
"flash_briefing": "Alexa.FlashBriefing.Play",
"traffic": "Alexa.Traffic.Play",
"weather": "Alexa.Weather.Play",
"cleanup": "Alexa.CleanUp.Play",
"good_morning": "Alexa.GoodMorning.Play",
"sing_song": "Alexa.SingASong.Play",
"fun_fact": "Alexa.FunFact.Play",
"tell_joke": "Alexa.Joke.Play",
"tell_story": "Alexa.TellStory.Play",
"im_home": "Alexa.ImHome.Play",
"goodnight": "Alexa.GoodNight.Play",
}
@@ -1,5 +1,8 @@
|
||||
{
|
||||
"services": {
|
||||
"send_info_skill": {
|
||||
"service": "mdi:information"
|
||||
},
|
||||
"send_sound": {
|
||||
"service": "mdi:cast-audio"
|
||||
},
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioamazondevices==11.0.2"]
|
||||
"requirements": ["aioamazondevices==12.0.0"]
|
||||
}
|
||||
|
||||
@@ -28,6 +28,7 @@ from homeassistant.helpers.typing import StateType
|
||||
from .const import CATEGORY_NOTIFICATIONS, CATEGORY_SENSORS
|
||||
from .coordinator import AmazonConfigEntry
|
||||
from .entity import AmazonEntity
|
||||
from .utils import async_remove_unsupported_notification_sensors
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@@ -105,6 +106,9 @@ async def async_setup_entry(
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
# Remove notification sensors from unsupported devices
|
||||
await async_remove_unsupported_notification_sensors(hass, coordinator)
|
||||
|
||||
known_devices: set[str] = set()
|
||||
|
||||
def _check_device() -> None:
|
||||
@@ -122,6 +126,7 @@ async def async_setup_entry(
|
||||
AmazonSensorEntity(coordinator, serial_num, notification_desc)
|
||||
for notification_desc in NOTIFICATIONS
|
||||
for serial_num in new_devices
|
||||
if coordinator.data[serial_num].notifications_supported
|
||||
]
|
||||
async_add_entities(sensors_list + notifications_list)
|
||||
|
||||
|
||||
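`async_remove_unsupported_notification_sensors` itself is not shown in this diff; its behavior is only implied by the call site above. A hedged sketch of how such cleanup is commonly done with the entity registry; the helper name matches the import, but the body, the unique-ID layout, and the `coordinator.config_entry` access are assumptions:

```python
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er


async def async_remove_unsupported_notification_sensors(
    hass: HomeAssistant, coordinator
) -> None:
    """Sketch: drop notification sensors for devices that no longer support them."""
    registry = er.async_get(hass)
    for entry in er.async_entries_for_config_entry(
        registry, coordinator.config_entry.entry_id
    ):
        serial_num = entry.unique_id.split("-")[0]  # assumed unique_id layout
        device = coordinator.data.get(serial_num)
        # Real code would also check that the entry is one of the NOTIFICATIONS
        # sensor descriptions before removing it.
        if device is not None and not device.notifications_supported:
            registry.async_remove(entry.entity_id)
```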
@@ -1,5 +1,6 @@
|
||||
"""Support for services."""
|
||||
|
||||
from aioamazondevices.const.metadata import ALEXA_INFO_SKILLS
|
||||
from aioamazondevices.const.sounds import SOUNDS_LIST
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -9,13 +10,15 @@ from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import DOMAIN, INFO_SKILLS_MAPPING
|
||||
from .coordinator import AmazonConfigEntry
|
||||
|
||||
ATTR_TEXT_COMMAND = "text_command"
|
||||
ATTR_SOUND = "sound"
|
||||
ATTR_INFO_SKILL = "info_skill"
|
||||
SERVICE_TEXT_COMMAND = "send_text_command"
|
||||
SERVICE_SOUND_NOTIFICATION = "send_sound"
|
||||
SERVICE_INFO_SKILL = "send_info_skill"
|
||||
|
||||
SCHEMA_SOUND_SERVICE = vol.Schema(
|
||||
{
|
||||
@@ -29,6 +32,12 @@ SCHEMA_CUSTOM_COMMAND = vol.Schema(
|
||||
vol.Required(ATTR_DEVICE_ID): cv.string,
|
||||
}
|
||||
)
|
||||
SCHEMA_INFO_SKILL = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_INFO_SKILL): cv.string,
|
||||
vol.Required(ATTR_DEVICE_ID): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
@@ -86,6 +95,17 @@ async def _async_execute_action(call: ServiceCall, attribute: str) -> None:
|
||||
await coordinator.api.call_alexa_text_command(
|
||||
coordinator.data[device.serial_number], value
|
||||
)
|
||||
elif attribute == ATTR_INFO_SKILL:
|
||||
info_skill = INFO_SKILLS_MAPPING.get(value)
|
||||
if info_skill not in ALEXA_INFO_SKILLS:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_info_skill_value",
|
||||
translation_placeholders={"info_skill": value},
|
||||
)
|
||||
await coordinator.api.call_alexa_info_skill(
|
||||
coordinator.data[device.serial_number], info_skill
|
||||
)
|
||||
|
||||
|
||||
async def async_send_sound_notification(call: ServiceCall) -> None:
|
||||
@@ -98,6 +118,11 @@ async def async_send_text_command(call: ServiceCall) -> None:
|
||||
await _async_execute_action(call, ATTR_TEXT_COMMAND)
|
||||
|
||||
|
||||
async def async_send_info_skill(call: ServiceCall) -> None:
|
||||
"""Send an info skill command to a AmazonDevice."""
|
||||
await _async_execute_action(call, ATTR_INFO_SKILL)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the Amazon Devices integration."""
|
||||
@@ -112,5 +137,10 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
async_send_text_command,
|
||||
SCHEMA_CUSTOM_COMMAND,
|
||||
),
|
||||
(
|
||||
SERVICE_INFO_SKILL,
|
||||
async_send_info_skill,
|
||||
SCHEMA_INFO_SKILL,
|
||||
),
|
||||
):
|
||||
hass.services.async_register(DOMAIN, service_name, method, schema=schema)
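# Illustrative sketch only (not part of this diff): one way custom code could call
# the new send_info_skill service; the device ID below is a made-up placeholder.
async def _example_send_weather_skill(hass: HomeAssistant) -> None:
    """Ask an Alexa device to play the weather info skill."""
    await hass.services.async_call(
        DOMAIN,
        SERVICE_INFO_SKILL,
        {ATTR_DEVICE_ID: "1234567890abcdef", ATTR_INFO_SKILL: "weather"},
        blocking=True,
    )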
|
||||
|
||||
@@ -67,3 +67,36 @@ send_sound:
|
||||
- squeaky_12
|
||||
- zap_01
|
||||
translation_key: sound
|
||||
|
||||
send_info_skill:
|
||||
fields:
|
||||
device_id:
|
||||
required: true
|
||||
selector:
|
||||
device:
|
||||
integration: alexa_devices
|
||||
info_skill:
|
||||
required: true
|
||||
example: date
|
||||
default: date
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- calendar_today
|
||||
- calendar_tomorrow
|
||||
- calendar_next
|
||||
- date
|
||||
- time
|
||||
- national_news
|
||||
- flash_briefing
|
||||
- traffic
|
||||
- weather
|
||||
- cleanup
|
||||
- good_morning
|
||||
- sing_song
|
||||
- fun_fact
|
||||
- tell_joke
|
||||
- tell_story
|
||||
- im_home
|
||||
- goodnight
|
||||
translation_key: info_skill
|
||||
|
||||
@@ -90,6 +90,9 @@
|
||||
"cannot_retrieve_data_with_error": {
|
||||
"message": "Error retrieving data: {error}"
|
||||
},
|
||||
"config_entry_not_found": {
|
||||
"message": "Config entry not found: {device_id}"
|
||||
},
|
||||
"device_serial_number_missing": {
|
||||
"message": "Device serial number missing: {device_id}"
|
||||
},
|
||||
@@ -99,11 +102,35 @@
|
||||
"invalid_device_id": {
|
||||
"message": "Invalid device ID specified: {device_id}"
|
||||
},
|
||||
"invalid_info_skill_value": {
|
||||
"message": "Invalid info skill {info_skill} specified"
|
||||
},
|
||||
"invalid_sound_value": {
|
||||
"message": "Invalid sound {sound} specified"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"info_skill": {
|
||||
"options": {
|
||||
"calendar_next": "Calendar: Next event",
|
||||
"calendar_today": "Calendar: Today's Calendar",
|
||||
"calendar_tomorrow": "Calendar: Tomorrow's Calendar",
|
||||
"cleanup": "Encourage me to clean up",
|
||||
"date": "Date",
|
||||
"flash_briefing": "Flash Briefing",
|
||||
"fun_fact": "Tell me a fun fact",
|
||||
"good_morning": "Good morning",
|
||||
"goodnight": "Wish me a good night",
|
||||
"im_home": "Welcome me home",
|
||||
"national_news": "National News",
|
||||
"sing_song": "Sing a song",
|
||||
"tell_joke": "Tell me a joke",
|
||||
"tell_story": "Tell me a story",
|
||||
"time": "Time",
|
||||
"traffic": "Traffic",
|
||||
"weather": "Weather"
|
||||
}
|
||||
},
|
||||
"sound": {
|
||||
"options": {
|
||||
"air_horn_03": "Air horn",
|
||||
@@ -151,6 +178,20 @@
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"send_info_skill": {
|
||||
"description": "Sends an info skill command to a device",
|
||||
"fields": {
|
||||
"device_id": {
|
||||
"description": "[%key:component::alexa_devices::common::device_id_description%]",
|
||||
"name": "Device"
|
||||
},
|
||||
"info_skill": {
|
||||
"description": "The info skill command to send.",
|
||||
"name": "Alexa info skill command"
|
||||
}
|
||||
},
|
||||
"name": "Send info skill command"
|
||||
},
|
||||
"send_sound": {
|
||||
"description": "Sends a sound to a device",
|
||||
"fields": {
|
||||
|
||||
@@ -59,13 +59,15 @@ async def async_setup_entry(
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
# Replace unique id for "DND" switch and remove from Speaker Group
|
||||
await async_update_unique_id(
|
||||
hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
|
||||
)
|
||||
# DND keys
|
||||
old_key = "do_not_disturb"
|
||||
new_key = "dnd"
|
||||
|
||||
# Remove DND switch from virtual groups
|
||||
await async_remove_dnd_from_virtual_group(hass, coordinator)
|
||||
# Remove old DND switch from virtual groups
|
||||
await async_remove_dnd_from_virtual_group(hass, coordinator, old_key)
|
||||
|
||||
# Replace unique id for DND switch
|
||||
await async_update_unique_id(hass, coordinator, SWITCH_DOMAIN, old_key, new_key)
|
||||
|
||||
known_devices: set[str] = set()
|
||||
|
||||
|
||||
@@ -5,8 +5,14 @@ from functools import wraps
|
||||
from typing import Any, Concatenate
|
||||
|
||||
from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
|
||||
from aioamazondevices.const.schedules import (
|
||||
NOTIFICATION_ALARM,
|
||||
NOTIFICATION_REMINDER,
|
||||
NOTIFICATION_TIMER,
|
||||
)
|
||||
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
|
||||
|
||||
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
|
||||
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
@@ -48,7 +54,7 @@ def alexa_api_call[_T: AmazonEntity, **_P](
|
||||
async def async_update_unique_id(
|
||||
hass: HomeAssistant,
|
||||
coordinator: AmazonDevicesCoordinator,
|
||||
domain: str,
|
||||
platform: str,
|
||||
old_key: str,
|
||||
new_key: str,
|
||||
) -> None:
|
||||
@@ -57,7 +63,9 @@ async def async_update_unique_id(
|
||||
|
||||
for serial_num in coordinator.data:
|
||||
unique_id = f"{serial_num}-{old_key}"
|
||||
if entity_id := entity_registry.async_get_entity_id(domain, DOMAIN, unique_id):
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
platform, DOMAIN, unique_id
|
||||
):
|
||||
_LOGGER.debug("Updating unique_id for %s", entity_id)
|
||||
new_unique_id = unique_id.replace(old_key, new_key)
|
||||
|
||||
@@ -68,12 +76,13 @@ async def async_update_unique_id(
|
||||
async def async_remove_dnd_from_virtual_group(
|
||||
hass: HomeAssistant,
|
||||
coordinator: AmazonDevicesCoordinator,
|
||||
key: str,
|
||||
) -> None:
|
||||
"""Remove entity DND from virtual group."""
|
||||
entity_registry = er.async_get(hass)
|
||||
|
||||
for serial_num in coordinator.data:
|
||||
unique_id = f"{serial_num}-do_not_disturb"
|
||||
unique_id = f"{serial_num}-{key}"
|
||||
entity_id = entity_registry.async_get_entity_id(
|
||||
SWITCH_DOMAIN, DOMAIN, unique_id
|
||||
)
|
||||
@@ -81,3 +90,27 @@ async def async_remove_dnd_from_virtual_group(
|
||||
if entity_id and is_group:
|
||||
entity_registry.async_remove(entity_id)
|
||||
_LOGGER.debug("Removed DND switch from virtual group %s", entity_id)
|
||||
|
||||
|
||||
async def async_remove_unsupported_notification_sensors(
|
||||
hass: HomeAssistant,
|
||||
coordinator: AmazonDevicesCoordinator,
|
||||
) -> None:
|
||||
"""Remove notification sensors from unsupported devices."""
|
||||
entity_registry = er.async_get(hass)
|
||||
|
||||
for serial_num in coordinator.data:
|
||||
for notification_key in (
|
||||
NOTIFICATION_ALARM,
|
||||
NOTIFICATION_REMINDER,
|
||||
NOTIFICATION_TIMER,
|
||||
):
|
||||
unique_id = f"{serial_num}-{notification_key}"
|
||||
entity_id = entity_registry.async_get_entity_id(
|
||||
SENSOR_DOMAIN, DOMAIN, unique_id
|
||||
)
|
||||
is_unsupported = not coordinator.data[serial_num].notifications_supported
|
||||
|
||||
if entity_id and is_unsupported:
|
||||
entity_registry.async_remove(entity_id)
|
||||
_LOGGER.debug("Removed unsupported notification sensor %s", entity_id)
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
from amberelectric.models.channel import ChannelType
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import ATTR_CONFIG_ENTRY_ID
|
||||
from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
@@ -13,6 +12,7 @@ from homeassistant.core import (
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import service
|
||||
from homeassistant.helpers.selector import ConfigEntrySelector
|
||||
from homeassistant.util.json import JsonValueType
|
||||
|
||||
@@ -37,23 +37,6 @@ GET_FORECASTS_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
def async_get_entry(hass: HomeAssistant, config_entry_id: str) -> AmberConfigEntry:
|
||||
"""Get the Amber config entry."""
|
||||
if not (entry := hass.config_entries.async_get_entry(config_entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="integration_not_found",
|
||||
translation_placeholders={"target": config_entry_id},
|
||||
)
|
||||
if entry.state is not ConfigEntryState.LOADED:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_loaded",
|
||||
translation_placeholders={"target": entry.title},
|
||||
)
|
||||
return entry
|
||||
|
||||
|
||||
def get_forecasts(channel_type: str, data: dict) -> list[JsonValueType]:
|
||||
"""Return an array of forecasts."""
|
||||
results: list[JsonValueType] = []
|
||||
@@ -109,7 +92,9 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
|
||||
async def handle_get_forecasts(call: ServiceCall) -> ServiceResponse:
|
||||
channel_type = call.data[ATTR_CHANNEL_TYPE]
|
||||
entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID])
|
||||
entry: AmberConfigEntry = service.async_get_config_entry(
|
||||
hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY_ID]
|
||||
)
|
||||
coordinator = entry.runtime_data
|
||||
forecasts = get_forecasts(channel_type, coordinator.data)
|
||||
return {"forecasts": forecasts}
|
||||
|
||||
@@ -25,12 +25,6 @@
|
||||
"exceptions": {
|
||||
"channel_not_found": {
|
||||
"message": "There is no {channel_type} channel at this site."
|
||||
},
|
||||
"integration_not_found": {
|
||||
"message": "Config entry \"{target}\" not found in registry."
|
||||
},
|
||||
"not_loaded": {
|
||||
"message": "{target} is not loaded."
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
|
||||
@@ -77,9 +77,11 @@ class AmbientNetworkConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
# Filter out indoor stations
|
||||
self._stations = dict(
|
||||
filter(
|
||||
lambda item: not item[1]
|
||||
.get(API_STATION_INFO, {})
|
||||
.get(API_STATION_INDOOR, False),
|
||||
lambda item: (
|
||||
not item[1]
|
||||
.get(API_STATION_INFO, {})
|
||||
.get(API_STATION_INDOOR, False)
|
||||
),
|
||||
self._stations.items(),
|
||||
)
|
||||
)
|
||||
@@ -113,7 +115,7 @@ class AmbientNetworkConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id=CONF_USER, data_schema=schema, errors=errors if errors else {}
|
||||
step_id=CONF_USER, data_schema=schema, errors=errors or {}
|
||||
)
|
||||
|
||||
async def async_step_station(
|
||||
|
||||
@@ -31,7 +31,7 @@ class AmbientStationFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=self.data_schema,
|
||||
errors=errors if errors else {},
|
||||
errors=errors or {},
|
||||
)
|
||||
|
||||
async def async_step_user(
|
||||
|
||||
@@ -26,21 +26,26 @@ from homeassistant.const import (
|
||||
UnitOfVolumetricFlux,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AmbientStation, AmbientStationConfigEntry
|
||||
from . import AmbientStationConfigEntry
|
||||
from .const import ATTR_LAST_DATA, TYPE_SOLARRADIATION, TYPE_SOLARRADIATION_LX
|
||||
from .entity import AmbientWeatherEntity
|
||||
|
||||
TYPE_24HOURRAININ = "24hourrainin"
|
||||
TYPE_AQI_PM10_24H_AQIN = "aqi_pm10_24h_aqin"
|
||||
TYPE_AQI_PM10_AQIN = "aqi_pm10_aqin"
|
||||
TYPE_AQI_PM25 = "aqi_pm25"
|
||||
TYPE_AQI_PM25_24H = "aqi_pm25_24h"
|
||||
TYPE_AQI_PM25_24H_AQIN = "aqi_pm25_24h_aqin"
|
||||
TYPE_AQI_PM25_AQIN = "aqi_pm25_aqin"
|
||||
TYPE_AQI_PM25_IN = "aqi_pm25_in"
|
||||
TYPE_AQI_PM25_IN_24H = "aqi_pm25_in_24h"
|
||||
TYPE_BAROMABSIN = "baromabsin"
|
||||
TYPE_BAROMRELIN = "baromrelin"
|
||||
TYPE_CO2 = "co2"
|
||||
TYPE_CO2_IN_24H_AQIN = "co2_in_24h_aqin"
|
||||
TYPE_CO2_IN_AQIN = "co2_in_aqin"
|
||||
TYPE_DAILYRAININ = "dailyrainin"
|
||||
TYPE_DEWPOINT = "dewPoint"
|
||||
TYPE_EVENTRAININ = "eventrainin"
|
||||
@@ -58,17 +63,23 @@ TYPE_HUMIDITY7 = "humidity7"
|
||||
TYPE_HUMIDITY8 = "humidity8"
|
||||
TYPE_HUMIDITY9 = "humidity9"
|
||||
TYPE_HUMIDITYIN = "humidityin"
|
||||
TYPE_LASTLIGHTNING = "lightning_time"
|
||||
TYPE_LASTLIGHTNING_DISTANCE = "lightning_distance"
|
||||
TYPE_LASTRAIN = "lastRain"
|
||||
TYPE_LIGHTNING_PER_DAY = "lightning_day"
|
||||
TYPE_LIGHTNING_PER_HOUR = "lightning_hour"
|
||||
TYPE_LASTLIGHTNING_DISTANCE = "lightning_distance"
|
||||
TYPE_LASTLIGHTNING = "lightning_time"
|
||||
TYPE_MAXDAILYGUST = "maxdailygust"
|
||||
TYPE_MONTHLYRAININ = "monthlyrainin"
|
||||
TYPE_PM_IN_HUMIDITY_AQIN = "pm_in_humidity_aqin"
|
||||
TYPE_PM_IN_TEMP_AQIN = "pm_in_temp_aqin"
|
||||
TYPE_PM10_IN_24H_AQIN = "pm10_in_24h_aqin"
|
||||
TYPE_PM10_IN_AQIN = "pm10_in_aqin"
|
||||
TYPE_PM25 = "pm25"
|
||||
TYPE_PM25_24H = "pm25_24h"
|
||||
TYPE_PM25_IN = "pm25_in"
|
||||
TYPE_PM25_IN_24H = "pm25_in_24h"
|
||||
TYPE_PM25_IN_24H_AQIN = "pm25_in_24h_aqin"
|
||||
TYPE_PM25_IN_AQIN = "pm25_in_aqin"
|
||||
TYPE_SOILHUM1 = "soilhum1"
|
||||
TYPE_SOILHUM10 = "soilhum10"
|
||||
TYPE_SOILHUM2 = "soilhum2"
|
||||
@@ -79,8 +90,8 @@ TYPE_SOILHUM6 = "soilhum6"
|
||||
TYPE_SOILHUM7 = "soilhum7"
|
||||
TYPE_SOILHUM8 = "soilhum8"
|
||||
TYPE_SOILHUM9 = "soilhum9"
|
||||
TYPE_SOILTEMP1F = "soiltemp1f"
|
||||
TYPE_SOILTEMP10F = "soiltemp10f"
|
||||
TYPE_SOILTEMP1F = "soiltemp1f"
|
||||
TYPE_SOILTEMP2F = "soiltemp2f"
|
||||
TYPE_SOILTEMP3F = "soiltemp3f"
|
||||
TYPE_SOILTEMP4F = "soiltemp4f"
|
||||
@@ -144,6 +155,86 @@ SENSOR_DESCRIPTIONS = (
|
||||
translation_key="pm25_indoor_aqi_24h_average",
|
||||
device_class=SensorDeviceClass.AQI,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_PM25_IN_AQIN,
|
||||
translation_key="pm25_indoor_aqin",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM25,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_PM25_IN_24H_AQIN,
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
translation_key="pm25_indoor_24h_aqin",
|
||||
device_class=SensorDeviceClass.PM25,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_PM10_IN_AQIN,
|
||||
translation_key="pm10_indoor_aqin",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM10,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_PM10_IN_24H_AQIN,
|
||||
translation_key="pm10_indoor_24h_aqin",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM10,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_CO2_IN_AQIN,
|
||||
translation_key="co2_indoor_aqin",
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
device_class=SensorDeviceClass.CO2,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_CO2_IN_24H_AQIN,
|
||||
translation_key="co2_indoor_24h_aqin",
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
device_class=SensorDeviceClass.CO2,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_PM_IN_TEMP_AQIN,
|
||||
translation_key="pm_indoor_temp_aqin",
|
||||
native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_PM_IN_HUMIDITY_AQIN,
|
||||
translation_key="pm_indoor_humidity_aqin",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_AQI_PM25_AQIN,
|
||||
translation_key="pm25_aqi_aqin",
|
||||
device_class=SensorDeviceClass.AQI,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_AQI_PM25_24H_AQIN,
|
||||
translation_key="pm25_aqi_24h_aqin",
|
||||
device_class=SensorDeviceClass.AQI,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_AQI_PM10_AQIN,
|
||||
translation_key="pm10_aqi_aqin",
|
||||
device_class=SensorDeviceClass.AQI,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_AQI_PM10_24H_AQIN,
|
||||
translation_key="pm10_aqi_24h_aqin",
|
||||
device_class=SensorDeviceClass.AQI,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_BAROMABSIN,
|
||||
translation_key="absolute_pressure",
|
||||
@@ -683,22 +774,6 @@ async def async_setup_entry(
|
||||
class AmbientWeatherSensor(AmbientWeatherEntity, SensorEntity):
|
||||
"""Define an Ambient sensor."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
ambient: AmbientStation,
|
||||
mac_address: str,
|
||||
station_name: str,
|
||||
description: EntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(ambient, mac_address, station_name, description)
|
||||
|
||||
if description.key == TYPE_SOLARRADIATION_LX:
|
||||
# Since TYPE_SOLARRADIATION and TYPE_SOLARRADIATION_LX will have the same
|
||||
# name in the UI, we influence the entity ID of TYPE_SOLARRADIATION_LX here
|
||||
# to differentiate them:
|
||||
self.entity_id = f"sensor.{station_name}_solar_rad_lx"
|
||||
|
||||
@callback
|
||||
def update_from_latest_data(self) -> None:
|
||||
"""Fetch new state data for the sensor."""
|
||||
|
||||
@@ -156,6 +156,12 @@
|
||||
"absolute_pressure": {
|
||||
"name": "Absolute pressure"
|
||||
},
|
||||
"co2_indoor_24h_aqin": {
|
||||
"name": "CO2 Indoor 24h Average AQIN"
|
||||
},
|
||||
"co2_indoor_aqin": {
|
||||
"name": "CO2 Indoor AQIN"
|
||||
},
|
||||
"daily_rain": {
|
||||
"name": "Daily rain"
|
||||
},
|
||||
@@ -228,18 +234,39 @@
|
||||
"monthly_rain": {
|
||||
"name": "Monthly rain"
|
||||
},
|
||||
"pm10_aqi_24h_aqin": {
|
||||
"name": "PM10 Indoor AQI 24h Average AQIN"
|
||||
},
|
||||
"pm10_aqi_aqin": {
|
||||
"name": "PM10 Indoor AQI AQIN"
|
||||
},
|
||||
"pm10_indoor_24h_aqin": {
|
||||
"name": "PM10 Indoor 24h Average AQIN"
|
||||
},
|
||||
"pm10_indoor_aqin": {
|
||||
"name": "PM10 Indoor AQIN"
|
||||
},
|
||||
"pm25_24h_average": {
|
||||
"name": "PM2.5 24 hour average"
|
||||
},
|
||||
"pm25_aqi": {
|
||||
"name": "PM2.5 AQI"
|
||||
},
|
||||
"pm25_aqi_24h_aqin": {
|
||||
"name": "PM2.5 Indoor AQI 24h Average AQIN"
|
||||
},
|
||||
"pm25_aqi_24h_average": {
|
||||
"name": "PM2.5 AQI 24 hour average"
|
||||
},
|
||||
"pm25_aqi_aqin": {
|
||||
"name": "PM2.5 Indoor AQI AQIN"
|
||||
},
|
||||
"pm25_indoor": {
|
||||
"name": "PM2.5 indoor"
|
||||
},
|
||||
"pm25_indoor_24h_aqin": {
|
||||
"name": "PM2.5 Indoor 24h AQIN"
|
||||
},
|
||||
"pm25_indoor_24h_average": {
|
||||
"name": "PM2.5 indoor 24 hour average"
|
||||
},
|
||||
@@ -249,6 +276,15 @@
|
||||
"pm25_indoor_aqi_24h_average": {
|
||||
"name": "PM2.5 indoor AQI"
|
||||
},
|
||||
"pm25_indoor_aqin": {
|
||||
"name": "PM2.5 Indoor AQIN"
|
||||
},
|
||||
"pm_indoor_humidity_aqin": {
|
||||
"name": "Indoor Humidity AQIN"
|
||||
},
|
||||
"pm_indoor_temp_aqin": {
|
||||
"name": "Indoor Temperature AQIN"
|
||||
},
|
||||
"relative_pressure": {
|
||||
"name": "Relative pressure"
|
||||
},
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import AsyncIterator, Callable
|
||||
from collections.abc import AsyncGenerator, Callable
|
||||
from contextlib import asynccontextmanager, suppress
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
@@ -202,7 +202,7 @@ class AmcrestChecker(ApiWrapper):
|
||||
@asynccontextmanager
|
||||
async def async_stream_command(
|
||||
self, *args: Any, **kwargs: Any
|
||||
) -> AsyncIterator[httpx.Response]:
|
||||
) -> AsyncGenerator[httpx.Response]:
|
||||
"""amcrest.ApiWrapper.command wrapper to catch errors."""
|
||||
async with (
|
||||
self._async_command_wrapper(),
|
||||
@@ -211,7 +211,7 @@ class AmcrestChecker(ApiWrapper):
|
||||
yield ret
|
||||
|
||||
@asynccontextmanager
|
||||
async def _async_command_wrapper(self) -> AsyncIterator[None]:
|
||||
async def _async_command_wrapper(self) -> AsyncGenerator[None]:
|
||||
try:
|
||||
yield
|
||||
except LoginError as ex:
|
||||
|
||||
@@ -73,31 +73,21 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
started = False
|
||||
|
||||
async def _async_handle_labs_update(
|
||||
event: Event[labs.EventLabsUpdatedData],
|
||||
event_data: labs.EventLabsUpdatedData,
|
||||
) -> None:
|
||||
"""Handle labs feature toggle."""
|
||||
await analytics.save_preferences({ATTR_SNAPSHOTS: event.data["enabled"]})
|
||||
await analytics.save_preferences({ATTR_SNAPSHOTS: event_data["enabled"]})
|
||||
if started:
|
||||
await analytics.async_schedule()
|
||||
|
||||
@callback
|
||||
def _async_labs_event_filter(event_data: labs.EventLabsUpdatedData) -> bool:
|
||||
"""Filter labs events for this integration's snapshot feature."""
|
||||
return (
|
||||
event_data["domain"] == DOMAIN
|
||||
and event_data["preview_feature"] == LABS_SNAPSHOT_FEATURE
|
||||
)
|
||||
|
||||
async def start_schedule(_event: Event) -> None:
|
||||
"""Start the send schedule after the started event."""
|
||||
nonlocal started
|
||||
started = True
|
||||
await analytics.async_schedule()
|
||||
|
||||
hass.bus.async_listen(
|
||||
labs.EVENT_LABS_UPDATED,
|
||||
_async_handle_labs_update,
|
||||
event_filter=_async_labs_event_filter,
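# Subscribe only to updates for this integration's snapshot preview feature.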
|
||||
labs.async_subscribe_preview_feature(
|
||||
hass, DOMAIN, LABS_SNAPSHOT_FEATURE, _async_handle_labs_update
|
||||
)
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_schedule)
|
||||
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
"preview_features": {
|
||||
"snapshots": {
|
||||
"feedback_url": "https://forms.gle/GqvRmgmghSDco8M46",
|
||||
"learn_more_url": "https://www.home-assistant.io/blog/2026/02/02/about-device-database/",
|
||||
"report_issue_url": "https://github.com/OHF-Device-Database/device-database/issues/new"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"preview_features": {
|
||||
"snapshots": {
|
||||
"description": "This free, open source device database of the Open Home Foundation helps users find useful information about smart home devices used in real installations.\n\nYou can help build it by anonymously sharing data about your devices. Only device-specific details (like model or manufacturer) are shared — never personally identifying information (like the names you assign).\n\nLearn more about the device database and how we process your data in our [Data Use Statement](https://www.openhomefoundation.org/device-database-data-use-statement), which you accept by opting in.",
|
||||
"description": "We're creating the [Open Home Foundation Device Database](https://www.home-assistant.io/blog/2026/02/02/about-device-database/): a free, open source community-powered resource to help users find practical information about how smart home devices perform in real installations.\n\nYou can help us build it by opting in to share anonymized data about your devices. This data will only ever include device-specific details (like model or manufacturer) – never personally identifying information (like the names you assign).\n\nFind out how we process your data (should you choose to contribute) in our [Data Use Statement](https://www.openhomefoundation.org/device-database-data-use-statement).",
|
||||
"disable_confirmation": "Your data will no longer be shared with the Open Home Foundation's device database.",
|
||||
"enable_confirmation": "This feature is still in development and may change. The device database is being refined based on user feedback and is not yet complete.",
|
||||
"name": "Device database"
|
||||
|
||||
@@ -93,7 +93,7 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self._abort_if_unique_id_configured(updates={CONF_HOST: self.host})
|
||||
try:
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
user_input = {}
|
||||
@@ -135,7 +135,7 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
# Attempt to pair again.
|
||||
try:
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
# Device doesn't respond to the specified host. Abort.
|
||||
# If we are in the user flow we could go back to the user step to allow
|
||||
# them to enter a new IP address but we cannot do that for the zeroconf
|
||||
@@ -203,7 +203,7 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if user_input is not None:
|
||||
try:
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
# Device became network unreachable after discovery.
|
||||
# Abort and let discovery find it again later.
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
@@ -229,7 +229,7 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if user_input is not None:
|
||||
try:
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
# Device is network unreachable. Abort.
|
||||
errors["base"] = "cannot_connect"
|
||||
return self.async_show_form(
|
||||
@@ -264,7 +264,7 @@ class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithReload):
|
||||
@callback
|
||||
def _save_config(self, data: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Save the updated options."""
|
||||
new_data = {k: v for k, v in data.items() if k not in [CONF_APPS]}
|
||||
new_data = {k: v for k, v in data.items() if k != CONF_APPS}
|
||||
if self._apps:
|
||||
new_data[CONF_APPS] = self._apps
|
||||
|
||||
|
||||
@@ -73,7 +73,7 @@ async def validate_account(auth: MSOB2CAuth, account_number: str) -> str | MSOB2
|
||||
_aw = AnglianWater(authenticator=auth)
|
||||
try:
|
||||
await _aw.validate_smart_meter(account_number)
|
||||
except (InvalidAccountIdError, SmartMeterUnavailableError):
|
||||
except (InvalidAccountIdError, SmartMeterUnavailableError):
|
||||
return "smart_meter_unavailable"
|
||||
return auth
|
||||
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import partial
|
||||
|
||||
import anthropic
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
|
||||
@@ -14,10 +12,19 @@ from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DEFAULT_CONVERSATION_NAME, DOMAIN, LOGGER
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
DATA_REPAIR_DEFER_RELOAD,
|
||||
DEFAULT_CONVERSATION_NAME,
|
||||
DEPRECATED_MODELS,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
)
|
||||
|
||||
PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
@@ -27,14 +34,15 @@ type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up Anthropic."""
|
||||
hass.data.setdefault(DOMAIN, {}).setdefault(DATA_REPAIR_DEFER_RELOAD, set())
|
||||
await async_migrate_integration(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
|
||||
"""Set up Anthropic from a config entry."""
|
||||
client = await hass.async_add_executor_job(
|
||||
partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
|
||||
client = anthropic.AsyncAnthropic(
|
||||
api_key=entry.data[CONF_API_KEY], http_client=get_async_client(hass)
|
||||
)
|
||||
try:
|
||||
await client.models.list(timeout=10.0)
|
||||
@@ -50,6 +58,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
|
||||
|
||||
entry.async_on_unload(entry.add_update_listener(async_update_options))
|
||||
|
||||
for subentry in entry.subentries.values():
|
||||
if (model := subentry.data.get(CONF_CHAT_MODEL)) and model.startswith(
|
||||
tuple(DEPRECATED_MODELS)
|
||||
):
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"model_deprecated",
|
||||
is_fixable=True,
|
||||
is_persistent=False,
|
||||
learn_more_url="https://platform.claude.com/docs/en/about-claude/model-deprecations",
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="model_deprecated",
|
||||
)
|
||||
break
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -62,6 +86,11 @@ async def async_update_options(
|
||||
hass: HomeAssistant, entry: AnthropicConfigEntry
|
||||
) -> None:
|
||||
"""Update options."""
|
||||
defer_reload_entries: set[str] = hass.data.setdefault(DOMAIN, {}).setdefault(
|
||||
DATA_REPAIR_DEFER_RELOAD, set()
|
||||
)
|
||||
if entry.entry_id in defer_reload_entries:
|
||||
return
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import partial
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
@@ -30,12 +29,14 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import llm
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.helpers.selector import (
|
||||
NumberSelector,
|
||||
NumberSelectorConfig,
|
||||
SelectOptionDict,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
TemplateSelector,
|
||||
)
|
||||
from homeassistant.helpers.typing import VolDictType
|
||||
@@ -47,6 +48,7 @@ from .const import (
|
||||
CONF_RECOMMENDED,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_THINKING_BUDGET,
|
||||
CONF_THINKING_EFFORT,
|
||||
CONF_WEB_SEARCH,
|
||||
CONF_WEB_SEARCH_CITY,
|
||||
CONF_WEB_SEARCH_COUNTRY,
|
||||
@@ -58,6 +60,7 @@ from .const import (
|
||||
DEFAULT_AI_TASK_NAME,
|
||||
DEFAULT_CONVERSATION_NAME,
|
||||
DOMAIN,
|
||||
NON_ADAPTIVE_THINKING_MODELS,
|
||||
NON_THINKING_MODELS,
|
||||
WEB_SEARCH_UNSUPPORTED_MODELS,
|
||||
)
|
||||
@@ -86,12 +89,47 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
|
||||
|
||||
Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
|
||||
"""
|
||||
client = await hass.async_add_executor_job(
|
||||
partial(anthropic.AsyncAnthropic, api_key=data[CONF_API_KEY])
|
||||
client = anthropic.AsyncAnthropic(
|
||||
api_key=data[CONF_API_KEY], http_client=get_async_client(hass)
|
||||
)
|
||||
await client.models.list(timeout=10.0)
|
||||
|
||||
|
||||
async def get_model_list(client: anthropic.AsyncAnthropic) -> list[SelectOptionDict]:
|
||||
"""Get list of available models."""
|
||||
try:
|
||||
models = (await client.models.list()).data
|
||||
except anthropic.AnthropicError:
|
||||
models = []
|
||||
_LOGGER.debug("Available models: %s", models)
|
||||
model_options: list[SelectOptionDict] = []
|
||||
short_form = re.compile(r"[^\d]-\d$")
|
||||
for model_info in models:
|
||||
# Resolve alias from versioned model name:
|
||||
model_alias = (
|
||||
model_info.id[:-9]
|
||||
if model_info.id
|
||||
not in (
|
||||
"claude-3-haiku-20240307",
|
||||
"claude-3-5-haiku-20241022",
|
||||
"claude-3-opus-20240229",
|
||||
)
|
||||
and model_info.id[-2:-1] != "-"
|
||||
else model_info.id
|
||||
)
|
||||
if short_form.search(model_alias):
|
||||
model_alias += "-0"
|
||||
if model_alias.endswith(("haiku", "opus", "sonnet")):
|
||||
model_alias += "-latest"
|
||||
model_options.append(
|
||||
SelectOptionDict(
|
||||
label=model_info.display_name,
|
||||
value=model_alias,
|
||||
)
|
||||
)
|
||||
return model_options
|
||||
|
||||
|
||||
class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Anthropic."""
|
||||
|
||||
@@ -320,7 +358,9 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
|
||||
model = self.options[CONF_CHAT_MODEL]
|
||||
|
||||
if not model.startswith(tuple(NON_THINKING_MODELS)):
|
||||
if not model.startswith(tuple(NON_THINKING_MODELS)) and model.startswith(
|
||||
tuple(NON_ADAPTIVE_THINKING_MODELS)
|
||||
):
|
||||
step_schema[
|
||||
vol.Optional(
|
||||
CONF_THINKING_BUDGET, default=DEFAULT[CONF_THINKING_BUDGET]
|
||||
@@ -337,6 +377,22 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
else:
|
||||
self.options.pop(CONF_THINKING_BUDGET, None)
|
||||
|
||||
if not model.startswith(tuple(NON_ADAPTIVE_THINKING_MODELS)):
|
||||
step_schema[
|
||||
vol.Optional(
|
||||
CONF_THINKING_EFFORT,
|
||||
default=DEFAULT[CONF_THINKING_EFFORT],
|
||||
)
|
||||
] = SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=["none", "low", "medium", "high", "max"],
|
||||
translation_key=CONF_THINKING_EFFORT,
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
)
|
||||
else:
|
||||
self.options.pop(CONF_THINKING_EFFORT, None)
|
||||
|
||||
if not model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)):
|
||||
step_schema.update(
|
||||
{
|
||||
@@ -401,49 +457,20 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
|
||||
async def _get_model_list(self) -> list[SelectOptionDict]:
|
||||
"""Get list of available models."""
|
||||
try:
|
||||
client = await self.hass.async_add_executor_job(
|
||||
partial(
|
||||
anthropic.AsyncAnthropic,
|
||||
api_key=self._get_entry().data[CONF_API_KEY],
|
||||
)
|
||||
)
|
||||
models = (await client.models.list()).data
|
||||
except anthropic.AnthropicError:
|
||||
models = []
|
||||
_LOGGER.debug("Available models: %s", models)
|
||||
model_options: list[SelectOptionDict] = []
|
||||
short_form = re.compile(r"[^\d]-\d$")
|
||||
for model_info in models:
|
||||
# Resolve alias from versioned model name:
|
||||
model_alias = (
|
||||
model_info.id[:-9]
|
||||
if model_info.id
|
||||
not in ("claude-3-haiku-20240307", "claude-3-opus-20240229")
|
||||
else model_info.id
|
||||
)
|
||||
if short_form.search(model_alias):
|
||||
model_alias += "-0"
|
||||
if model_alias.endswith(("haiku", "opus", "sonnet")):
|
||||
model_alias += "-latest"
|
||||
model_options.append(
|
||||
SelectOptionDict(
|
||||
label=model_info.display_name,
|
||||
value=model_alias,
|
||||
)
|
||||
)
|
||||
return model_options
|
||||
client = anthropic.AsyncAnthropic(
|
||||
api_key=self._get_entry().data[CONF_API_KEY],
|
||||
http_client=get_async_client(self.hass),
|
||||
)
|
||||
return await get_model_list(client)
|
||||
|
||||
async def _get_location_data(self) -> dict[str, str]:
|
||||
"""Get approximate location data of the user."""
|
||||
location_data: dict[str, str] = {}
|
||||
zone_home = self.hass.states.get(ENTITY_ID_HOME)
|
||||
if zone_home is not None:
|
||||
client = await self.hass.async_add_executor_job(
|
||||
partial(
|
||||
anthropic.AsyncAnthropic,
|
||||
api_key=self._get_entry().data[CONF_API_KEY],
|
||||
)
|
||||
client = anthropic.AsyncAnthropic(
|
||||
api_key=self._get_entry().data[CONF_API_KEY],
|
||||
http_client=get_async_client(self.hass),
|
||||
)
|
||||
location_schema = vol.Schema(
|
||||
{
|
||||
@@ -464,22 +491,24 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
"role": "user",
|
||||
"content": "Where are the following coordinates located: "
|
||||
f"({zone_home.attributes[ATTR_LATITUDE]},"
|
||||
f" {zone_home.attributes[ATTR_LONGITUDE]})? Please respond "
|
||||
"only with a JSON object using the following schema:\n"
|
||||
f"{convert(location_schema)}",
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "{", # hints the model to skip any preamble
|
||||
},
|
||||
f" {zone_home.attributes[ATTR_LONGITUDE]})?",
|
||||
}
|
||||
],
|
||||
max_tokens=cast(int, DEFAULT[CONF_MAX_TOKENS]),
|
||||
output_config={
|
||||
"format": {
|
||||
"type": "json_schema",
|
||||
"schema": {
|
||||
**convert(location_schema),
|
||||
"additionalProperties": False,
|
||||
},
|
||||
}
|
||||
},
|
||||
)
|
||||
_LOGGER.debug("Model response: %s", response.content)
|
||||
location_data = location_schema(
|
||||
json.loads(
|
||||
"{"
|
||||
+ "".join(
|
||||
"".join(
|
||||
block.text
|
||||
for block in response.content
|
||||
if isinstance(block, anthropic.types.TextBlock)
|
||||
|
||||
@@ -14,6 +14,7 @@ CONF_CHAT_MODEL = "chat_model"
|
||||
CONF_MAX_TOKENS = "max_tokens"
|
||||
CONF_TEMPERATURE = "temperature"
|
||||
CONF_THINKING_BUDGET = "thinking_budget"
|
||||
CONF_THINKING_EFFORT = "thinking_effort"
|
||||
CONF_WEB_SEARCH = "web_search"
|
||||
CONF_WEB_SEARCH_USER_LOCATION = "user_location"
|
||||
CONF_WEB_SEARCH_MAX_USES = "web_search_max_uses"
|
||||
@@ -22,11 +23,14 @@ CONF_WEB_SEARCH_REGION = "region"
|
||||
CONF_WEB_SEARCH_COUNTRY = "country"
|
||||
CONF_WEB_SEARCH_TIMEZONE = "timezone"
|
||||
|
||||
DATA_REPAIR_DEFER_RELOAD = "repair_defer_reload"
|
||||
|
||||
DEFAULT = {
|
||||
CONF_CHAT_MODEL: "claude-3-5-haiku-latest",
|
||||
CONF_CHAT_MODEL: "claude-haiku-4-5",
|
||||
CONF_MAX_TOKENS: 3000,
|
||||
CONF_TEMPERATURE: 1.0,
|
||||
CONF_THINKING_BUDGET: 0,
|
||||
CONF_THINKING_EFFORT: "low",
|
||||
CONF_WEB_SEARCH: False,
|
||||
CONF_WEB_SEARCH_USER_LOCATION: False,
|
||||
CONF_WEB_SEARCH_MAX_USES: 5,
|
||||
@@ -40,9 +44,37 @@ NON_THINKING_MODELS = [
|
||||
"claude-3-haiku",
|
||||
]
|
||||
|
||||
NON_ADAPTIVE_THINKING_MODELS = [
|
||||
"claude-opus-4-5",
|
||||
"claude-sonnet-4-5",
|
||||
"claude-haiku-4-5",
|
||||
"claude-opus-4-1",
|
||||
"claude-opus-4-0",
|
||||
"claude-opus-4-20250514",
|
||||
"claude-sonnet-4-0",
|
||||
"claude-sonnet-4-20250514",
|
||||
"claude-3",
|
||||
]
|
||||
|
||||
UNSUPPORTED_STRUCTURED_OUTPUT_MODELS = [
|
||||
"claude-opus-4-1",
|
||||
"claude-opus-4-0",
|
||||
"claude-opus-4-20250514",
|
||||
"claude-sonnet-4-0",
|
||||
"claude-sonnet-4-20250514",
|
||||
"claude-3",
|
||||
]
|
||||
|
||||
WEB_SEARCH_UNSUPPORTED_MODELS = [
|
||||
"claude-3-haiku",
|
||||
"claude-3-opus",
|
||||
"claude-3-5-sonnet-20240620",
|
||||
"claude-3-5-sonnet-20241022",
|
||||
]
|
||||
|
||||
DEPRECATED_MODELS = [
|
||||
"claude-3-5-haiku",
|
||||
"claude-3-7-sonnet",
|
||||
"claude-3-5-sonnet",
|
||||
"claude-3-opus",
|
||||
]
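# Matched as name prefixes: a configured model such as "claude-3-opus-20240229"
# starts with "claude-3-opus" and triggers the "model_deprecated" repair issue.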
|
||||
|
||||
@@ -20,9 +20,11 @@ from anthropic.types import (
|
||||
DocumentBlockParam,
|
||||
ImageBlockParam,
|
||||
InputJSONDelta,
|
||||
JSONOutputFormatParam,
|
||||
MessageDeltaUsage,
|
||||
MessageParam,
|
||||
MessageStreamEvent,
|
||||
OutputConfigParam,
|
||||
RawContentBlockDeltaEvent,
|
||||
RawContentBlockStartEvent,
|
||||
RawContentBlockStopEvent,
|
||||
@@ -41,6 +43,7 @@ from anthropic.types import (
|
||||
TextDelta,
|
||||
ThinkingBlock,
|
||||
ThinkingBlockParam,
|
||||
ThinkingConfigAdaptiveParam,
|
||||
ThinkingConfigDisabledParam,
|
||||
ThinkingConfigEnabledParam,
|
||||
ThinkingDelta,
|
||||
@@ -78,6 +81,7 @@ from .const import (
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_THINKING_BUDGET,
|
||||
CONF_THINKING_EFFORT,
|
||||
CONF_WEB_SEARCH,
|
||||
CONF_WEB_SEARCH_CITY,
|
||||
CONF_WEB_SEARCH_COUNTRY,
|
||||
@@ -89,7 +93,9 @@ from .const import (
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
MIN_THINKING_BUDGET,
|
||||
NON_ADAPTIVE_THINKING_MODELS,
|
||||
NON_THINKING_MODELS,
|
||||
UNSUPPORTED_STRUCTURED_OUTPUT_MODELS,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
@@ -622,21 +628,34 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
stream=True,
|
||||
)
|
||||
|
||||
thinking_budget = options.get(
|
||||
CONF_THINKING_BUDGET, DEFAULT[CONF_THINKING_BUDGET]
|
||||
)
|
||||
if (
|
||||
not model.startswith(tuple(NON_THINKING_MODELS))
|
||||
and thinking_budget >= MIN_THINKING_BUDGET
|
||||
):
|
||||
model_args["thinking"] = ThinkingConfigEnabledParam(
|
||||
type="enabled", budget_tokens=thinking_budget
|
||||
if not model.startswith(tuple(NON_ADAPTIVE_THINKING_MODELS)):
|
||||
thinking_effort = options.get(
|
||||
CONF_THINKING_EFFORT, DEFAULT[CONF_THINKING_EFFORT]
|
||||
)
|
||||
if thinking_effort != "none":
|
||||
model_args["thinking"] = ThinkingConfigAdaptiveParam(type="adaptive")
|
||||
model_args["output_config"] = OutputConfigParam(effort=thinking_effort)
|
||||
else:
|
||||
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
|
||||
model_args["temperature"] = options.get(
|
||||
CONF_TEMPERATURE, DEFAULT[CONF_TEMPERATURE]
|
||||
)
|
||||
else:
|
||||
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
|
||||
model_args["temperature"] = options.get(
|
||||
CONF_TEMPERATURE, DEFAULT[CONF_TEMPERATURE]
|
||||
thinking_budget = options.get(
|
||||
CONF_THINKING_BUDGET, DEFAULT[CONF_THINKING_BUDGET]
|
||||
)
|
||||
if (
|
||||
not model.startswith(tuple(NON_THINKING_MODELS))
|
||||
and thinking_budget >= MIN_THINKING_BUDGET
|
||||
):
|
||||
model_args["thinking"] = ThinkingConfigEnabledParam(
|
||||
type="enabled", budget_tokens=thinking_budget
|
||||
)
|
||||
else:
|
||||
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
|
||||
model_args["temperature"] = options.get(
|
||||
CONF_TEMPERATURE, DEFAULT[CONF_TEMPERATURE]
|
||||
)
|
||||
|
||||
tools: list[ToolUnionParam] = []
|
||||
if chat_log.llm_api:
|
||||
@@ -680,8 +699,25 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
)
|
||||
|
||||
if structure and structure_name:
|
||||
structure_name = slugify(structure_name)
|
||||
if model_args["thinking"]["type"] == "disabled":
|
||||
if not model.startswith(tuple(UNSUPPORTED_STRUCTURED_OUTPUT_MODELS)):
|
||||
# Native structured output for those models that support it.
|
||||
structure_name = None
|
||||
model_args.setdefault("output_config", OutputConfigParam())[
|
||||
"format"
|
||||
] = JSONOutputFormatParam(
|
||||
type="json_schema",
|
||||
schema={
|
||||
**convert(
|
||||
structure,
|
||||
custom_serializer=chat_log.llm_api.custom_serializer
|
||||
if chat_log.llm_api
|
||||
else llm.selector_serializer,
|
||||
),
|
||||
"additionalProperties": False,
|
||||
},
|
||||
)
|
||||
elif model_args["thinking"]["type"] == "disabled":
|
||||
structure_name = slugify(structure_name)
|
||||
if not tools:
|
||||
# Simplest case: no tools and no extended thinking
|
||||
# Add a tool and force its use
|
||||
@@ -701,6 +737,7 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
# force tool use or disable text responses, so we add a hint to the
|
||||
# system prompt instead. With extended thinking, the model should be
|
||||
# smart enough to use the tool.
|
||||
structure_name = slugify(structure_name)
|
||||
model_args["tool_choice"] = ToolChoiceAutoParam(
|
||||
type="auto",
|
||||
)
|
||||
@@ -708,22 +745,24 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
model_args["system"].append( # type: ignore[union-attr]
|
||||
TextBlockParam(
|
||||
type="text",
|
||||
text=f"Claude MUST use the '{structure_name}' tool to provide the final answer instead of plain text.",
|
||||
text=f"Claude MUST use the '{structure_name}' tool to provide "
|
||||
"the final answer instead of plain text.",
|
||||
)
|
||||
)
|
||||
|
||||
tools.append(
|
||||
ToolParam(
|
||||
name=structure_name,
|
||||
description="Use this tool to reply to the user",
|
||||
input_schema=convert(
|
||||
structure,
|
||||
custom_serializer=chat_log.llm_api.custom_serializer
|
||||
if chat_log.llm_api
|
||||
else llm.selector_serializer,
|
||||
),
|
||||
if structure_name:
|
||||
tools.append(
|
||||
ToolParam(
|
||||
name=structure_name,
|
||||
description="Use this tool to reply to the user",
|
||||
input_schema=convert(
|
||||
structure,
|
||||
custom_serializer=chat_log.llm_api.custom_serializer
|
||||
if chat_log.llm_api
|
||||
else llm.selector_serializer,
|
||||
),
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
if tools:
|
||||
model_args["tools"] = tools
|
||||
@@ -744,7 +783,7 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
_transform_stream(
|
||||
chat_log,
|
||||
stream,
|
||||
output_tool=structure_name if structure else None,
|
||||
output_tool=structure_name or None,
|
||||
),
|
||||
)
|
||||
]
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/anthropic",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["anthropic==0.75.0"]
|
||||
"requirements": ["anthropic==0.78.0"]
|
||||
}
|
||||
|
||||
275
homeassistant/components/anthropic/repairs.py
Normal file
@@ -0,0 +1,275 @@
|
||||
"""Issue repair flow for Anthropic."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Iterator
|
||||
from typing import cast
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import data_entry_flow
|
||||
from homeassistant.components.repairs import RepairsFlow
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigSubentry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
|
||||
|
||||
from .config_flow import get_model_list
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
DATA_REPAIR_DEFER_RELOAD,
|
||||
DEFAULT,
|
||||
DEPRECATED_MODELS,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
|
||||
class ModelDeprecatedRepairFlow(RepairsFlow):
|
||||
"""Handler for an issue fixing flow."""
|
||||
|
||||
_subentry_iter: Iterator[tuple[str, str]] | None
|
||||
_current_entry_id: str | None
|
||||
_current_subentry_id: str | None
|
||||
_reload_pending: set[str]
|
||||
_pending_updates: dict[str, dict[str, str]]
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the flow."""
|
||||
super().__init__()
|
||||
self._subentry_iter = None
|
||||
self._current_entry_id = None
|
||||
self._current_subentry_id = None
|
||||
self._reload_pending = set()
|
||||
self._pending_updates = {}
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> data_entry_flow.FlowResult:
|
||||
"""Handle the first step of a fix flow."""
|
||||
previous_entry_id: str | None = None
|
||||
if user_input is not None:
|
||||
previous_entry_id = self._async_update_current_subentry(user_input)
|
||||
self._clear_current_target()
|
||||
|
||||
target = await self._async_next_target()
|
||||
next_entry_id = target[0].entry_id if target else None
|
||||
if previous_entry_id and previous_entry_id != next_entry_id:
|
||||
await self._async_apply_pending_updates(previous_entry_id)
|
||||
if target is None:
|
||||
await self._async_apply_all_pending_updates()
|
||||
return self.async_create_entry(data={})
|
||||
|
||||
entry, subentry, model = target
|
||||
client = entry.runtime_data
|
||||
model_list = [
|
||||
model_option
|
||||
for model_option in await get_model_list(client)
|
||||
if not model_option["value"].startswith(tuple(DEPRECATED_MODELS))
|
||||
]
|
||||
|
||||
if "opus" in model:
|
||||
suggested_model = "claude-opus-4-5"
|
||||
elif "haiku" in model:
|
||||
suggested_model = "claude-haiku-4-5"
|
||||
elif "sonnet" in model:
|
||||
suggested_model = "claude-sonnet-4-5"
|
||||
else:
|
||||
suggested_model = cast(str, DEFAULT[CONF_CHAT_MODEL])
|
||||
|
||||
schema = vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_CHAT_MODEL,
|
||||
default=suggested_model,
|
||||
): SelectSelector(
|
||||
SelectSelectorConfig(options=model_list, custom_value=True)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="init",
|
||||
data_schema=schema,
|
||||
description_placeholders={
|
||||
"entry_name": entry.title,
|
||||
"model": model,
|
||||
"subentry_name": subentry.title,
|
||||
"subentry_type": self._format_subentry_type(subentry.subentry_type),
|
||||
},
|
||||
)
|
||||
|
||||
def _iter_deprecated_subentries(self) -> Iterator[tuple[str, str]]:
|
||||
"""Yield entry/subentry pairs that use deprecated models."""
|
||||
for entry in self.hass.config_entries.async_entries(DOMAIN):
|
||||
if entry.state is not ConfigEntryState.LOADED:
|
||||
continue
|
||||
for subentry in entry.subentries.values():
|
||||
model = subentry.data.get(CONF_CHAT_MODEL)
|
||||
if model and model.startswith(tuple(DEPRECATED_MODELS)):
|
||||
yield entry.entry_id, subentry.subentry_id
|
||||
|
||||
async def _async_next_target(
|
||||
self,
|
||||
) -> tuple[ConfigEntry, ConfigSubentry, str] | None:
|
||||
"""Return the next deprecated subentry target."""
|
||||
if self._subentry_iter is None:
|
||||
self._subentry_iter = self._iter_deprecated_subentries()
|
||||
|
||||
while True:
|
||||
try:
|
||||
entry_id, subentry_id = next(self._subentry_iter)
|
||||
except StopIteration:
|
||||
return None
|
||||
|
||||
entry = self.hass.config_entries.async_get_entry(entry_id)
|
||||
if entry is None:
|
||||
continue
|
||||
|
||||
subentry = entry.subentries.get(subentry_id)
|
||||
if subentry is None:
|
||||
continue
|
||||
|
||||
model = self._pending_model(entry_id, subentry_id)
|
||||
if model is None:
|
||||
model = subentry.data.get(CONF_CHAT_MODEL)
|
||||
if not model or not model.startswith(tuple(DEPRECATED_MODELS)):
|
||||
continue
|
||||
|
||||
self._current_entry_id = entry_id
|
||||
self._current_subentry_id = subentry_id
|
||||
return entry, subentry, model
|
||||
|
||||
def _async_update_current_subentry(self, user_input: dict[str, str]) -> str | None:
|
||||
"""Update the currently selected subentry."""
|
||||
if not self._current_entry_id or not self._current_subentry_id:
|
||||
return None
|
||||
|
||||
entry = self.hass.config_entries.async_get_entry(self._current_entry_id)
|
||||
if entry is None:
|
||||
return None
|
||||
|
||||
subentry = entry.subentries.get(self._current_subentry_id)
|
||||
if subentry is None:
|
||||
return None
|
||||
|
||||
updated_data = {
|
||||
**subentry.data,
|
||||
CONF_CHAT_MODEL: user_input[CONF_CHAT_MODEL],
|
||||
}
|
||||
if updated_data == subentry.data:
|
||||
return entry.entry_id
|
||||
self._queue_pending_update(
|
||||
entry.entry_id,
|
||||
subentry.subentry_id,
|
||||
updated_data[CONF_CHAT_MODEL],
|
||||
)
|
||||
return entry.entry_id
|
||||
|
||||
def _clear_current_target(self) -> None:
|
||||
"""Clear current target tracking."""
|
||||
self._current_entry_id = None
|
||||
self._current_subentry_id = None
|
||||
|
||||
def _format_subentry_type(self, subentry_type: str) -> str:
|
||||
"""Return a user-friendly subentry type label."""
|
||||
if subentry_type == "conversation":
|
||||
return "Conversation agent"
|
||||
if subentry_type in ("ai_task", "ai_task_data"):
|
||||
return "AI task"
|
||||
return subentry_type
|
||||
|
||||
def _queue_pending_update(
|
||||
self, entry_id: str, subentry_id: str, model: str
|
||||
) -> None:
|
||||
"""Store a pending model update for a subentry."""
|
||||
self._pending_updates.setdefault(entry_id, {})[subentry_id] = model
|
||||
|
||||
def _pending_model(self, entry_id: str, subentry_id: str) -> str | None:
|
||||
"""Return a pending model update if one exists."""
|
||||
return self._pending_updates.get(entry_id, {}).get(subentry_id)
|
||||
|
||||
def _mark_entry_for_reload(self, entry_id: str) -> None:
|
||||
"""Prevent reload until repairs are complete for the entry."""
|
||||
self._reload_pending.add(entry_id)
|
||||
defer_reload_entries: set[str] = self.hass.data.setdefault(
|
||||
DOMAIN, {}
|
||||
).setdefault(DATA_REPAIR_DEFER_RELOAD, set())
|
||||
defer_reload_entries.add(entry_id)
|
||||
|
||||
async def _async_reload_entry(self, entry_id: str) -> None:
|
||||
"""Reload an entry once all repairs are completed."""
|
||||
if entry_id not in self._reload_pending:
|
||||
return
|
||||
|
||||
entry = self.hass.config_entries.async_get_entry(entry_id)
|
||||
if entry is not None and entry.state is not ConfigEntryState.LOADED:
|
||||
self._clear_defer_reload(entry_id)
|
||||
self._reload_pending.discard(entry_id)
|
||||
return
|
||||
|
||||
if entry is not None:
|
||||
await self.hass.config_entries.async_reload(entry_id)
|
||||
|
||||
self._clear_defer_reload(entry_id)
|
||||
self._reload_pending.discard(entry_id)
|
||||
|
||||
def _clear_defer_reload(self, entry_id: str) -> None:
|
||||
"""Remove entry from the deferred reload set."""
|
||||
defer_reload_entries: set[str] = self.hass.data.setdefault(
|
||||
DOMAIN, {}
|
||||
).setdefault(DATA_REPAIR_DEFER_RELOAD, set())
|
||||
defer_reload_entries.discard(entry_id)
|
||||
|
||||
async def _async_apply_pending_updates(self, entry_id: str) -> None:
|
||||
"""Apply pending subentry updates for a single entry."""
|
||||
updates = self._pending_updates.pop(entry_id, None)
|
||||
if not updates:
|
||||
return
|
||||
|
||||
entry = self.hass.config_entries.async_get_entry(entry_id)
|
||||
if entry is None or entry.state is not ConfigEntryState.LOADED:
|
||||
return
|
||||
|
||||
changed = False
|
||||
for subentry_id, model in updates.items():
|
||||
subentry = entry.subentries.get(subentry_id)
|
||||
if subentry is None:
|
||||
continue
|
||||
|
||||
updated_data = {
|
||||
**subentry.data,
|
||||
CONF_CHAT_MODEL: model,
|
||||
}
|
||||
if updated_data == subentry.data:
|
||||
continue
|
||||
|
||||
if not changed:
|
||||
self._mark_entry_for_reload(entry_id)
|
||||
changed = True
|
||||
|
||||
self.hass.config_entries.async_update_subentry(
|
||||
entry,
|
||||
subentry,
|
||||
data=updated_data,
|
||||
)
|
||||
|
||||
if not changed:
|
||||
return
|
||||
|
||||
await self._async_reload_entry(entry_id)
|
||||
|
||||
async def _async_apply_all_pending_updates(self) -> None:
|
||||
"""Apply all pending updates across entries."""
|
||||
for entry_id in list(self._pending_updates):
|
||||
await self._async_apply_pending_updates(entry_id)
|
||||
|
||||
|
||||
async def async_create_fix_flow(
|
||||
hass: HomeAssistant,
|
||||
issue_id: str,
|
||||
data: dict[str, str | int | float | None] | None,
|
||||
) -> RepairsFlow:
|
||||
"""Create flow."""
|
||||
if issue_id == "model_deprecated":
|
||||
return ModelDeprecatedRepairFlow()
|
||||
raise HomeAssistantError("Unknown issue ID")
@@ -47,12 +47,14 @@
"model": {
"data": {
"thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data::thinking_budget%]",
"thinking_effort": "[%key:component::anthropic::config_subentries::conversation::step::model::data::thinking_effort%]",
"user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data::user_location%]",
"web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search%]",
"web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search_max_uses%]"
},
"data_description": {
"thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::thinking_budget%]",
"thinking_effort": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::thinking_effort%]",
"user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::user_location%]",
"web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search%]",
"web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search_max_uses%]"
@@ -95,12 +97,14 @@
"model": {
"data": {
"thinking_budget": "Thinking budget",
"thinking_effort": "Thinking effort",
"user_location": "Include home location",
"web_search": "Enable web search",
"web_search_max_uses": "Maximum web searches"
},
"data_description": {
"thinking_budget": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking.",
"thinking_effort": "Control how many tokens Claude uses when responding, trading off between response thoroughness and token efficiency",
"user_location": "Localize search results based on home location",
"web_search": "The web search tool gives Claude direct access to real-time web content, allowing it to answer questions with up-to-date information beyond its knowledge cutoff",
"web_search_max_uses": "Limit the number of searches performed per response"
@@ -109,5 +113,32 @@
}
}
}
},
"issues": {
"model_deprecated": {
"fix_flow": {
"step": {
"init": {
"data": {
"chat_model": "[%key:common::generic::model%]"
},
"description": "You are updating {subentry_name} ({subentry_type}) in {entry_name}. The current model {model} is deprecated. Select a supported model to continue.",
"title": "Update model"
}
}
},
"title": "Model deprecated"
}
},
"selector": {
"thinking_effort": {
"options": {
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"max": "Max",
"medium": "[%key:common::state::medium%]",
"none": "None"
}
}
}
}

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/aosmith",
"integration_type": "hub",
"iot_class": "cloud_polling",
"requirements": ["py-aosmith==1.0.15"]
"requirements": ["py-aosmith==1.0.17"]
}

@@ -50,7 +50,7 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
try:
async with asyncio.timeout(CONNECTION_TIMEOUT):
data = APCUPSdData(await aioapcaccess.request_status(host, port))
except (OSError, asyncio.IncompleteReadError, TimeoutError):
errors = {"base": "cannot_connect"}
return self.async_show_form(
step_id="user", data_schema=_SCHEMA, errors=errors
@@ -77,7 +77,7 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
try:
async with asyncio.timeout(CONNECTION_TIMEOUT):
data = APCUPSdData(await aioapcaccess.request_status(host, port))
except (OSError, asyncio.IncompleteReadError, TimeoutError):
errors = {"base": "cannot_connect"}
return self.async_show_form(
step_id="reconfigure", data_schema=_SCHEMA, errors=errors

@@ -540,7 +540,17 @@ class APCUPSdSensor(APCUPSdEntity, SensorEntity):
data = self.coordinator.data[key]

if self.entity_description.device_class == SensorDeviceClass.TIMESTAMP:
self._attr_native_value = dateutil.parser.parse(data)
# The date could be "N/A" for certain fields (e.g., XOFFBATT), indicating there is no value yet.
if data == "N/A":
self._attr_native_value = None
return

try:
self._attr_native_value = dateutil.parser.parse(data)
except (dateutil.parser.ParserError, OverflowError):
# If parsing fails we should mark it as unknown, with a log for further debugging.
_LOGGER.warning('Failed to parse date for %s: "%s"', key, data)
self._attr_native_value = None
return
self._attr_native_value, inferred_unit = infer_unit(data)
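The sensor hunk above treats "N/A" as an empty value and downgrades unparseable strings to unknown instead of raising. A small standalone version of that parsing guard, using the same dateutil parser the integration imports (the field values here are made up):

from datetime import datetime

import dateutil.parser

def parse_ups_timestamp(raw: str) -> datetime | None:
    """Return a datetime, or None when the UPS reports no value or garbage."""
    if raw == "N/A":  # e.g. XOFFBATT before the first power event
        return None
    try:
        return dateutil.parser.parse(raw)
    except (dateutil.parser.ParserError, OverflowError):
        print(f'Failed to parse date: "{raw}"')
        return None

print(parse_ups_timestamp("2024-05-04 11:22:33 +0200"))
print(parse_ups_timestamp("N/A"))
print(parse_ups_timestamp("not-a-date"))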

@@ -2,15 +2,16 @@

from __future__ import annotations

from pyatv.const import KeyboardFocusState
from pyatv.const import FeatureName, FeatureState, KeyboardFocusState
from pyatv.interface import AppleTV, KeyboardListener

from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AppleTvConfigEntry
from . import SIGNAL_CONNECTED, AppleTvConfigEntry
from .entity import AppleTVEntity


@@ -21,10 +22,22 @@ async def async_setup_entry(
) -> None:
"""Load Apple TV binary sensor based on a config entry."""
# apple_tv config entries always have a unique id
assert config_entry.unique_id is not None
name: str = config_entry.data[CONF_NAME]
manager = config_entry.runtime_data
async_add_entities([AppleTVKeyboardFocused(name, config_entry.unique_id, manager)])
cb: CALLBACK_TYPE

def setup_entities(atv: AppleTV) -> None:
if atv.features.in_state(FeatureState.Available, FeatureName.TextFocusState):
assert config_entry.unique_id is not None
name: str = config_entry.data[CONF_NAME]
async_add_entities(
[AppleTVKeyboardFocused(name, config_entry.unique_id, manager)]
)
cb()

cb = async_dispatcher_connect(
hass, f"{SIGNAL_CONNECTED}_{config_entry.unique_id}", setup_entities
)
config_entry.async_on_unload(cb)
class AppleTVKeyboardFocused(AppleTVEntity, BinarySensorEntity, KeyboardListener):
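The hunk above defers creating the keyboard binary sensor until the connected device reports the TextFocusState feature, using a dispatcher subscription that unsubscribes itself after the first successful setup. A minimal standalone sketch of that one-shot pattern; the tiny signal bus and names below are illustrative stand-ins, not the integration's actual API:

from collections.abc import Callable

Listener = Callable[[object], None]
_listeners: dict[str, list[Listener]] = {}

def connect(signal: str, listener: Listener) -> Callable[[], None]:
    """Register a listener and return its unsubscribe callback."""
    _listeners.setdefault(signal, []).append(listener)
    return lambda: _listeners[signal].remove(listener)

def send(signal: str, payload: object) -> None:
    """Fire a signal to all current listeners."""
    for listener in list(_listeners.get(signal, [])):
        listener(payload)

def add_sensor_when_supported(
    signal: str, supports_keyboard: Callable[[object], bool]
) -> None:
    """Defer entity creation until the first payload that reports support."""

    def _on_connected(device: object) -> None:
        if supports_keyboard(device):
            print(f"adding keyboard sensor for {device}")
            unsubscribe()  # one-shot: stop listening once the entity exists

    unsubscribe = connect(signal, _on_connected)

add_sensor_when_supported("connected", lambda device: device == "apple_tv_with_keyboard")
send("connected", "apple_tv_with_keyboard")  # adds the sensor and unsubscribes
send("connected", "apple_tv_with_keyboard")  # no-op: listener already removed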

@@ -181,9 +181,9 @@ async def async_import_client_credential(
CONF_DOMAIN: domain,
CONF_CLIENT_ID: credential.client_id,
CONF_CLIENT_SECRET: credential.client_secret,
CONF_AUTH_DOMAIN: auth_domain if auth_domain else domain,
CONF_AUTH_DOMAIN: auth_domain or domain,
}
item[CONF_NAME] = credential.name if credential.name else DEFAULT_IMPORT_NAME
item[CONF_NAME] = credential.name or DEFAULT_IMPORT_NAME
await hass.data[DATA_COMPONENT].async_import_item(item)
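This hunk replaces `x if x else y` with the equivalent `x or y`; the Aprilaire and Atag hunks below make the same simplification. Both forms fall back whenever the left operand is falsy, which is worth keeping in mind when an empty string or 0 is a legitimate value. A small illustration:

def pick(value: str | None, default: str) -> str:
    # `value or default` falls back whenever value is falsy (None, "", 0, ...),
    # exactly like `value if value else default`.
    return value or default

assert pick(None, "Aprilaire") == "Aprilaire"
assert pick("", "Aprilaire") == "Aprilaire"   # empty string also falls back
assert pick("Custom", "Aprilaire") == "Custom"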

@@ -168,7 +168,7 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):

name = data.get(Attribute.NAME) if data else None

return name if name else "Aprilaire"
return name or "Aprilaire"

def get_hw_version(self, data: dict[str, Any]) -> str:
"""Get the hardware version."""

@@ -41,7 +41,7 @@ class APsystemsLocalAPIFlow(ConfigFlow, domain=DOMAIN):
)
try:
device_info = await api.get_device_info()
except (TimeoutError, ClientConnectionError):
errors["base"] = "cannot_connect"
else:
await self.async_set_unique_id(device_info.deviceId)

@@ -64,7 +64,7 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
async def _async_setup(self) -> None:
try:
device_info = await self.api.get_device_info()
except (ConnectionError, TimeoutError):
raise UpdateFailed from None
self.api.max_power = device_info.maxPower
self.api.min_power = device_info.minPower

@@ -49,7 +49,7 @@ class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity):
"""Set the state with the value fetched from the inverter."""
try:
status = await self._api.get_max_power()
except (TimeoutError, ClientConnectorError):
self._attr_available = False
else:
self._attr_available = True

@@ -43,7 +43,7 @@ class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity):
"""Update switch status and availability."""
try:
status = await self._api.get_device_power_status()
except (TimeoutError, ClientConnectionError, InverterReturnedError):
self._attr_available = False
else:
self._attr_available = True

@@ -56,7 +56,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN):
refresh_token = await api.authenticate(
user_input[CONF_EMAIL], user_input[CONF_PASSWORD]
)
except (ApiException, TimeoutError):
errors["base"] = "cannot_connect"
except AuthenticationFailed:
errors["base"] = "invalid_auth"

@@ -94,7 +94,7 @@ def _retry[_SharpAquosTVDeviceT: SharpAquosTVDevice, **_P](
try:
func(obj, *args, **kwargs)
break
except (OSError, TypeError, ValueError):
update_retries -= 1
if update_retries == 0:
obj.set_state(MediaPlayerState.OFF)
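Several of the hunks above catch more than one exception type. In Python 3 the types must be grouped in a tuple: `except (OSError, TimeoutError):` is the only valid spelling, while the bare comma form is a syntax error. A quick self-contained check:

def fetch(flaky: bool) -> str:
    """Stand-in for a device call that can time out."""
    if flaky:
        raise TimeoutError("device did not answer")
    return "ok"

try:
    result = fetch(flaky=True)
except (OSError, TimeoutError) as err:  # the tuple groups the handled types
    result = f"cannot_connect: {err}"

print(result)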

@@ -19,5 +19,5 @@
"documentation": "https://www.home-assistant.io/integrations/aranet",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["aranet4==2.5.1"]
"requirements": ["aranet4==2.6.0"]
}

@@ -201,5 +201,5 @@ class ArwnSensor(SensorEntity):
ev: dict[str, Any] = {}
ev.update(event)
self._attr_extra_state_attributes = ev
self._attr_native_value = ev.get(self._state_key, None)
self._attr_native_value = ev.get(self._state_key)
self.async_write_ha_state()

@@ -969,7 +969,7 @@ class PipelineRun:
metadata,
self._speech_to_text_stream(audio_stream=stream, stt_vad=stt_vad),
)
except (asyncio.CancelledError, TimeoutError):
raise # expected
except hass_nabucasa.auth.Unauthenticated as src_error:
raise SpeechToTextError(

@@ -189,7 +189,7 @@ class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN):
try:
await api.async_connect()

except (AsusRouterError, OSError):
_LOGGER.error(
"Error connecting to the AsusWrt router at %s using protocol %s",
host,

@@ -51,5 +51,5 @@ class AtagConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(DATA_SCHEMA),
errors=errors if errors else {},
errors=errors or {},
)

@@ -30,6 +30,9 @@
"title": "Set up one-time password delivered by notify component"
},
"setup": {
"data": {
"code": "Code"
},
"description": "A one-time password has been sent via **notify.{notify_service}**. Please enter it below:",
"title": "Verify setup"
}
@@ -42,6 +45,9 @@
},
"step": {
"init": {
"data": {
"code": "Code"
},
"description": "To activate two-factor authentication using time-based one-time passwords, scan the QR code with your authentication app. If you don't have one, we recommend either [Google Authenticator]({google_authenticator_url}) or [Authy]({authy_url}).\n\n{qr_code}\n\nAfter scanning the code, enter the six-digit code from your app to verify the setup. If you have problems scanning the QR code, do a manual setup with code **`{code}`**.",
"title": "Set up two-factor authentication using TOTP"
}

@@ -14,7 +14,7 @@ import voluptuous as vol

from homeassistant.components import labs, websocket_api
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
from homeassistant.components.labs import async_listen as async_labs_listen
from homeassistant.components.labs import async_subscribe_preview_feature
from homeassistant.const import (
ATTR_AREA_ID,
ATTR_ENTITY_ID,
@@ -386,14 +386,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
schema=vol.Schema({vol.Optional(CONF_ID): str}),
)

@callback
def new_triggers_conditions_listener() -> None:
async def new_triggers_conditions_listener(
_event_data: labs.EventLabsUpdatedData,
) -> None:
"""Handle new_triggers_conditions flag change."""
hass.async_create_task(
reload_helper.execute_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
)
await reload_helper.execute_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))

async_labs_listen(
async_subscribe_preview_feature(
hass,
DOMAIN,
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,

@@ -304,7 +304,7 @@ async def _try_async_validate_config_item(
"""Validate config item."""
try:
return await _async_validate_config_item(hass, config, False, True)
except (vol.Invalid, HomeAssistantError):
return None

@@ -297,14 +297,14 @@ class S3BackupAgent(BackupAgent):
return self._backup_cache

backups = {}
response = await self._client.list_objects_v2(Bucket=self._bucket)

# Filter for metadata files only
metadata_files = [
obj
for obj in response.get("Contents", [])
if obj["Key"].endswith(".metadata.json")
]
paginator = self._client.get_paginator("list_objects_v2")
metadata_files: list[dict[str, Any]] = []
async for page in paginator.paginate(Bucket=self._bucket):
metadata_files.extend(
obj
for obj in page.get("Contents", [])
if obj["Key"].endswith(".metadata.json")
)

for metadata_file in metadata_files:
try:
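The S3 hunk above switches from a single list_objects_v2 call, which returns at most 1,000 keys, to the client's paginator so every page of results is scanned. A minimal sketch of that pattern with an aiobotocore-style client; the bucket name and filter below are placeholders:

import asyncio

from aiobotocore.session import get_session

async def list_metadata_keys(bucket: str) -> list[str]:
    """Collect every *.metadata.json key, following pagination."""
    session = get_session()
    async with session.create_client("s3") as client:
        keys: list[str] = []
        paginator = client.get_paginator("list_objects_v2")
        async for page in paginator.paginate(Bucket=bucket):
            keys.extend(
                obj["Key"]
                for obj in page.get("Contents", [])
                if obj["Key"].endswith(".metadata.json")
            )
        return keys

# asyncio.run(list_metadata_keys("my-backup-bucket"))  # requires AWS credentials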

@@ -52,7 +52,7 @@ from .const import (
from .errors import AuthenticationRequired, CannotConnect
from .hub import AxisHub, get_axis_api

AXIS_OUI = {"00:40:8c", "ac:cc:8e", "b8:a4:4f"}
AXIS_OUI = {"00:40:8c", "ac:cc:8e", "b8:a4:4f", "e8:27:25"}
DEFAULT_PORT = 443
DEFAULT_PROTOCOL = "https"
PROTOCOL_CHOICES = ["https", "http"]

@@ -16,12 +16,18 @@ CONNECTION_TIMEOUT = 120 # 2 minutes
# Default TIMEOUT_FOR_UPLOAD is 128 seconds, which is too short for large backups
TIMEOUT_FOR_UPLOAD = 43200 # 12 hours

# Reduced retry count for download operations
# Default is 20 retries with exponential backoff, which can hang for 30+ minutes
# when there are persistent connection errors (e.g., SSL failures)
TRY_COUNT_DOWNLOAD = 3


class B2Http(BaseB2Http): # type: ignore[misc]
"""B2Http with extended timeouts for backup operations."""

CONNECTION_TIMEOUT = CONNECTION_TIMEOUT
TIMEOUT_FOR_UPLOAD = TIMEOUT_FOR_UPLOAD
TRY_COUNT_DOWNLOAD = TRY_COUNT_DOWNLOAD
class B2Session(BaseB2Session): # type: ignore[misc]
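B2Http above tunes the SDK's behaviour by shadowing class-level constants in a subclass, so every instance picks up the longer timeouts and the reduced retry count without touching call sites. A small generic sketch of that configuration-by-subclassing pattern; the base class here is a stand-in, not the b2sdk one:

class BaseHttpClient:
    # Library defaults, looked up on the class at call time.
    CONNECTION_TIMEOUT = 128
    TRY_COUNT_DOWNLOAD = 20

    def describe(self) -> str:
        return f"timeout={self.CONNECTION_TIMEOUT}s retries={self.TRY_COUNT_DOWNLOAD}"


class TunedHttpClient(BaseHttpClient):
    # Overrides apply to every instance of the subclass.
    CONNECTION_TIMEOUT = 43200  # 12 hours for large uploads
    TRY_COUNT_DOWNLOAD = 3      # fail fast on persistent connection errors


print(BaseHttpClient().describe())   # timeout=128s retries=20
print(TunedHttpClient().describe())  # timeout=43200s retries=3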

@@ -40,6 +40,10 @@ CACHE_TTL = 300
# This prevents uploads from hanging indefinitely
UPLOAD_TIMEOUT = 43200 # 12 hours (matches B2 HTTP timeout)

# Timeout for metadata download operations (in seconds)
# This prevents the backup system from hanging when B2 connections fail
METADATA_DOWNLOAD_TIMEOUT = 60


def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
"""Return the suggested filenames for the backup and metadata files."""
@@ -413,12 +417,21 @@ class BackblazeBackupAgent(BackupAgent):
backups = {}
for file_name, file_version in all_files_in_prefix.items():
if file_name.endswith(METADATA_FILE_SUFFIX):
backup = await self._hass.async_add_executor_job(
self._process_metadata_file_sync,
file_name,
file_version,
all_files_in_prefix,
)
try:
backup = await asyncio.wait_for(
self._hass.async_add_executor_job(
self._process_metadata_file_sync,
file_name,
file_version,
all_files_in_prefix,
),
timeout=METADATA_DOWNLOAD_TIMEOUT,
)
except TimeoutError:
_LOGGER.warning(
"Timeout downloading metadata file %s", file_name
)
continue
if backup:
backups[backup.backup_id] = backup
self._backup_list_cache = backups
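The Backblaze hunks wrap each blocking metadata download, which runs in the executor, in asyncio.wait_for so a stuck connection surfaces as TimeoutError instead of hanging the backup listing. A runnable standalone sketch of the same pattern; the slow function is only a placeholder for the sync B2 call:

import asyncio
import time

METADATA_DOWNLOAD_TIMEOUT = 2  # seconds; the integration uses 60

def download_metadata_sync(name: str) -> dict:
    """Stand-in for a blocking SDK call that can stall."""
    time.sleep(5)
    return {"file": name}

async def fetch_with_timeout(name: str) -> dict | None:
    loop = asyncio.get_running_loop()
    try:
        # Run the blocking call in the default executor, bounded by a timeout.
        # On Python 3.11+ asyncio timeouts raise the built-in TimeoutError.
        return await asyncio.wait_for(
            loop.run_in_executor(None, download_metadata_sync, name),
            timeout=METADATA_DOWNLOAD_TIMEOUT,
        )
    except TimeoutError:
        print(f"Timeout downloading metadata file {name}")
        return None

print(asyncio.run(fetch_with_timeout("backup.metadata.json")))
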
@@ -442,10 +455,18 @@
if not file or not metadata_file_version:
raise BackupNotFound(f"Backup {backup_id} not found")

metadata_content = await self._hass.async_add_executor_job(
self._download_and_parse_metadata_sync,
metadata_file_version,
)
try:
metadata_content = await asyncio.wait_for(
self._hass.async_add_executor_job(
self._download_and_parse_metadata_sync,
metadata_file_version,
),
timeout=METADATA_DOWNLOAD_TIMEOUT,
)
except TimeoutError:
raise BackupAgentError(
f"Timeout downloading metadata for backup {backup_id}"
) from None

_LOGGER.debug(
"Successfully retrieved metadata for backup ID %s from file %s",
@@ -468,16 +489,27 @@
# Process metadata files sequentially to avoid exhausting executor pool
for file_name, file_version in all_files_in_prefix.items():
if file_name.endswith(METADATA_FILE_SUFFIX):
(
result_backup_file,
result_metadata_file_version,
) = await self._hass.async_add_executor_job(
self._process_metadata_file_for_id_sync,
file_name,
file_version,
backup_id,
all_files_in_prefix,
)
try:
(
result_backup_file,
result_metadata_file_version,
) = await asyncio.wait_for(
self._hass.async_add_executor_job(
self._process_metadata_file_for_id_sync,
file_name,
file_version,
backup_id,
all_files_in_prefix,
),
timeout=METADATA_DOWNLOAD_TIMEOUT,
)
except TimeoutError:
_LOGGER.warning(
"Timeout downloading metadata file %s while searching for backup %s",
file_name,
backup_id,
)
continue
if result_backup_file and result_metadata_file_version:
return result_backup_file, result_metadata_file_version


@@ -44,7 +44,7 @@ async def async_get_config_entry_diagnostics(
account_data["allowed"], TO_REDACT_ACCOUNT_DATA_ALLOWED
)

except (AttributeError, TypeError, ValueError, KeyError):
bucket_info = {"name": "unknown", "id": "unknown"}
account_data = {"error": "Failed to retrieve detailed account information"}


@@ -26,6 +26,7 @@ EXCLUDE_FROM_BACKUP = [
"tmp_backups/*.tar",
"OZW_Log.txt",
"tts/*",
".cache/*",
]
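EXCLUDE_FROM_BACKUP above is a list of shell-style glob patterns for paths to leave out of the backup archive. A small illustration of how such patterns behave, using fnmatch (the file names are made up, and fnmatch here only stands in for whichever matcher the backup code applies):

from fnmatch import fnmatch

EXCLUDE_FROM_BACKUP = ["tmp_backups/*.tar", "OZW_Log.txt", "tts/*", ".cache/*"]

def is_excluded(path: str) -> bool:
    """Return True when the path matches any exclusion pattern."""
    return any(fnmatch(path, pattern) for pattern in EXCLUDE_FROM_BACKUP)

print(is_excluded(".cache/lookup.json"))  # True, matches ".cache/*"
print(is_excluded("tts/abcd1234.mp3"))    # True, matches "tts/*"
print(is_excluded("configuration.yaml"))  # False, kept in the backup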

EXCLUDE_DATABASE_FROM_BACKUP = [

@@ -1,6 +1,7 @@
"""Support for Baidu speech service."""

import logging
from typing import Any

from aip import AipSpeech
import voluptuous as vol
@@ -9,6 +10,7 @@ from homeassistant.components.tts import (
CONF_LANG,
PLATFORM_SCHEMA as TTS_PLATFORM_SCHEMA,
Provider,
TtsAudioType,
)
from homeassistant.const import CONF_API_KEY
from homeassistant.helpers import config_validation as cv
@@ -85,17 +87,17 @@ class BaiduTTSProvider(Provider):
}

@property
def default_language(self):
def default_language(self) -> str:
"""Return the default language."""
return self._lang

@property
def supported_languages(self):
def supported_languages(self) -> list[str]:
"""Return a list of supported languages."""
return SUPPORTED_LANGUAGES

@property
def default_options(self):
def default_options(self) -> dict[str, Any]:
"""Return a dict including default options."""
return {
CONF_PERSON: self._speech_conf_data[_OPTIONS[CONF_PERSON]],
@@ -105,11 +107,16 @@ class BaiduTTSProvider(Provider):
}

@property
def supported_options(self):
def supported_options(self) -> list[str]:
"""Return a list of supported options."""
return SUPPORTED_OPTIONS

def get_tts_audio(self, message, language, options):
def get_tts_audio(
self,
message: str,
language: str,
options: dict[str, Any],
) -> TtsAudioType:
"""Load TTS from BaiduTTS."""

aip_speech = AipSpeech(

@@ -145,7 +145,7 @@ class BeoConfigFlowHandler(ConfigFlow, domain=DOMAIN):
async with self._client:
try:
await self._client.get_beolink_self(_request_timeout=3)
except (ClientConnectorError, TimeoutError):
return self.async_abort(reason="invalid_address")

self._model = discovery_info.hostname[:-16].replace("-", " ")

@@ -6,16 +6,9 @@ from typing import Any

from blinkpy.auth import Auth
from blinkpy.blinkpy import Blink
import voluptuous as vol

from homeassistant.components import persistent_notification
from homeassistant.const import (
CONF_FILE_PATH,
CONF_FILENAME,
CONF_NAME,
CONF_PIN,
CONF_SCAN_INTERVAL,
)
from homeassistant.const import CONF_SCAN_INTERVAL
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
@@ -27,13 +20,6 @@ from .services import async_setup_services

_LOGGER = logging.getLogger(__name__)

SERVICE_SAVE_VIDEO_SCHEMA = vol.Schema(
{vol.Required(CONF_NAME): cv.string, vol.Required(CONF_FILENAME): cv.string}
)
SERVICE_SEND_PIN_SCHEMA = vol.Schema({vol.Optional(CONF_PIN): cv.string})
SERVICE_SAVE_RECENT_CLIPS_SCHEMA = vol.Schema(
{vol.Required(CONF_NAME): cv.string, vol.Required(CONF_FILE_PATH): cv.string}
)

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)


@@ -9,35 +9,23 @@ from typing import Any
from blinkpy.auth import UnauthorizedError
from blinkpy.camera import BlinkCamera as BlinkCameraAPI
from requests.exceptions import ChunkedEncodingError
import voluptuous as vol

from homeassistant.components.camera import Camera
from homeassistant.const import CONF_FILE_PATH, CONF_FILENAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
HomeAssistantError,
ServiceValidationError,
)
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import (
DEFAULT_BRAND,
DOMAIN,
SERVICE_RECORD,
SERVICE_SAVE_RECENT_CLIPS,
SERVICE_SAVE_VIDEO,
SERVICE_TRIGGER,
)
from .const import DEFAULT_BRAND, DOMAIN
from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

ATTR_VIDEO_CLIP = "video"
ATTR_IMAGE = "image"
PARALLEL_UPDATES = 1


@@ -56,20 +44,6 @@ async def async_setup_entry(

async_add_entities(entities)

platform = entity_platform.async_get_current_platform()
platform.async_register_entity_service(SERVICE_RECORD, None, "record")
platform.async_register_entity_service(SERVICE_TRIGGER, None, "trigger_camera")
platform.async_register_entity_service(
SERVICE_SAVE_RECENT_CLIPS,
{vol.Required(CONF_FILE_PATH): cv.string},
"save_recent_clips",
)
platform.async_register_entity_service(
SERVICE_SAVE_VIDEO,
{vol.Required(CONF_FILENAME): cv.string},
"save_video",
)


class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera):
"""An implementation of a Blink Camera."""

@@ -20,11 +20,6 @@ TYPE_TEMPERATURE = "temperature"
TYPE_BATTERY = "battery"
TYPE_WIFI_STRENGTH = "wifi_strength"

SERVICE_RECORD = "record"
SERVICE_TRIGGER = "trigger_camera"
SERVICE_SAVE_VIDEO = "save_video"
SERVICE_SAVE_RECENT_CLIPS = "save_recent_clips"
SERVICE_SEND_PIN = "send_pin"

PLATFORMS = [
Platform.ALARM_CONTROL_PANEL,

@@ -4,13 +4,27 @@ from __future__ import annotations

import voluptuous as vol

from homeassistant.const import ATTR_CONFIG_ENTRY_ID, CONF_PIN
from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN
from homeassistant.const import (
ATTR_CONFIG_ENTRY_ID,
CONF_FILE_PATH,
CONF_FILENAME,
CONF_PIN,
)
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers import config_validation as cv, issue_registry as ir, service

from .const import DOMAIN, SERVICE_SEND_PIN
from .const import DOMAIN

SERVICE_RECORD = "record"
SERVICE_TRIGGER = "trigger_camera"
SERVICE_SAVE_VIDEO = "save_video"
SERVICE_SAVE_RECENT_CLIPS = "save_recent_clips"


# Deprecated
SERVICE_SEND_PIN = "send_pin"
SERVICE_SEND_PIN_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY_ID): vol.All(cv.ensure_list, [cv.string]),
@@ -52,3 +66,36 @@ def async_setup_services(hass: HomeAssistant) -> None:
_send_pin,
schema=SERVICE_SEND_PIN_SCHEMA,
)

service.async_register_platform_entity_service(
hass,
DOMAIN,
SERVICE_RECORD,
entity_domain=CAMERA_DOMAIN,
schema=None,
func="record",
)
service.async_register_platform_entity_service(
hass,
DOMAIN,
SERVICE_TRIGGER,
entity_domain=CAMERA_DOMAIN,
schema=None,
func="trigger_camera",
)
service.async_register_platform_entity_service(
hass,
DOMAIN,
SERVICE_SAVE_RECENT_CLIPS,
entity_domain=CAMERA_DOMAIN,
schema={vol.Required(CONF_FILE_PATH): cv.string},
func="save_recent_clips",
)
service.async_register_platform_entity_service(
hass,
DOMAIN,
SERVICE_SAVE_VIDEO,
entity_domain=CAMERA_DOMAIN,
schema={vol.Required(CONF_FILENAME): cv.string},
func="save_video",
)
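SERVICE_SEND_PIN_SCHEMA above coerces the config entry ID field into a list of strings before validating each item. A short voluptuous example of that coercion; the ensure_list helper is written inline so the snippet stands alone, whereas Home Assistant provides its own in config_validation:

import voluptuous as vol

def ensure_list(value):
    """Wrap a single value in a list, mirroring the cv.ensure_list idea."""
    if value is None:
        return []
    return value if isinstance(value, list) else [value]

SERVICE_SEND_PIN_SCHEMA = vol.Schema(
    {
        vol.Required("config_entry_id"): vol.All(ensure_list, [str]),
        vol.Optional("pin"): str,
    }
)

print(SERVICE_SEND_PIN_SCHEMA({"config_entry_id": "abc123", "pin": "0000"}))
# {'config_entry_id': ['abc123'], 'pin': '0000'}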

@@ -80,18 +80,16 @@ SWITCHES = (
key=PLUG_AND_CHARGE,
translation_key=PLUG_AND_CHARGE,
function=set_plug_and_charge,
turn_on_off_fn=lambda evse_id, connector: (
update_on_value_and_activity(PLUG_AND_CHARGE, evse_id, connector)
turn_on_off_fn=lambda evse_id, connector: update_on_value_and_activity(
PLUG_AND_CHARGE, evse_id, connector
),
),
BlueCurrentSwitchEntityDescription(
key=LINKED_CHARGE_CARDS,
translation_key=LINKED_CHARGE_CARDS,
function=set_linked_charge_cards,
turn_on_off_fn=lambda evse_id, connector: (
update_on_value_and_activity(
PUBLIC_CHARGING, evse_id, connector, reverse_is_on=True
)
turn_on_off_fn=lambda evse_id, connector: update_on_value_and_activity(
PUBLIC_CHARGING, evse_id, connector, reverse_is_on=True
),
),
BlueCurrentSwitchEntityDescription(

@@ -148,8 +148,10 @@ SENSOR_TYPES: tuple[BMWBinarySensorEntityDescription, ...] = (
device_class=BinarySensorDeviceClass.LOCK,
# device class lock: On means unlocked, Off means locked
# Possible values: LOCKED, SECURED, SELECTIVE_LOCKED, UNLOCKED
value_fn=lambda v: v.doors_and_windows.door_lock_state
not in {LockState.LOCKED, LockState.SECURED},
value_fn=lambda v: (
v.doors_and_windows.door_lock_state
not in {LockState.LOCKED, LockState.SECURED}
),
attr_fn=lambda v, u: {
"door_lock_state": v.doors_and_windows.door_lock_state.value
},
@@ -189,9 +191,11 @@ SENSOR_TYPES: tuple[BMWBinarySensorEntityDescription, ...] = (
BMWBinarySensorEntityDescription(
key="is_pre_entry_climatization_enabled",
translation_key="is_pre_entry_climatization_enabled",
value_fn=lambda v: v.charging_profile.is_pre_entry_climatization_enabled
if v.charging_profile
else False,
value_fn=lambda v: (
v.charging_profile.is_pre_entry_climatization_enabled
if v.charging_profile
else False
),
is_available=lambda v: v.has_electric_drivetrain,
),
)

@@ -40,7 +40,9 @@ BUTTON_TYPES: tuple[BMWButtonEntityDescription, ...] = (
BMWButtonEntityDescription(
key="light_flash",
translation_key="light_flash",
remote_function=lambda vehicle: vehicle.remote_services.trigger_remote_light_flash(),
remote_function=lambda vehicle: (
vehicle.remote_services.trigger_remote_light_flash()
),
),
BMWButtonEntityDescription(
key="sound_horn",
@@ -50,18 +52,24 @@ BUTTON_TYPES: tuple[BMWButtonEntityDescription, ...] = (
BMWButtonEntityDescription(
key="activate_air_conditioning",
translation_key="activate_air_conditioning",
remote_function=lambda vehicle: vehicle.remote_services.trigger_remote_air_conditioning(),
remote_function=lambda vehicle: (
vehicle.remote_services.trigger_remote_air_conditioning()
),
),
BMWButtonEntityDescription(
key="deactivate_air_conditioning",
translation_key="deactivate_air_conditioning",
remote_function=lambda vehicle: vehicle.remote_services.trigger_remote_air_conditioning_stop(),
remote_function=lambda vehicle: (
vehicle.remote_services.trigger_remote_air_conditioning_stop()
),
is_available=lambda vehicle: vehicle.is_remote_climate_stop_enabled,
),
BMWButtonEntityDescription(
key="find_vehicle",
translation_key="find_vehicle",
remote_function=lambda vehicle: vehicle.remote_services.trigger_remote_vehicle_finder(),
remote_function=lambda vehicle: (
vehicle.remote_services.trigger_remote_vehicle_finder()
),
),
)


@@ -50,7 +50,9 @@ NUMBER_TYPES: list[BMWSwitchEntityDescription] = [
is_available=lambda v: v.is_remote_climate_stop_enabled,
value_fn=lambda v: v.climate.is_climate_on,
remote_service_on=lambda v: v.remote_services.trigger_remote_air_conditioning(),
remote_service_off=lambda v: v.remote_services.trigger_remote_air_conditioning_stop(),
remote_service_off=lambda v: (
v.remote_services.trigger_remote_air_conditioning_stop()
),
),
BMWSwitchEntityDescription(
key="charging",

@@ -16,14 +16,17 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity import SLOW_UPDATE_WARNING
from homeassistant.helpers.typing import ConfigType

from .const import BRIDGE_MAKE, DOMAIN
from .models import BondData
from .services import async_setup_services
from .utils import BondHub

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
PLATFORMS = [
Platform.BUTTON,
Platform.COVER,
@@ -38,6 +41,12 @@ _LOGGER = logging.getLogger(__name__)
type BondConfigEntry = ConfigEntry[BondData]


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the component."""
async_setup_services(hass)
return True


async def async_setup_entry(hass: HomeAssistant, entry: BondConfigEntry) -> bool:
"""Set up Bond from a config entry."""
host = entry.data[CONF_HOST]
Some files were not shown because too many files have changed in this diff.