Mirror of https://github.com/home-assistant/core.git (synced 2026-01-02 12:12:00 +01:00)
Compare commits: 643 commits, fix-radio-… to openai-mod…
@@ -8,6 +8,7 @@
"PYTHONASYNCIODEBUG": "1"
},
"features": {
"ghcr.io/anthropics/devcontainer-features/claude-code:1.0": {},
"ghcr.io/devcontainers/features/github-cli:1": {}
},
// Port 5683 udp is used by Shelly integration
.github/ISSUE_TEMPLATE/task.yml (vendored): 6 changes
@@ -21,7 +21,7 @@ body:
- type: textarea
id: description
attributes:
label: Task description
label: Description
description: |
Provide a clear and detailed description of the task that needs to be accomplished.

@@ -43,9 +43,11 @@ body:

Include links to related issues, research, prototypes, roadmap opportunities etc.
placeholder: |
- Roadmap opportunity: [links]
- Roadmap opportunity: [link]
- Epic: [link]
- Feature request: [link]
- Technical design documents: [link]
- Prototype/mockup: [link]
- Dependencies: [links]
validations:
required: false
.github/copilot-instructions.md (vendored): 8 changes
@@ -45,6 +45,12 @@ rules:

**When Reviewing/Creating Code**: Always check the integration's quality scale level and exemption status before applying rules.

## Code Review Guidelines

**When reviewing code, do NOT comment on:**
- **Missing imports** - We use static analysis tooling to catch that
- **Code formatting** - We have ruff as a formatting tool that will catch those if needed (unless specifically instructed otherwise in these instructions)

## Python Requirements

- **Compatibility**: Python 3.13+

@@ -1149,7 +1155,7 @@ _LOGGER.debug("Processing data: %s", data)  # Use lazy logging
### Validation Commands
```bash
# Check specific integration
python -m script.hassfest --integration my_integration
python -m script.hassfest --integration-path homeassistant/components/my_integration

# Validate quality scale
# Check quality_scale.yaml against current rules
.github/dependabot.yml (vendored): 3 changes
@@ -6,3 +6,6 @@ updates:
interval: daily
time: "06:00"
open-pull-requests-limit: 10
labels:
- dependency
- github_actions
.github/workflows/builder.yml (vendored): 2 changes
@@ -324,7 +324,7 @@ jobs:
uses: actions/checkout@v4.2.2

- name: Install Cosign
uses: sigstore/cosign-installer@v3.9.1
uses: sigstore/cosign-installer@v3.9.2
with:
cosign-release: "v2.2.3"
.github/workflows/codeql.yml (vendored): 4 changes
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.2.2

- name: Initialize CodeQL
uses: github/codeql-action/init@v3.29.2
uses: github/codeql-action/init@v3.29.4
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.29.2
uses: github/codeql-action/analyze@v3.29.4
with:
category: "/language:python"

@@ -231,7 +231,7 @@ jobs:
- name: Detect duplicates using AI
id: ai_detection
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/ai-inference@v1.1.0
uses: actions/ai-inference@v1.2.3
with:
model: openai/gpt-4o
system-prompt: |

@@ -57,7 +57,7 @@ jobs:
- name: Detect language using AI
id: ai_language_detection
if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/ai-inference@v1.1.0
uses: actions/ai-inference@v1.2.3
with:
model: openai/gpt-4o-mini
system-prompt: |

@@ -377,6 +377,7 @@ homeassistant.components.onedrive.*
homeassistant.components.onewire.*
homeassistant.components.onkyo.*
homeassistant.components.open_meteo.*
homeassistant.components.open_router.*
homeassistant.components.openai_conversation.*
homeassistant.components.openexchangerates.*
homeassistant.components.opensky.*

@@ -535,6 +536,7 @@ homeassistant.components.unifiprotect.*
homeassistant.components.upcloud.*
homeassistant.components.update.*
homeassistant.components.uptime.*
homeassistant.components.uptime_kuma.*
homeassistant.components.uptimerobot.*
homeassistant.components.usb.*
homeassistant.components.uvc.*
CODEOWNERS (generated): 12 changes
@@ -684,8 +684,8 @@ build.json @home-assistant/supervisor
/tests/components/husqvarna_automower/ @Thomas55555
/homeassistant/components/husqvarna_automower_ble/ @alistair23
/tests/components/husqvarna_automower_ble/ @alistair23
/homeassistant/components/huum/ @frwickst
/tests/components/huum/ @frwickst
/homeassistant/components/huum/ @frwickst @vincentwolsink
/tests/components/huum/ @frwickst @vincentwolsink
/homeassistant/components/hvv_departures/ @vigonotion
/tests/components/hvv_departures/ @vigonotion
/homeassistant/components/hydrawise/ @dknowles2 @thomaskistler @ptcryan

@@ -1102,6 +1102,8 @@ build.json @home-assistant/supervisor
/tests/components/onvif/ @hunterjm @jterrace
/homeassistant/components/open_meteo/ @frenck
/tests/components/open_meteo/ @frenck
/homeassistant/components/open_router/ @joostlek
/tests/components/open_router/ @joostlek
/homeassistant/components/openai_conversation/ @balloob
/tests/components/openai_conversation/ @balloob
/homeassistant/components/openerz/ @misialq

@@ -1658,6 +1660,8 @@ build.json @home-assistant/supervisor
/tests/components/upnp/ @StevenLooman
/homeassistant/components/uptime/ @frenck
/tests/components/uptime/ @frenck
/homeassistant/components/uptime_kuma/ @tr4nt0r
/tests/components/uptime_kuma/ @tr4nt0r
/homeassistant/components/uptimerobot/ @ludeeus @chemelli74
/tests/components/uptimerobot/ @ludeeus @chemelli74
/homeassistant/components/usb/ @bdraco

@@ -1756,8 +1760,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/wirelesstag/ @sergeymaysak
/homeassistant/components/withings/ @joostlek
/tests/components/withings/ @joostlek
/homeassistant/components/wiz/ @sbidy
/tests/components/wiz/ @sbidy
/homeassistant/components/wiz/ @sbidy @arturpragacz
/tests/components/wiz/ @sbidy @arturpragacz
/homeassistant/components/wled/ @frenck
/tests/components/wled/ @frenck
/homeassistant/components/wmspro/ @mback2k
@@ -332,6 +332,9 @@ async def async_setup_hass(
if not is_virtual_env():
await async_mount_local_lib_path(runtime_config.config_dir)

if hass.config.safe_mode:
_LOGGER.info("Starting in safe mode")

basic_setup_success = (
await async_from_config_dict(config_dict, hass) is not None
)

@@ -384,8 +387,6 @@ async def async_setup_hass(
{"recovery_mode": {}, "http": http_conf},
hass,
)
elif hass.config.safe_mode:
_LOGGER.info("Starting in safe mode")

if runtime_config.open_ui:
hass.add_job(open_hass_ui, hass)

@@ -694,10 +695,10 @@ async def async_mount_local_lib_path(config_dir: str) -> str:

def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
"""Get domains of components to set up."""
# Filter out the repeating and common config section [homeassistant]
domains = {
domain for key in config if (domain := cv.domain_key(key)) != core.DOMAIN
}
# The common config section [homeassistant] could be filtered here,
# but that is not necessary, since it corresponds to the core integration,
# that is always unconditionally loaded.
domains = {cv.domain_key(key) for key in config}

# Add config entry and default domains
if not hass.config.recovery_mode:

@@ -725,34 +726,28 @@ async def _async_resolve_domains_and_preload(
together with all their dependencies.
"""
domains_to_setup = _get_domains(hass, config)
platform_integrations = conf_util.extract_platform_integrations(
config, BASE_PLATFORMS
)
# Ensure base platforms that have platform integrations are added to `domains`,
# so they can be setup first instead of discovering them later when a config
# entry setup task notices that it's needed and there is already a long line
# to use the import executor.

# Also process all base platforms since we do not require the manifest
# to list them as dependencies.
# We want to later avoid lock contention when multiple integrations try to load
# their manifests at once.
#
# Additionally process integrations that are defined under base platforms
# to speed things up.
# For example if we have
# sensor:
# - platform: template
#
# `template` has to be loaded to validate the config for sensor
# so we want to start loading `sensor` as soon as we know
# it will be needed. The more platforms under `sensor:`, the longer
# `template` has to be loaded to validate the config for sensor.
# The more platforms under `sensor:`, the longer
# it will take to finish setup for `sensor` because each of these
# platforms has to be imported before we can validate the config.
#
# Thankfully we are migrating away from the platform pattern
# so this will be less of a problem in the future.
domains_to_setup.update(platform_integrations)

# Additionally process base platforms since we do not require the manifest
# to list them as dependencies.
# We want to later avoid lock contention when multiple integrations try to load
# their manifests at once.
# Also process integrations that are defined under base platforms
# to speed things up.
platform_integrations = conf_util.extract_platform_integrations(
config, BASE_PLATFORMS
)
additional_domains_to_process = {
*BASE_PLATFORMS,
*chain.from_iterable(platform_integrations.values()),

@@ -870,9 +865,9 @@ async def _async_set_up_integrations(
domains = set(integrations) & all_domains

_LOGGER.info(
"Domains to be set up: %s | %s",
domains,
all_domains - domains,
"Domains to be set up: %s\nDependencies: %s",
domains or "{}",
(all_domains - domains) or "{}",
)

async_set_domains_to_be_loaded(hass, all_domains)

@@ -913,12 +908,13 @@ async def _async_set_up_integrations(
stage_all_domains = stage_domains | stage_dep_domains

_LOGGER.info(
"Setting up stage %s: %s | %s\nDependencies: %s | %s",
"Setting up stage %s: %s; already set up: %s\n"
"Dependencies: %s; already set up: %s",
name,
stage_domains,
stage_domains_unfiltered - stage_domains,
stage_dep_domains,
stage_dep_domains_unfiltered - stage_dep_domains,
(stage_domains_unfiltered - stage_domains) or "{}",
stage_dep_domains or "{}",
(stage_dep_domains_unfiltered - stage_dep_domains) or "{}",
)

if timeout is None:
@@ -20,6 +20,7 @@ from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import UNDEFINED, ConfigType, UndefinedType

from .const import (
ATTR_ATTACHMENTS,
ATTR_INSTRUCTIONS,
ATTR_REQUIRED,
ATTR_STRUCTURE,

@@ -92,6 +93,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
vol.Schema({str: STRUCTURE_FIELD_SCHEMA}),
_validate_structure_fields,
),
vol.Optional(ATTR_ATTACHMENTS): vol.All(
cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
),
}
),
supports_response=SupportsResponse.ONLY,

@@ -23,6 +23,7 @@ ATTR_INSTRUCTIONS: Final = "instructions"
ATTR_TASK_NAME: Final = "task_name"
ATTR_STRUCTURE: Final = "structure"
ATTR_REQUIRED: Final = "required"
ATTR_ATTACHMENTS: Final = "attachments"

DEFAULT_SYSTEM_PROMPT = (
"You are a Home Assistant expert and help users with their tasks."

@@ -34,3 +35,6 @@ class AITaskEntityFeature(IntFlag):

GENERATE_DATA = 1
"""Generate data based on instructions."""

SUPPORT_ATTACHMENTS = 2
"""Support attachments with generate data."""

@@ -13,7 +13,7 @@ from homeassistant.components.conversation import (
)
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.helpers import llm
from homeassistant.helpers.chat_session import async_get_chat_session
from homeassistant.helpers.chat_session import ChatSession
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.util import dt as dt_util

@@ -56,12 +56,12 @@ class AITaskEntity(RestoreEntity):
@contextlib.asynccontextmanager
async def _async_get_ai_task_chat_log(
self,
session: ChatSession,
task: GenDataTask,
) -> AsyncGenerator[ChatLog]:
"""Context manager used to manage the ChatLog used during an AI Task."""
# pylint: disable-next=contextmanager-generator-missing-cleanup
with (
async_get_chat_session(self.hass) as session,
async_get_chat_log(
self.hass,
session,

@@ -79,19 +79,22 @@ class AITaskEntity(RestoreEntity):
user_llm_prompt=DEFAULT_SYSTEM_PROMPT,
)

chat_log.async_add_user_content(UserContent(task.instructions))
chat_log.async_add_user_content(
UserContent(task.instructions, attachments=task.attachments)
)

yield chat_log

@final
async def internal_async_generate_data(
self,
session: ChatSession,
task: GenDataTask,
) -> GenDataTaskResult:
"""Run a gen data task."""
self.__last_activity = dt_util.utcnow().isoformat()
self.async_write_ha_state()
async with self._async_get_ai_task_chat_log(task) as chat_log:
async with self._async_get_ai_task_chat_log(session, task) as chat_log:
return await self._async_generate_data(task, chat_log)

async def _async_generate_data(

@@ -1,8 +1,9 @@
{
"domain": "ai_task",
"name": "AI Task",
"after_dependencies": ["camera"],
"codeowners": ["@home-assistant/core"],
"dependencies": ["conversation"],
"dependencies": ["conversation", "media_source"],
"documentation": "https://www.home-assistant.io/integrations/ai_task",
"integration_type": "system",
"quality_scale": "internal"

@@ -10,16 +10,24 @@ generate_data:
required: true
selector:
text:
multiline: true
entity_id:
required: false
selector:
entity:
domain: ai_task
supported_features:
- ai_task.AITaskEntityFeature.GENERATE_DATA
filter:
domain: ai_task
supported_features:
- ai_task.AITaskEntityFeature.GENERATE_DATA
structure:
advanced: true
required: false
example: '{ "name": { "selector": { "text": }, "description": "Name of the user", "required": "True" } } }, "age": { "selector": { "number": }, "description": "Age of the user" } }'
selector:
object:
attachments:
required: false
selector:
media:
accept:
- "*"

@@ -19,6 +19,10 @@
"structure": {
"name": "Structured output",
"description": "When set, the AI Task will output fields with this in structure. The structure is a dictionary where the keys are the field names and the values contain a 'description', a 'selector', and an optional 'required' field."
},
"attachments": {
"name": "Attachments",
"description": "List of files to attach for multi-modal AI analysis."
}
}
}

@@ -3,16 +3,32 @@
from __future__ import annotations

from dataclasses import dataclass
import mimetypes
from pathlib import Path
import tempfile
from typing import Any

import voluptuous as vol

from homeassistant.core import HomeAssistant
from homeassistant.components import camera, conversation, media_source
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.chat_session import async_get_chat_session

from .const import DATA_COMPONENT, DATA_PREFERENCES, AITaskEntityFeature


def _save_camera_snapshot(image: camera.Image) -> Path:
"""Save camera snapshot to temp file."""
with tempfile.NamedTemporaryFile(
mode="wb",
suffix=mimetypes.guess_extension(image.content_type, False),
delete=False,
) as temp_file:
temp_file.write(image.content)
return Path(temp_file.name)


async def async_generate_data(
hass: HomeAssistant,
*,

@@ -20,6 +36,7 @@ async def async_generate_data(
entity_id: str | None = None,
instructions: str,
structure: vol.Schema | None = None,
attachments: list[dict] | None = None,
) -> GenDataTaskResult:
"""Run a task in the AI Task integration."""
if entity_id is None:

@@ -37,13 +54,80 @@ async def async_generate_data(
f"AI Task entity {entity_id} does not support generating data"
)

return await entity.internal_async_generate_data(
GenDataTask(
name=task_name,
instructions=instructions,
structure=structure,
# Resolve attachments
resolved_attachments: list[conversation.Attachment] = []
created_files: list[Path] = []

if (
attachments
and AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features
):
raise HomeAssistantError(
f"AI Task entity {entity_id} does not support attachments"
)

for attachment in attachments or []:
media_content_id = attachment["media_content_id"]

# Special case for camera media sources
if media_content_id.startswith("media-source://camera/"):
# Extract entity_id from the media content ID
entity_id = media_content_id.removeprefix("media-source://camera/")

# Get snapshot from camera
image = await camera.async_get_image(hass, entity_id)

temp_filename = await hass.async_add_executor_job(
_save_camera_snapshot, image
)
created_files.append(temp_filename)

resolved_attachments.append(
conversation.Attachment(
media_content_id=media_content_id,
mime_type=image.content_type,
path=temp_filename,
)
)
else:
# Handle regular media sources
media = await media_source.async_resolve_media(hass, media_content_id, None)
if media.path is None:
raise HomeAssistantError(
"Only local attachments are currently supported"
)
resolved_attachments.append(
conversation.Attachment(
media_content_id=media_content_id,
mime_type=media.mime_type,
path=media.path,
)
)

with async_get_chat_session(hass) as session:
if created_files:

def cleanup_files() -> None:
"""Cleanup temporary files."""
for file in created_files:
file.unlink(missing_ok=True)

@callback
def cleanup_files_callback() -> None:
"""Cleanup temporary files."""
hass.async_add_executor_job(cleanup_files)

session.async_on_cleanup(cleanup_files_callback)

return await entity.internal_async_generate_data(
session,
GenDataTask(
name=task_name,
instructions=instructions,
structure=structure,
attachments=resolved_attachments or None,
),
)
)

@dataclass(slots=True)

@@ -59,6 +143,9 @@ class GenDataTask:
structure: vol.Schema | None = None
"""Optional structure for the data to be generated."""

attachments: list[conversation.Attachment] | None = None
"""List of attachments to go along the instructions."""

def __str__(self) -> str:
"""Return task as a string."""
return f"<GenDataTask {self.name}: {id(self)}>"
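The attachment handling above resolves each `media_content_id` into a `conversation.Attachment` (with a camera special case) before the task reaches the entity. A minimal usage sketch, assuming a configured AI Task entity and a camera named `camera.front_door` (both hypothetical, not taken from this diff):

```python
# Hypothetical call site; the camera and task names are assumptions for illustration.
from homeassistant.core import HomeAssistant


async def example_generate_with_attachment(hass: HomeAssistant):
    # Calls the ai_task.generate_data service with one camera attachment.
    response = await hass.services.async_call(
        "ai_task",
        "generate_data",
        {
            "task_name": "describe_snapshot",
            "instructions": "Describe what is visible in the attached image.",
            "attachments": [
                # Resolved by the camera special case in async_generate_data above.
                {"media_content_id": "media-source://camera/camera.front_door"}
            ],
        },
        blocking=True,
        # The service is registered with SupportsResponse.ONLY, so the caller
        # must ask for the response to receive the generated data.
        return_response=True,
    )
    return response
```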
@@ -6,6 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/airgradient",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "platinum",
"requirements": ["airgradient==0.9.2"],
"zeroconf": ["_airgradient._tcp.local."]
}

@@ -14,9 +14,9 @@ rules:
status: exempt
comment: |
This integration does not provide additional actions.
docs-high-level-description: todo
docs-installation-instructions: todo
docs-removal-instructions: todo
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |

@@ -34,7 +34,7 @@ rules:
docs-configuration-parameters:
status: exempt
comment: No options to configure
docs-installation-parameters: todo
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done

@@ -43,23 +43,19 @@ rules:
status: exempt
comment: |
This integration does not require authentication.
test-coverage: todo
test-coverage: done
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: todo
comment: DHCP is still possible
discovery:
status: todo
comment: DHCP is still possible
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
discovery-update-info: done
discovery: done
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: |
@@ -45,9 +45,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirNowConfigEntry) -> bo
# Store Entity and Initialize Platforms
entry.runtime_data = coordinator

# Listen for option changes
entry.async_on_unload(entry.add_update_listener(update_listener))

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

# Clean up unused device entries with no entities

@@ -88,8 +85,3 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: AirNowConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)

@@ -13,7 +13,7 @@ from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
OptionsFlow,
OptionsFlowWithReload,
)
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS
from homeassistant.core import HomeAssistant, callback

@@ -126,7 +126,7 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN):
return AirNowOptionsFlowHandler()


class AirNowOptionsFlowHandler(OptionsFlow):
class AirNowOptionsFlowHandler(OptionsFlowWithReload):
"""Handle an options flow for AirNow."""

async def async_step_init(
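For context on the two AirNow hunks above: switching the handler base class to `OptionsFlowWithReload` is what allows the manual `update_listener`/`add_update_listener` wiring to be dropped, since that base class reloads the config entry after options are saved. A rough sketch under that assumption (the schema below is illustrative, not AirNow's real options form):

```python
# Illustrative sketch only; the option key and default are assumptions.
import voluptuous as vol

from homeassistant.config_entries import ConfigFlowResult, OptionsFlowWithReload


class ExampleOptionsFlowHandler(OptionsFlowWithReload):
    """Options flow; the entry is reloaded automatically after saving."""

    async def async_step_init(
        self, user_input: dict | None = None
    ) -> ConfigFlowResult:
        if user_input is not None:
            # No add_update_listener / async_reload call is needed here.
            return self.async_create_entry(data=user_input)
        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema({vol.Optional("radius", default=25): int}),
        )
```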
@@ -6,6 +6,5 @@ CONF_RETURN_AVERAGE: Final = "return_average"
|
||||
CONF_CLIP_NEGATIVE: Final = "clip_negatives"
|
||||
DOMAIN: Final = "airq"
|
||||
MANUFACTURER: Final = "CorantGmbH"
|
||||
CONCENTRATION_GRAMS_PER_CUBIC_METER: Final = "g/m³"
|
||||
ACTIVITY_BECQUEREL_PER_CUBIC_METER: Final = "Bq/m³"
|
||||
UPDATE_INTERVAL: float = 10.0
|
||||
|
||||
@@ -4,9 +4,6 @@
|
||||
"health_index": {
|
||||
"default": "mdi:heart-pulse"
|
||||
},
|
||||
"absolute_humidity": {
|
||||
"default": "mdi:water"
|
||||
},
|
||||
"oxygen": {
|
||||
"default": "mdi:leaf"
|
||||
},
|
||||
|
||||
@@ -14,6 +14,7 @@ from homeassistant.components.sensor import (
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONCENTRATION_GRAMS_PER_CUBIC_METER,
|
||||
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
|
||||
CONCENTRATION_PARTS_PER_BILLION,
|
||||
@@ -28,10 +29,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import AirQConfigEntry, AirQCoordinator
|
||||
from .const import (
|
||||
ACTIVITY_BECQUEREL_PER_CUBIC_METER,
|
||||
CONCENTRATION_GRAMS_PER_CUBIC_METER,
|
||||
)
|
||||
from .const import ACTIVITY_BECQUEREL_PER_CUBIC_METER
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -195,7 +193,7 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="humidity_abs",
|
||||
translation_key="absolute_humidity",
|
||||
device_class=SensorDeviceClass.ABSOLUTE_HUMIDITY,
|
||||
native_unit_of_measurement=CONCENTRATION_GRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("humidity_abs"),
|
||||
|
||||
@@ -93,9 +93,6 @@
|
||||
"health_index": {
|
||||
"name": "Health index"
|
||||
},
|
||||
"absolute_humidity": {
|
||||
"name": "Absolute humidity"
|
||||
},
|
||||
"hydrogen": {
|
||||
"name": "Hydrogen"
|
||||
},
|
||||
|
||||
@@ -45,6 +45,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
)

errors = {}
await self.async_set_unique_id(user_input[CONF_ID])
self._abort_if_unique_id_configured()

try:
await airthings.get_token(

@@ -60,9 +62,6 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(user_input[CONF_ID])
self._abort_if_unique_id_configured()

return self.async_create_entry(title="Airthings", data=user_input)

return self.async_show_form(

@@ -150,7 +150,7 @@ async def async_setup_entry(

coordinator = entry.runtime_data
entities = [
AirthingsHeaterEnergySensor(
AirthingsDeviceSensor(
coordinator,
airthings_device,
SENSORS[sensor_types],

@@ -162,7 +162,7 @@ async def async_setup_entry(
async_add_entities(entities)


class AirthingsHeaterEnergySensor(
class AirthingsDeviceSensor(
CoordinatorEntity[AirthingsDataUpdateCoordinator], SensorEntity
):
"""Representation of a Airthings Sensor device."""

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"iot_class": "cloud_push",
"loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.6.12"]
"requirements": ["aioairzone-cloud==0.7.0"]
}
@@ -505,8 +505,13 @@ class ClimateCapabilities(AlexaEntity):
):
yield AlexaThermostatController(self.hass, self.entity)
yield AlexaTemperatureSensor(self.hass, self.entity)
if self.entity.domain == water_heater.DOMAIN and (
supported_features & water_heater.WaterHeaterEntityFeature.OPERATION_MODE
if (
self.entity.domain == water_heater.DOMAIN
and (
supported_features
& water_heater.WaterHeaterEntityFeature.OPERATION_MODE
)
and self.entity.attributes.get(water_heater.ATTR_OPERATION_LIST)
):
yield AlexaModeController(
self.entity,

@@ -634,7 +639,9 @@ class FanCapabilities(AlexaEntity):
self.entity, instance=f"{fan.DOMAIN}.{fan.ATTR_OSCILLATING}"
)
force_range_controller = False
if supported & fan.FanEntityFeature.PRESET_MODE:
if supported & fan.FanEntityFeature.PRESET_MODE and self.entity.attributes.get(
fan.ATTR_PRESET_MODES
):
yield AlexaModeController(
self.entity, instance=f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}"
)

@@ -672,7 +679,11 @@ class RemoteCapabilities(AlexaEntity):
yield AlexaPowerController(self.entity)
supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
activities = self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST) or []
if activities and supported & remote.RemoteEntityFeature.ACTIVITY:
if (
activities
and (supported & remote.RemoteEntityFeature.ACTIVITY)
and self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST)
):
yield AlexaModeController(
self.entity, instance=f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}"
)

@@ -692,7 +703,9 @@ class HumidifierCapabilities(AlexaEntity):
"""Yield the supported interfaces."""
yield AlexaPowerController(self.entity)
supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
if supported & humidifier.HumidifierEntityFeature.MODES:
if (
supported & humidifier.HumidifierEntityFeature.MODES
) and self.entity.attributes.get(humidifier.ATTR_AVAILABLE_MODES):
yield AlexaModeController(
self.entity, instance=f"{humidifier.DOMAIN}.{humidifier.ATTR_MODE}"
)
@@ -6,7 +6,12 @@ from collections.abc import Mapping
from typing import Any

from aioamazondevices.api import AmazonEchoApi
from aioamazondevices.exceptions import CannotAuthenticate, CannotConnect, WrongCountry
from aioamazondevices.exceptions import (
CannotAuthenticate,
CannotConnect,
CannotRetrieveData,
WrongCountry,
)
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult

@@ -57,6 +62,8 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except CannotAuthenticate:
errors["base"] = "invalid_auth"
except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data"
except WrongCountry:
errors["base"] = "wrong_country"
else:

@@ -106,6 +113,8 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except CannotAuthenticate:
errors["base"] = "invalid_auth"
except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data"
else:
return self.async_update_reload_and_abort(
reauth_entry,

@@ -52,8 +52,18 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
try:
await self.api.login_mode_stored_data()
return await self.api.get_devices_data()
except (CannotConnect, CannotRetrieveData) as err:
raise UpdateFailed(f"Error occurred while updating {self.name}") from err
except CannotConnect as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="cannot_connect_with_error",
translation_placeholders={"error": repr(err)},
) from err
except CannotRetrieveData as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="cannot_retrieve_data_with_error",
translation_placeholders={"error": repr(err)},
) from err
except CannotAuthenticate as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,

@@ -7,6 +7,6 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "bronze",
"requirements": ["aioamazondevices==3.2.3"]
"quality_scale": "silver",
"requirements": ["aioamazondevices==3.5.1"]
}
@@ -28,33 +28,31 @@ rules:
# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters: todo
docs-installation-parameters: todo
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: done
test-coverage:
status: todo
comment: all tests missing
test-coverage: done

# Gold
devices: done
diagnostics: todo
diagnostics: done
discovery-update-info:
status: exempt
comment: Network information not relevant
discovery:
status: exempt
comment: There are a ton of mac address ranges in use, but also by kindles which are not supported by this integration
docs-data-update: todo
docs-examples: todo
docs-data-update: done
docs-examples: done
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: todo
docs-use-cases: todo
docs-use-cases: done
dynamic-devices: todo
entity-category: done
entity-device-class: done

@@ -43,6 +43,7 @@
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"cannot_retrieve_data": "Unable to retrieve data from Amazon. Please try again later.",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"wrong_country": "Wrong country selected. Please select the country where your Amazon account is registered.",
"unknown": "[%key:common::config_flow::error::unknown%]"

@@ -84,10 +85,10 @@
}
},
"exceptions": {
"cannot_connect": {
"cannot_connect_with_error": {
"message": "Error connecting: {error}"
},
"cannot_retrieve_data": {
"cannot_retrieve_data_with_error": {
"message": "Error retrieving data: {error}"
}
}

@@ -26,14 +26,14 @@ def alexa_api_call[_T: AmazonEntity, **_P](
self.coordinator.last_update_success = False
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_key="cannot_connect_with_error",
translation_placeholders={"error": repr(err)},
) from err
except CannotRetrieveData as err:
self.coordinator.last_update_success = False
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="cannot_retrieve_data",
translation_key="cannot_retrieve_data_with_error",
translation_placeholders={"error": repr(err)},
) from err
@@ -2,11 +2,22 @@

import amberelectric

from homeassistant.components.sensor import ConfigType
from homeassistant.const import CONF_API_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv

from .const import CONF_SITE_ID, PLATFORMS
from .const import CONF_SITE_ID, DOMAIN, PLATFORMS
from .coordinator import AmberConfigEntry, AmberUpdateCoordinator
from .services import setup_services

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Amber component."""
setup_services(hass)
return True


async def async_setup_entry(hass: HomeAssistant, entry: AmberConfigEntry) -> bool:

@@ -1,14 +1,24 @@
"""Amber Electric Constants."""

import logging
from typing import Final

from homeassistant.const import Platform

DOMAIN = "amberelectric"
DOMAIN: Final = "amberelectric"
CONF_SITE_NAME = "site_name"
CONF_SITE_ID = "site_id"

ATTR_CONFIG_ENTRY_ID = "config_entry_id"
ATTR_CHANNEL_TYPE = "channel_type"

ATTRIBUTION = "Data provided by Amber Electric"

LOGGER = logging.getLogger(__package__)
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]

SERVICE_GET_FORECASTS = "get_forecasts"

GENERAL_CHANNEL = "general"
CONTROLLED_LOAD_CHANNEL = "controlled_load"
FEED_IN_CHANNEL = "feed_in"
@@ -10,7 +10,6 @@ from amberelectric.models.actual_interval import ActualInterval
from amberelectric.models.channel import ChannelType
from amberelectric.models.current_interval import CurrentInterval
from amberelectric.models.forecast_interval import ForecastInterval
from amberelectric.models.price_descriptor import PriceDescriptor
from amberelectric.rest import ApiException

from homeassistant.config_entries import ConfigEntry

@@ -18,6 +17,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import LOGGER
from .helpers import normalize_descriptor

type AmberConfigEntry = ConfigEntry[AmberUpdateCoordinator]

@@ -49,27 +49,6 @@ def is_feed_in(interval: ActualInterval | CurrentInterval | ForecastInterval) ->
return interval.channel_type == ChannelType.FEEDIN


def normalize_descriptor(descriptor: PriceDescriptor | None) -> str | None:
"""Return the snake case versions of descriptor names. Returns None if the name is not recognized."""
if descriptor is None:
return None
if descriptor.value == "spike":
return "spike"
if descriptor.value == "high":
return "high"
if descriptor.value == "neutral":
return "neutral"
if descriptor.value == "low":
return "low"
if descriptor.value == "veryLow":
return "very_low"
if descriptor.value == "extremelyLow":
return "extremely_low"
if descriptor.value == "negative":
return "negative"
return None


class AmberUpdateCoordinator(DataUpdateCoordinator):
"""AmberUpdateCoordinator - In charge of downloading the data for a site, which all the sensors read."""

@@ -103,7 +82,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
"grid": {},
}
try:
data = self._api.get_current_prices(self.site_id, next=48)
data = self._api.get_current_prices(self.site_id, next=288)
intervals = [interval.actual_instance for interval in data]
except ApiException as api_exception:
raise UpdateFailed("Missing price data, skipping update") from api_exception
homeassistant/components/amberelectric/helpers.py (new file): 25 lines
@@ -0,0 +1,25 @@
"""Formatting helpers used to convert things."""

from amberelectric.models.price_descriptor import PriceDescriptor

DESCRIPTOR_MAP: dict[str, str] = {
PriceDescriptor.SPIKE: "spike",
PriceDescriptor.HIGH: "high",
PriceDescriptor.NEUTRAL: "neutral",
PriceDescriptor.LOW: "low",
PriceDescriptor.VERYLOW: "very_low",
PriceDescriptor.EXTREMELYLOW: "extremely_low",
PriceDescriptor.NEGATIVE: "negative",
}


def normalize_descriptor(descriptor: PriceDescriptor | None) -> str | None:
"""Return the snake case versions of descriptor names. Returns None if the name is not recognized."""
if descriptor in DESCRIPTOR_MAP:
return DESCRIPTOR_MAP[descriptor]
return None


def format_cents_to_dollars(cents: float) -> float:
"""Return a formatted conversion from cents to dollars."""
return round(cents / 100, 2)
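This new helper replaces the `if`/`elif` chain deleted from the coordinator above with a single dictionary lookup. A quick illustration of the expected behaviour, following the mapping and rounding shown in the file:

```python
from amberelectric.models.price_descriptor import PriceDescriptor

from homeassistant.components.amberelectric.helpers import (
    format_cents_to_dollars,
    normalize_descriptor,
)

assert normalize_descriptor(PriceDescriptor.VERYLOW) == "very_low"
assert normalize_descriptor(None) is None  # unknown/missing descriptors map to None
assert format_cents_to_dollars(2467.3) == 24.67  # cents to dollars, rounded to 2 dp
```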
@@ -22,5 +22,10 @@
}
}
}
},
"services": {
"get_forecasts": {
"service": "mdi:transmission-tower"
}
}
}
@@ -23,16 +23,12 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import ATTRIBUTION
from .coordinator import AmberConfigEntry, AmberUpdateCoordinator, normalize_descriptor
from .coordinator import AmberConfigEntry, AmberUpdateCoordinator
from .helpers import format_cents_to_dollars, normalize_descriptor

UNIT = f"{CURRENCY_DOLLAR}/{UnitOfEnergy.KILO_WATT_HOUR}"


def format_cents_to_dollars(cents: float) -> float:
"""Return a formatted conversion from cents to dollars."""
return round(cents / 100, 2)


def friendly_channel_type(channel_type: str) -> str:
"""Return a human readable version of the channel type."""
if channel_type == "controlled_load":
121
homeassistant/components/amberelectric/services.py
Normal file
121
homeassistant/components/amberelectric/services.py
Normal file
@@ -0,0 +1,121 @@
"""Amber Electric Service class."""

from amberelectric.models.channel import ChannelType
import voluptuous as vol

from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import (
    HomeAssistant,
    ServiceCall,
    ServiceResponse,
    SupportsResponse,
)
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.selector import ConfigEntrySelector
from homeassistant.util.json import JsonValueType

from .const import (
    ATTR_CHANNEL_TYPE,
    ATTR_CONFIG_ENTRY_ID,
    CONTROLLED_LOAD_CHANNEL,
    DOMAIN,
    FEED_IN_CHANNEL,
    GENERAL_CHANNEL,
    SERVICE_GET_FORECASTS,
)
from .coordinator import AmberConfigEntry
from .helpers import format_cents_to_dollars, normalize_descriptor

GET_FORECASTS_SCHEMA = vol.Schema(
    {
        ATTR_CONFIG_ENTRY_ID: ConfigEntrySelector({"integration": DOMAIN}),
        ATTR_CHANNEL_TYPE: vol.In(
            [GENERAL_CHANNEL, CONTROLLED_LOAD_CHANNEL, FEED_IN_CHANNEL]
        ),
    }
)


def async_get_entry(hass: HomeAssistant, config_entry_id: str) -> AmberConfigEntry:
    """Get the Amber config entry."""
    if not (entry := hass.config_entries.async_get_entry(config_entry_id)):
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="integration_not_found",
            translation_placeholders={"target": config_entry_id},
        )
    if entry.state is not ConfigEntryState.LOADED:
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="not_loaded",
            translation_placeholders={"target": entry.title},
        )
    return entry


def get_forecasts(channel_type: str, data: dict) -> list[JsonValueType]:
    """Return an array of forecasts."""
    results: list[JsonValueType] = []

    if channel_type not in data["forecasts"]:
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="channel_not_found",
            translation_placeholders={"channel_type": channel_type},
        )

    intervals = data["forecasts"][channel_type]

    for interval in intervals:
        datum = {}
        datum["duration"] = interval.duration
        datum["date"] = interval.var_date.isoformat()
        datum["nem_date"] = interval.nem_time.isoformat()
        datum["per_kwh"] = format_cents_to_dollars(interval.per_kwh)
        if interval.channel_type == ChannelType.FEEDIN:
            datum["per_kwh"] = datum["per_kwh"] * -1
        datum["spot_per_kwh"] = format_cents_to_dollars(interval.spot_per_kwh)
        datum["start_time"] = interval.start_time.isoformat()
        datum["end_time"] = interval.end_time.isoformat()
        datum["renewables"] = round(interval.renewables)
        datum["spike_status"] = interval.spike_status.value
        datum["descriptor"] = normalize_descriptor(interval.descriptor)

        if interval.range is not None:
            datum["range_min"] = format_cents_to_dollars(interval.range.min)
            datum["range_max"] = format_cents_to_dollars(interval.range.max)

        if interval.advanced_price is not None:
            multiplier = -1 if interval.channel_type == ChannelType.FEEDIN else 1
            datum["advanced_price_low"] = multiplier * format_cents_to_dollars(
                interval.advanced_price.low
            )
            datum["advanced_price_predicted"] = multiplier * format_cents_to_dollars(
                interval.advanced_price.predicted
            )
            datum["advanced_price_high"] = multiplier * format_cents_to_dollars(
                interval.advanced_price.high
            )

        results.append(datum)

    return results


def setup_services(hass: HomeAssistant) -> None:
    """Set up the services for the Amber integration."""

    async def handle_get_forecasts(call: ServiceCall) -> ServiceResponse:
        channel_type = call.data[ATTR_CHANNEL_TYPE]
        entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID])
        coordinator = entry.runtime_data
        forecasts = get_forecasts(channel_type, coordinator.data)
        return {"forecasts": forecasts}

    hass.services.async_register(
        DOMAIN,
        SERVICE_GET_FORECASTS,
        handle_get_forecasts,
        GET_FORECASTS_SCHEMA,
        supports_response=SupportsResponse.ONLY,
    )
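A hedged sketch of invoking this response-only service from inside Home Assistant (for example in a test or script); the config entry ID is a placeholder, and the response shape mirrors handle_get_forecasts above:

response = await hass.services.async_call(
    "amberelectric",
    "get_forecasts",
    {
        "config_entry_id": "<amber_config_entry_id>",  # placeholder
        "channel_type": "general",
    },
    blocking=True,
    return_response=True,
)
# response == {"forecasts": [...]}, each entry carrying per_kwh, spot_per_kwh,
# start_time, end_time, renewables, spike_status, descriptor, etc.
for forecast in response["forecasts"]:
    print(forecast["start_time"], forecast["per_kwh"])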
16
homeassistant/components/amberelectric/services.yaml
Normal file
@@ -0,0 +1,16 @@
get_forecasts:
  fields:
    config_entry_id:
      required: true
      selector:
        config_entry:
          integration: amberelectric
    channel_type:
      required: true
      selector:
        select:
          options:
            - general
            - controlled_load
            - feed_in
          translation_key: channel_type
@@ -1,25 +1,61 @@
|
||||
{
|
||||
"config": {
|
||||
"error": {
|
||||
"invalid_api_token": "[%key:common::config_flow::error::invalid_api_key%]",
|
||||
"no_site": "No site provided",
|
||||
"unknown_error": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"site": {
|
||||
"data": {
|
||||
"site_id": "Site NMI",
|
||||
"site_name": "Site name"
|
||||
},
|
||||
"description": "Select the NMI of the site you would like to add"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"api_token": "[%key:common::config_flow::data::api_token%]",
|
||||
"site_id": "Site ID"
|
||||
},
|
||||
"description": "Go to {api_url} to generate an API key"
|
||||
},
|
||||
"site": {
|
||||
"data": {
|
||||
"site_id": "Site NMI",
|
||||
"site_name": "Site Name"
|
||||
},
|
||||
"description": "Select the NMI of the site you would like to add"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"get_forecasts": {
|
||||
"name": "Get price forecasts",
|
||||
"description": "Retrieves price forecasts from Amber Electric for a site.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"description": "The config entry of the site to get forecasts for.",
|
||||
"name": "Config entry"
|
||||
},
|
||||
"channel_type": {
|
||||
"name": "Channel type",
|
||||
"description": "The channel to get forecasts for."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"integration_not_found": {
|
||||
"message": "Config entry \"{target}\" not found in registry."
|
||||
},
|
||||
"error": {
|
||||
"invalid_api_token": "[%key:common::config_flow::error::invalid_api_key%]",
|
||||
"no_site": "No site provided",
|
||||
"unknown_error": "[%key:common::config_flow::error::unknown%]"
|
||||
"not_loaded": {
|
||||
"message": "{target} is not loaded."
|
||||
},
|
||||
"channel_not_found": {
|
||||
"message": "There is no {channel_type} channel at this site."
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"channel_type": {
|
||||
"options": {
|
||||
"general": "General",
|
||||
"controlled_load": "Controlled load",
|
||||
"feed_in": "Feed-in"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["amcrest"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["amcrest==1.9.8"]
|
||||
"requirements": ["amcrest==1.9.9"]
|
||||
}
|
||||
|
||||
@@ -14,6 +14,7 @@ from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
from .analytics import Analytics
|
||||
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, INTERVAL, PREFERENCE_SCHEMA
|
||||
from .http import AnalyticsDevicesView
|
||||
|
||||
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
|
||||
|
||||
@@ -55,6 +56,8 @@ async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool:
|
||||
websocket_api.async_register_command(hass, websocket_analytics)
|
||||
websocket_api.async_register_command(hass, websocket_analytics_preferences)
|
||||
|
||||
hass.http.register_view(AnalyticsDevicesView)
|
||||
|
||||
hass.data[DATA_COMPONENT] = analytics
|
||||
return True
|
||||
|
||||
|
||||
@@ -27,7 +27,7 @@ from homeassistant.config_entries import SOURCE_IGNORE
|
||||
from homeassistant.const import ATTR_DOMAIN, BASE_PLATFORMS, __version__ as HA_VERSION
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.hassio import is_hassio
|
||||
from homeassistant.helpers.storage import Store
|
||||
@@ -77,6 +77,11 @@ from .const import (
|
||||
)
|
||||
|
||||
|
||||
def gen_uuid() -> str:
|
||||
"""Generate a new UUID."""
|
||||
return uuid.uuid4().hex
|
||||
|
||||
|
||||
@dataclass
|
||||
class AnalyticsData:
|
||||
"""Analytics data."""
|
||||
@@ -184,7 +189,7 @@ class Analytics:
|
||||
return
|
||||
|
||||
if self._data.uuid is None:
|
||||
self._data.uuid = uuid.uuid4().hex
|
||||
self._data.uuid = gen_uuid()
|
||||
await self._store.async_save(dataclass_asdict(self._data))
|
||||
|
||||
if self.supervisor:
|
||||
@@ -381,3 +386,83 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
|
||||
).values():
|
||||
domains.update(platforms)
|
||||
return domains
|
||||
|
||||
|
||||
async def async_devices_payload(hass: HomeAssistant) -> dict:
|
||||
"""Return the devices payload."""
|
||||
integrations_without_model_id: set[str] = set()
|
||||
devices: list[dict[str, Any]] = []
|
||||
dev_reg = dr.async_get(hass)
|
||||
# Devices that need via device info set
|
||||
new_indexes: dict[str, int] = {}
|
||||
via_devices: dict[str, str] = {}
|
||||
|
||||
seen_integrations = set()
|
||||
|
||||
for device in dev_reg.devices.values():
|
||||
# Ignore services
|
||||
if device.entry_type:
|
||||
continue
|
||||
|
||||
if not device.primary_config_entry:
|
||||
continue
|
||||
|
||||
config_entry = hass.config_entries.async_get_entry(device.primary_config_entry)
|
||||
|
||||
if config_entry is None:
|
||||
continue
|
||||
|
||||
seen_integrations.add(config_entry.domain)
|
||||
|
||||
if not device.model_id:
|
||||
integrations_without_model_id.add(config_entry.domain)
|
||||
continue
|
||||
|
||||
if not device.manufacturer:
|
||||
continue
|
||||
|
||||
new_indexes[device.id] = len(devices)
|
||||
devices.append(
|
||||
{
|
||||
"integration": config_entry.domain,
|
||||
"manufacturer": device.manufacturer,
|
||||
"model_id": device.model_id,
|
||||
"model": device.model,
|
||||
"sw_version": device.sw_version,
|
||||
"hw_version": device.hw_version,
|
||||
"has_suggested_area": device.suggested_area is not None,
|
||||
"has_configuration_url": device.configuration_url is not None,
|
||||
"via_device": None,
|
||||
}
|
||||
)
|
||||
if device.via_device_id:
|
||||
via_devices[device.id] = device.via_device_id
|
||||
|
||||
for from_device, via_device in via_devices.items():
|
||||
if via_device not in new_indexes:
|
||||
continue
|
||||
devices[new_indexes[from_device]]["via_device"] = new_indexes[via_device]
|
||||
|
||||
integrations = {
|
||||
domain: integration
|
||||
for domain, integration in (
|
||||
await async_get_integrations(hass, seen_integrations)
|
||||
).items()
|
||||
if isinstance(integration, Integration)
|
||||
}
|
||||
|
||||
for device_info in devices:
|
||||
if integration := integrations.get(device_info["integration"]):
|
||||
device_info["is_custom_integration"] = not integration.is_built_in
|
||||
|
||||
return {
|
||||
"version": "home-assistant:1",
|
||||
"no_model_id": sorted(
|
||||
[
|
||||
domain
|
||||
for domain in integrations_without_model_id
|
||||
if domain in integrations and integrations[domain].is_built_in
|
||||
]
|
||||
),
|
||||
"devices": devices,
|
||||
}
|
||||
|
||||
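For orientation, async_devices_payload above produces a JSON-serializable dict of roughly this shape; the concrete values below are invented examples, not real analytics data:

example_payload = {
    "version": "home-assistant:1",
    # Built-in integrations that had devices without a model_id set.
    "no_model_id": ["example_domain"],
    "devices": [
        {
            "integration": "hue",  # invented example
            "manufacturer": "Signify",
            "model_id": "LCA001",
            "model": "Hue color lamp",
            "sw_version": "1.104.2",
            "hw_version": None,
            "has_suggested_area": True,
            "has_configuration_url": False,
            "via_device": None,  # or an index into this same "devices" list
            "is_custom_integration": False,
        }
    ],
}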
27
homeassistant/components/analytics/http.py
Normal file
@@ -0,0 +1,27 @@
"""HTTP endpoints for analytics integration."""

from aiohttp import web

from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
from homeassistant.core import HomeAssistant

from .analytics import async_devices_payload


class AnalyticsDevicesView(HomeAssistantView):
    """View to handle analytics devices payload download requests."""

    url = "/api/analytics/devices"
    name = "api:analytics:devices"

    @require_admin
    async def get(self, request: web.Request) -> web.Response:
        """Return analytics devices payload as JSON."""
        hass: HomeAssistant = request.app[KEY_HASS]
        payload = await async_devices_payload(hass)
        return self.json(
            payload,
            headers={
                "Content-Disposition": "attachment; filename=analytics_devices.json"
            },
        )
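A small sketch of downloading that payload through the new view over the REST API; the base URL and token are placeholders, and the request must come from an admin user because of @require_admin:

import asyncio

import aiohttp


async def fetch_analytics_devices() -> None:
    url = "http://homeassistant.local:8123/api/analytics/devices"  # placeholder host
    headers = {"Authorization": "Bearer <long-lived-access-token>"}  # placeholder token
    async with aiohttp.ClientSession() as session:
        async with session.get(url, headers=headers) as resp:
            data = await resp.json()
            print(data["version"], len(data["devices"]))


asyncio.run(fetch_analytics_devices())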
@@ -3,7 +3,7 @@
|
||||
"name": "Analytics",
|
||||
"after_dependencies": ["energy", "hassio", "recorder"],
|
||||
"codeowners": ["@home-assistant/core", "@ludeeus"],
|
||||
"dependencies": ["api", "websocket_api"],
|
||||
"dependencies": ["api", "websocket_api", "http"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/analytics",
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
|
||||
@@ -55,7 +55,6 @@ async def async_setup_entry(
|
||||
entry.runtime_data = AnalyticsInsightsData(coordinator=coordinator, names=names)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
|
||||
return True
|
||||
|
||||
@@ -65,10 +64,3 @@ async def async_unload_entry(
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def update_listener(
|
||||
hass: HomeAssistant, entry: AnalyticsInsightsConfigEntry
|
||||
) -> None:
|
||||
"""Handle options update."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
@@ -11,7 +11,11 @@ from python_homeassistant_analytics import (
|
||||
from python_homeassistant_analytics.models import Environment, IntegrationType
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
|
||||
from homeassistant.config_entries import (
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlowWithReload,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import (
|
||||
@@ -129,7 +133,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
|
||||
class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlow):
|
||||
class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithReload):
|
||||
"""Handle Homeassistant Analytics options."""
|
||||
|
||||
async def async_step_init(
|
||||
|
||||
@@ -68,7 +68,6 @@ async def async_setup_entry(
|
||||
entry.async_on_unload(
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop)
|
||||
)
|
||||
entry.async_on_unload(entry.add_update_listener(async_update_options))
|
||||
entry.async_on_unload(api.disconnect)
|
||||
|
||||
return True
|
||||
@@ -80,13 +79,3 @@ async def async_unload_entry(
|
||||
"""Unload a config entry."""
|
||||
_LOGGER.debug("async_unload_entry: %s", entry.data)
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def async_update_options(
|
||||
hass: HomeAssistant, entry: AndroidTVRemoteConfigEntry
|
||||
) -> None:
|
||||
"""Handle options update."""
|
||||
_LOGGER.debug(
|
||||
"async_update_options: data: %s options: %s", entry.data, entry.options
|
||||
)
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
@@ -19,7 +19,7 @@ from homeassistant.config_entries import (
|
||||
SOURCE_RECONFIGURE,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
OptionsFlowWithReload,
|
||||
)
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME
|
||||
from homeassistant.core import callback
|
||||
@@ -116,10 +116,10 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
pin = user_input["pin"]
|
||||
await self.api.async_finish_pairing(pin)
|
||||
if self.source == SOURCE_REAUTH:
|
||||
await self.hass.config_entries.async_reload(
|
||||
self._get_reauth_entry().entry_id
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(), reload_even_if_entry_is_unchanged=True
|
||||
)
|
||||
return self.async_abort(reason="reauth_successful")
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self.name,
|
||||
data={
|
||||
@@ -243,7 +243,7 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return AndroidTVRemoteOptionsFlowHandler(config_entry)
|
||||
|
||||
|
||||
class AndroidTVRemoteOptionsFlowHandler(OptionsFlow):
|
||||
class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithReload):
|
||||
"""Android TV Remote options flow."""
|
||||
|
||||
def __init__(self, config_entry: AndroidTVRemoteConfigEntry) -> None:
|
||||
|
||||
@@ -27,4 +27,4 @@ def create_api(hass: HomeAssistant, host: str, enable_ime: bool) -> AndroidTVRem
|
||||
|
||||
def get_enable_ime(entry: AndroidTVRemoteConfigEntry) -> bool:
|
||||
"""Get value of enable_ime option or its default value."""
|
||||
return entry.options.get(CONF_ENABLE_IME, CONF_ENABLE_IME_DEFAULT_VALUE)
|
||||
return entry.options.get(CONF_ENABLE_IME, CONF_ENABLE_IME_DEFAULT_VALUE) # type: ignore[no-any-return]
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"cannot_receive_deviceinfo": "Failed to retrieve MAC Address. Make sure the device is turned on"
|
||||
"cannot_receive_deviceinfo": "Failed to retrieve MAC address. Make sure the device is turned on"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
|
||||
@@ -10,9 +10,9 @@ DEFAULT_CONVERSATION_NAME = "Claude conversation"
|
||||
CONF_RECOMMENDED = "recommended"
|
||||
CONF_PROMPT = "prompt"
|
||||
CONF_CHAT_MODEL = "chat_model"
|
||||
RECOMMENDED_CHAT_MODEL = "claude-3-haiku-20240307"
|
||||
RECOMMENDED_CHAT_MODEL = "claude-3-5-haiku-latest"
|
||||
CONF_MAX_TOKENS = "max_tokens"
|
||||
RECOMMENDED_MAX_TOKENS = 1024
|
||||
RECOMMENDED_MAX_TOKENS = 3000
|
||||
CONF_TEMPERATURE = "temperature"
|
||||
RECOMMENDED_TEMPERATURE = 1.0
|
||||
CONF_THINKING_BUDGET = "thinking_budget"
|
||||
|
||||
@@ -6,7 +6,6 @@ from homeassistant.components import conversation
|
||||
from homeassistant.config_entries import ConfigSubentry
|
||||
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import intent
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AnthropicConfigEntry
|
||||
@@ -72,13 +71,4 @@ class AnthropicConversationEntity(
|
||||
|
||||
await self._async_handle_chat_log(chat_log)
|
||||
|
||||
response_content = chat_log.content[-1]
|
||||
if not isinstance(response_content, conversation.AssistantContent):
|
||||
raise TypeError("Last message must be an assistant message")
|
||||
intent_response = intent.IntentResponse(language=user_input.language)
|
||||
intent_response.async_set_speech(response_content.content or "")
|
||||
return conversation.ConversationResult(
|
||||
response=intent_response,
|
||||
conversation_id=chat_log.conversation_id,
|
||||
continue_conversation=chat_log.continue_conversation,
|
||||
)
|
||||
return conversation.async_get_result_from_chat_log(user_input, chat_log)
|
||||
|
||||
@@ -311,11 +311,13 @@ def _create_token_stats(
|
||||
class AnthropicBaseLLMEntity(Entity):
|
||||
"""Anthropic base LLM entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
|
||||
def __init__(self, entry: AnthropicConfigEntry, subentry: ConfigSubentry) -> None:
|
||||
"""Initialize the entity."""
|
||||
self.entry = entry
|
||||
self.subentry = subentry
|
||||
self._attr_name = subentry.title
|
||||
self._attr_unique_id = subentry.subentry_id
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, subentry.subentry_id)},
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
"set_options": {
|
||||
"data": {
|
||||
"name": "[%key:common::config_flow::data::name%]",
|
||||
"prompt": "Instructions",
|
||||
"prompt": "[%key:common::config_flow::data::prompt%]",
|
||||
"chat_model": "[%key:common::generic::model%]",
|
||||
"max_tokens": "Maximum tokens to return in response",
|
||||
"temperature": "Temperature",
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/apple_tv",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pyatv", "srptools"],
|
||||
"requirements": ["pyatv==0.16.0"],
|
||||
"requirements": ["pyatv==0.16.1"],
|
||||
"zeroconf": [
|
||||
"_mediaremotetv._tcp.local.",
|
||||
"_companion-link._tcp.local.",
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/arcam_fmj",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["arcam"],
|
||||
"requirements": ["arcam-fmj==1.8.1"],
|
||||
"requirements": ["arcam-fmj==1.8.2"],
|
||||
"ssdp": [
|
||||
{
|
||||
"deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
|
||||
|
||||
@@ -38,8 +38,6 @@ from .pipeline import (
|
||||
async_create_default_pipeline,
|
||||
async_get_pipeline,
|
||||
async_get_pipelines,
|
||||
async_migrate_engine,
|
||||
async_run_migrations,
|
||||
async_setup_pipeline_store,
|
||||
async_update_pipeline,
|
||||
)
|
||||
@@ -61,7 +59,6 @@ __all__ = (
|
||||
"WakeWordSettings",
|
||||
"async_create_default_pipeline",
|
||||
"async_get_pipelines",
|
||||
"async_migrate_engine",
|
||||
"async_pipeline_from_audio_stream",
|
||||
"async_setup",
|
||||
"async_update_pipeline",
|
||||
@@ -87,7 +84,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
hass.data[DATA_LAST_WAKE_UP] = {}
|
||||
|
||||
await async_setup_pipeline_store(hass)
|
||||
await async_run_migrations(hass)
|
||||
async_register_websocket_api(hass)
|
||||
|
||||
return True
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
DOMAIN = "assist_pipeline"
|
||||
|
||||
DATA_CONFIG = f"{DOMAIN}.config"
|
||||
DATA_MIGRATIONS = f"{DOMAIN}_migrations"
|
||||
|
||||
DEFAULT_PIPELINE_TIMEOUT = 60 * 5 # seconds
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ from pathlib import Path
|
||||
from queue import Empty, Queue
|
||||
from threading import Thread
|
||||
import time
|
||||
from typing import TYPE_CHECKING, Any, Literal, cast
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
import wave
|
||||
|
||||
import hass_nabucasa
|
||||
@@ -49,7 +49,6 @@ from .const import (
|
||||
CONF_DEBUG_RECORDING_DIR,
|
||||
DATA_CONFIG,
|
||||
DATA_LAST_WAKE_UP,
|
||||
DATA_MIGRATIONS,
|
||||
DOMAIN,
|
||||
MS_PER_CHUNK,
|
||||
SAMPLE_CHANNELS,
|
||||
@@ -2059,50 +2058,6 @@ async def async_setup_pipeline_store(hass: HomeAssistant) -> PipelineData:
|
||||
return PipelineData(pipeline_store)
|
||||
|
||||
|
||||
@callback
|
||||
def async_migrate_engine(
|
||||
hass: HomeAssistant,
|
||||
engine_type: Literal["conversation", "stt", "tts", "wake_word"],
|
||||
old_value: str,
|
||||
new_value: str,
|
||||
) -> None:
|
||||
"""Register a migration of an engine used in pipelines."""
|
||||
hass.data.setdefault(DATA_MIGRATIONS, {})[engine_type] = (old_value, new_value)
|
||||
|
||||
# Run migrations when config is already loaded
|
||||
if DATA_CONFIG in hass.data:
|
||||
hass.async_create_background_task(
|
||||
async_run_migrations(hass), "assist_pipeline_migration", eager_start=True
|
||||
)
|
||||
|
||||
|
||||
async def async_run_migrations(hass: HomeAssistant) -> None:
|
||||
"""Run pipeline migrations."""
|
||||
if not (migrations := hass.data.get(DATA_MIGRATIONS)):
|
||||
return
|
||||
|
||||
engine_attr = {
|
||||
"conversation": "conversation_engine",
|
||||
"stt": "stt_engine",
|
||||
"tts": "tts_engine",
|
||||
"wake_word": "wake_word_entity",
|
||||
}
|
||||
|
||||
updates = []
|
||||
|
||||
for pipeline in async_get_pipelines(hass):
|
||||
attr_updates = {}
|
||||
for engine_type, (old_value, new_value) in migrations.items():
|
||||
if getattr(pipeline, engine_attr[engine_type]) == old_value:
|
||||
attr_updates[engine_attr[engine_type]] = new_value
|
||||
|
||||
if attr_updates:
|
||||
updates.append((pipeline, attr_updates))
|
||||
|
||||
for pipeline, attr_updates in updates:
|
||||
await async_update_pipeline(hass, pipeline, **attr_updates)
|
||||
|
||||
|
||||
@dataclass
|
||||
class PipelineConversationData:
|
||||
"""Hold data for the duration of a conversation."""
|
||||
|
||||
@@ -68,9 +68,10 @@ ask_question:
|
||||
required: true
|
||||
selector:
|
||||
entity:
|
||||
domain: assist_satellite
|
||||
supported_features:
|
||||
- assist_satellite.AssistSatelliteEntityFeature.START_CONVERSATION
|
||||
filter:
|
||||
domain: assist_satellite
|
||||
supported_features:
|
||||
- assist_satellite.AssistSatelliteEntityFeature.START_CONVERSATION
|
||||
question:
|
||||
required: false
|
||||
example: "What kind of music would you like to play?"
|
||||
|
||||
@@ -28,5 +28,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/august",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pubnub", "yalexs"],
|
||||
"requirements": ["yalexs==8.10.0", "yalexs-ble==2.6.0"]
|
||||
"requirements": ["yalexs==8.10.0", "yalexs-ble==3.1.0"]
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@ from datetime import timedelta
|
||||
import logging
|
||||
|
||||
API_CO2 = "carbon_dioxide"
|
||||
API_DEW_POINT = "dew_point"
|
||||
API_DUST = "dust"
|
||||
API_HUMID = "humidity"
|
||||
API_LUX = "illuminance"
|
||||
|
||||
@@ -34,6 +34,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import (
|
||||
API_CO2,
|
||||
API_DEW_POINT,
|
||||
API_DUST,
|
||||
API_HUMID,
|
||||
API_LUX,
|
||||
@@ -110,6 +111,15 @@ SENSOR_TYPES: tuple[AwairSensorEntityDescription, ...] = (
|
||||
unique_id_tag="CO2", # matches legacy format
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
AwairSensorEntityDescription(
|
||||
key=API_DEW_POINT,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
translation_key="dew_point",
|
||||
unique_id_tag="dew_point",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
)
|
||||
|
||||
SENSOR_TYPES_DUST: tuple[AwairSensorEntityDescription, ...] = (
|
||||
|
||||
@@ -57,6 +57,9 @@
|
||||
},
|
||||
"sound_level": {
|
||||
"name": "Sound level"
|
||||
},
|
||||
"dew_point": {
|
||||
"name": "Dew point"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -30,7 +30,9 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: AxisConfigEntry)
|
||||
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
|
||||
hub.setup()
|
||||
|
||||
config_entry.add_update_listener(hub.async_new_address_callback)
|
||||
config_entry.async_on_unload(
|
||||
config_entry.add_update_listener(hub.async_new_address_callback)
|
||||
)
|
||||
config_entry.async_on_unload(hub.teardown)
|
||||
config_entry.async_on_unload(
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, hub.shutdown)
|
||||
|
||||
1
homeassistant/components/bauknecht/__init__.py
Normal file
@@ -0,0 +1 @@
"""Bauknecht virtual integration."""
6
homeassistant/components/bauknecht/manifest.json
Normal file
@@ -0,0 +1,6 @@
{
  "domain": "bauknecht",
  "name": "Bauknecht",
  "integration_type": "virtual",
  "supported_by": "whirlpool"
}
@@ -15,23 +15,31 @@ from bluecurrent_api.exceptions import (
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_NAME, CONF_API_TOKEN, Platform
|
||||
from homeassistant.const import CONF_API_TOKEN, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
|
||||
from .const import DOMAIN, EVSE_ID, LOGGER, MODEL_TYPE
|
||||
from .const import (
|
||||
CHARGEPOINT_SETTINGS,
|
||||
CHARGEPOINT_STATUS,
|
||||
DOMAIN,
|
||||
EVSE_ID,
|
||||
LOGGER,
|
||||
PLUG_AND_CHARGE,
|
||||
VALUE,
|
||||
)
|
||||
|
||||
type BlueCurrentConfigEntry = ConfigEntry[Connector]
|
||||
|
||||
PLATFORMS = [Platform.BUTTON, Platform.SENSOR]
|
||||
PLATFORMS = [Platform.BUTTON, Platform.SENSOR, Platform.SWITCH]
|
||||
CHARGE_POINTS = "CHARGE_POINTS"
|
||||
DATA = "data"
|
||||
DELAY = 5
|
||||
|
||||
GRID = "GRID"
|
||||
OBJECT = "object"
|
||||
VALUE_TYPES = ["CH_STATUS"]
|
||||
VALUE_TYPES = [CHARGEPOINT_STATUS, CHARGEPOINT_SETTINGS]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -94,7 +102,7 @@ class Connector:
|
||||
elif object_name in VALUE_TYPES:
|
||||
value_data: dict = message[DATA]
|
||||
evse_id = value_data.pop(EVSE_ID)
|
||||
self.update_charge_point(evse_id, value_data)
|
||||
self.update_charge_point(evse_id, object_name, value_data)
|
||||
|
||||
# gets grid key / values
|
||||
elif GRID in object_name:
|
||||
@@ -106,26 +114,37 @@ class Connector:
|
||||
"""Handle incoming chargepoint data."""
|
||||
await asyncio.gather(
|
||||
*(
|
||||
self.handle_charge_point(
|
||||
entry[EVSE_ID], entry[MODEL_TYPE], entry[ATTR_NAME]
|
||||
)
|
||||
self.handle_charge_point(entry[EVSE_ID], entry)
|
||||
for entry in charge_points_data
|
||||
),
|
||||
self.client.get_grid_status(charge_points_data[0][EVSE_ID]),
|
||||
)
|
||||
|
||||
async def handle_charge_point(self, evse_id: str, model: str, name: str) -> None:
|
||||
async def handle_charge_point(
|
||||
self, evse_id: str, charge_point: dict[str, Any]
|
||||
) -> None:
|
||||
"""Add the chargepoint and request their data."""
|
||||
self.add_charge_point(evse_id, model, name)
|
||||
self.add_charge_point(evse_id, charge_point)
|
||||
await self.client.get_status(evse_id)
|
||||
|
||||
def add_charge_point(self, evse_id: str, model: str, name: str) -> None:
|
||||
def add_charge_point(self, evse_id: str, charge_point: dict[str, Any]) -> None:
|
||||
"""Add a charge point to charge_points."""
|
||||
self.charge_points[evse_id] = {MODEL_TYPE: model, ATTR_NAME: name}
|
||||
self.charge_points[evse_id] = charge_point
|
||||
|
||||
def update_charge_point(self, evse_id: str, data: dict) -> None:
|
||||
def update_charge_point(self, evse_id: str, update_type: str, data: dict) -> None:
|
||||
"""Update the charge point data."""
|
||||
self.charge_points[evse_id].update(data)
|
||||
charge_point = self.charge_points[evse_id]
|
||||
if update_type == CHARGEPOINT_SETTINGS:
|
||||
# Update the plug and charge object. The library parses this object to a bool instead of an object.
|
||||
plug_and_charge = charge_point.get(PLUG_AND_CHARGE)
|
||||
if plug_and_charge is not None:
|
||||
plug_and_charge[VALUE] = data[PLUG_AND_CHARGE]
|
||||
|
||||
# Remove the plug and charge object from the data list before updating.
|
||||
del data[PLUG_AND_CHARGE]
|
||||
|
||||
charge_point.update(data)
|
||||
|
||||
self.dispatch_charge_point_update_signal(evse_id)
|
||||
|
||||
def dispatch_charge_point_update_signal(self, evse_id: str) -> None:
|
||||
|
||||
@@ -8,3 +8,14 @@ LOGGER = logging.getLogger(__package__)
|
||||
|
||||
EVSE_ID = "evse_id"
|
||||
MODEL_TYPE = "model_type"
|
||||
PLUG_AND_CHARGE = "plug_and_charge"
|
||||
VALUE = "value"
|
||||
PERMISSION = "permission"
|
||||
CHARGEPOINT_STATUS = "CH_STATUS"
|
||||
CHARGEPOINT_SETTINGS = "CH_SETTINGS"
|
||||
BLOCK = "block"
|
||||
UNAVAILABLE = "unavailable"
|
||||
AVAILABLE = "available"
|
||||
LINKED_CHARGE_CARDS = "linked_charge_cards_only"
|
||||
PUBLIC_CHARGING = "public_charging"
|
||||
ACTIVITY = "activity"
|
||||
|
||||
@@ -30,6 +30,17 @@
|
||||
"stop_charge_session": {
|
||||
"default": "mdi:stop"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"plug_and_charge": {
|
||||
"default": "mdi:ev-plug-type2"
|
||||
},
|
||||
"linked_charge_cards": {
|
||||
"default": "mdi:account-group"
|
||||
},
|
||||
"block": {
|
||||
"default": "mdi:lock"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/blue_current",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["bluecurrent_api"],
|
||||
"requirements": ["bluecurrent-api==1.2.3"]
|
||||
"requirements": ["bluecurrent-api==1.2.4"]
|
||||
}
|
||||
|
||||
@@ -124,6 +124,17 @@
|
||||
"reset": {
|
||||
"name": "Reset"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"plug_and_charge": {
|
||||
"name": "Plug & Charge"
|
||||
},
|
||||
"linked_charge_cards_only": {
|
||||
"name": "Linked charging cards only"
|
||||
},
|
||||
"block": {
|
||||
"name": "Block charge point"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
169
homeassistant/components/blue_current/switch.py
Normal file
@@ -0,0 +1,169 @@
|
||||
"""Support for Blue Current switches."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import PLUG_AND_CHARGE, BlueCurrentConfigEntry, Connector
|
||||
from .const import (
|
||||
AVAILABLE,
|
||||
BLOCK,
|
||||
LINKED_CHARGE_CARDS,
|
||||
PUBLIC_CHARGING,
|
||||
UNAVAILABLE,
|
||||
VALUE,
|
||||
)
|
||||
from .entity import ChargepointEntity
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
class BlueCurrentSwitchEntityDescription(SwitchEntityDescription):
|
||||
"""Describes a Blue Current switch entity."""
|
||||
|
||||
function: Callable[[Connector, str, bool], Any]
|
||||
|
||||
turn_on_off_fn: Callable[[str, Connector], tuple[bool, bool]]
|
||||
"""Update the switch based on the latest data received from the websocket. The first returned boolean is _attr_is_on, the second one has_value."""
|
||||
|
||||
|
||||
def update_on_value_and_activity(
|
||||
key: str, evse_id: str, connector: Connector, reverse_is_on: bool = False
|
||||
) -> tuple[bool, bool]:
|
||||
"""Return the updated state of the switch based on received chargepoint data and activity."""
|
||||
|
||||
data_object = connector.charge_points[evse_id].get(key)
|
||||
is_on = data_object[VALUE] if data_object is not None else None
|
||||
activity = connector.charge_points[evse_id].get("activity")
|
||||
|
||||
if is_on is not None and activity == AVAILABLE:
|
||||
return is_on if not reverse_is_on else not is_on, True
|
||||
return False, False
|
||||
|
||||
|
||||
def update_block_switch(evse_id: str, connector: Connector) -> tuple[bool, bool]:
|
||||
"""Return the updated data for a block switch."""
|
||||
activity = connector.charge_points[evse_id].get("activity")
|
||||
return activity == UNAVAILABLE, activity in [AVAILABLE, UNAVAILABLE]
|
||||
|
||||
|
||||
def update_charge_point(
|
||||
key: str, evse_id: str, connector: Connector, new_switch_value: bool
|
||||
) -> None:
|
||||
"""Change charge point data when the state of the switch changes."""
|
||||
data_objects = connector.charge_points[evse_id].get(key)
|
||||
if data_objects is not None:
|
||||
data_objects[VALUE] = new_switch_value
|
||||
|
||||
|
||||
async def set_plug_and_charge(connector: Connector, evse_id: str, value: bool) -> None:
|
||||
"""Toggle the plug and charge setting for a specific charging point."""
|
||||
await connector.client.set_plug_and_charge(evse_id, value)
|
||||
update_charge_point(PLUG_AND_CHARGE, evse_id, connector, value)
|
||||
|
||||
|
||||
async def set_linked_charge_cards(
|
||||
connector: Connector, evse_id: str, value: bool
|
||||
) -> None:
|
||||
"""Toggle the plug and charge setting for a specific charging point."""
|
||||
await connector.client.set_linked_charge_cards_only(evse_id, value)
|
||||
update_charge_point(PUBLIC_CHARGING, evse_id, connector, not value)
|
||||
|
||||
|
||||
SWITCHES = (
|
||||
BlueCurrentSwitchEntityDescription(
|
||||
key=PLUG_AND_CHARGE,
|
||||
translation_key=PLUG_AND_CHARGE,
|
||||
function=set_plug_and_charge,
|
||||
turn_on_off_fn=lambda evse_id, connector: (
|
||||
update_on_value_and_activity(PLUG_AND_CHARGE, evse_id, connector)
|
||||
),
|
||||
),
|
||||
BlueCurrentSwitchEntityDescription(
|
||||
key=LINKED_CHARGE_CARDS,
|
||||
translation_key=LINKED_CHARGE_CARDS,
|
||||
function=set_linked_charge_cards,
|
||||
turn_on_off_fn=lambda evse_id, connector: (
|
||||
update_on_value_and_activity(
|
||||
PUBLIC_CHARGING, evse_id, connector, reverse_is_on=True
|
||||
)
|
||||
),
|
||||
),
|
||||
BlueCurrentSwitchEntityDescription(
|
||||
key=BLOCK,
|
||||
translation_key=BLOCK,
|
||||
function=lambda connector, evse_id, value: connector.client.block(
|
||||
evse_id, value
|
||||
),
|
||||
turn_on_off_fn=update_block_switch,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: BlueCurrentConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Blue Current switches."""
|
||||
connector = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
ChargePointSwitch(
|
||||
connector,
|
||||
evse_id,
|
||||
switch,
|
||||
)
|
||||
for evse_id in connector.charge_points
|
||||
for switch in SWITCHES
|
||||
)
|
||||
|
||||
|
||||
class ChargePointSwitch(ChargepointEntity, SwitchEntity):
|
||||
"""Base charge point switch."""
|
||||
|
||||
has_value = True
|
||||
entity_description: BlueCurrentSwitchEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
connector: Connector,
|
||||
evse_id: str,
|
||||
switch: BlueCurrentSwitchEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the switch."""
|
||||
super().__init__(connector, evse_id)
|
||||
|
||||
self.key = switch.key
|
||||
self.entity_description = switch
|
||||
self.evse_id = evse_id
|
||||
self._attr_available = True
|
||||
self._attr_unique_id = f"{switch.key}_{evse_id}"
|
||||
|
||||
async def call_function(self, value: bool) -> None:
|
||||
"""Call the function to set setting."""
|
||||
await self.entity_description.function(self.connector, self.evse_id, value)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on."""
|
||||
await self.call_function(True)
|
||||
self._attr_is_on = True
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on."""
|
||||
await self.call_function(False)
|
||||
self._attr_is_on = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def update_from_latest_data(self) -> None:
|
||||
"""Fetch new state data for the switch."""
|
||||
new_state = self.entity_description.turn_on_off_fn(self.evse_id, self.connector)
|
||||
self._attr_is_on = new_state[0]
|
||||
self.has_value = new_state[1]
|
||||
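A short worked example of the turn_on_off_fn contract used above (the first boolean becomes _attr_is_on, the second has_value); the connector below is a stand-in object for illustration, not the integration's real Connector:

from homeassistant.components.blue_current.switch import (
    update_block_switch,
    update_on_value_and_activity,
)


class FakeConnector:
    """Illustrative stand-in shaped like Connector.charge_points."""

    charge_points = {
        "BCU101": {"activity": "unavailable", "plug_and_charge": {"value": True}},
    }


connector = FakeConnector()

# Blocked charge point: activity "unavailable" -> switch is on, and a value exists.
assert update_block_switch("BCU101", connector) == (True, True)

# The Plug & Charge switch only reports a value while the charge point is "available".
assert update_on_value_and_activity("plug_and_charge", "BCU101", connector) == (False, False)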
@@ -15,12 +15,12 @@
|
||||
],
|
||||
"quality_scale": "internal",
|
||||
"requirements": [
|
||||
"bleak==0.22.3",
|
||||
"bleak-retry-connector==3.9.0",
|
||||
"bluetooth-adapters==0.21.4",
|
||||
"bleak==1.0.1",
|
||||
"bleak-retry-connector==4.0.0",
|
||||
"bluetooth-adapters==2.0.0",
|
||||
"bluetooth-auto-recovery==1.5.2",
|
||||
"bluetooth-data-tools==1.28.2",
|
||||
"dbus-fast==2.43.0",
|
||||
"habluetooth==3.49.0"
|
||||
"dbus-fast==2.44.2",
|
||||
"habluetooth==4.0.1"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -8,20 +8,33 @@ from bring_api import Bring
|
||||
|
||||
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import (
|
||||
BringActivityCoordinator,
|
||||
BringConfigEntry,
|
||||
BringCoordinators,
|
||||
BringDataUpdateCoordinator,
|
||||
)
|
||||
from .services import async_setup_services
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.EVENT, Platform.SENSOR, Platform.TODO]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Bring! services."""
|
||||
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: BringConfigEntry) -> bool:
|
||||
"""Set up Bring! from a config entry."""
|
||||
|
||||
|
||||
@@ -7,5 +7,8 @@ DOMAIN = "bring"
|
||||
ATTR_SENDER: Final = "sender"
|
||||
ATTR_ITEM_NAME: Final = "item"
|
||||
ATTR_NOTIFICATION_TYPE: Final = "message"
|
||||
|
||||
ATTR_REACTION: Final = "reaction"
|
||||
ATTR_ACTIVITY: Final = "uuid"
|
||||
ATTR_RECEIVER: Final = "publicUserUuid"
|
||||
SERVICE_PUSH_NOTIFICATION = "send_message"
|
||||
SERVICE_ACTIVITY_STREAM_REACTION = "send_reaction"
|
||||
|
||||
@@ -35,6 +35,9 @@
|
||||
"services": {
|
||||
"send_message": {
|
||||
"service": "mdi:cellphone-message"
|
||||
},
|
||||
"send_reaction": {
|
||||
"service": "mdi:thumb-up"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
110
homeassistant/components/bring/services.py
Normal file
@@ -0,0 +1,110 @@
|
||||
"""Actions for Bring! integration."""
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from bring_api import (
|
||||
ActivityType,
|
||||
BringAuthException,
|
||||
BringNotificationType,
|
||||
BringRequestException,
|
||||
ReactionType,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.event import ATTR_EVENT_TYPE
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import ATTR_ENTITY_ID
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
|
||||
from .const import (
|
||||
ATTR_ACTIVITY,
|
||||
ATTR_REACTION,
|
||||
ATTR_RECEIVER,
|
||||
DOMAIN,
|
||||
SERVICE_ACTIVITY_STREAM_REACTION,
|
||||
)
|
||||
from .coordinator import BringConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SERVICE_ACTIVITY_STREAM_REACTION_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
|
||||
vol.Required(ATTR_REACTION): vol.All(
|
||||
vol.Upper,
|
||||
vol.Coerce(ReactionType),
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def get_config_entry(hass: HomeAssistant, entry_id: str) -> BringConfigEntry:
|
||||
"""Return config entry or raise if not found or not loaded."""
|
||||
entry = hass.config_entries.async_get_entry(entry_id)
|
||||
if TYPE_CHECKING:
|
||||
assert entry
|
||||
if entry.state is not ConfigEntryState.LOADED:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_loaded",
|
||||
)
|
||||
return entry
|
||||
|
||||
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up services for Bring! integration."""
|
||||
|
||||
async def async_send_activity_stream_reaction(call: ServiceCall) -> None:
|
||||
"""Send a reaction in response to recent activity of a list member."""
|
||||
|
||||
if (
|
||||
not (state := hass.states.get(call.data[ATTR_ENTITY_ID]))
|
||||
or not (entity := er.async_get(hass).async_get(call.data[ATTR_ENTITY_ID]))
|
||||
or not entity.config_entry_id
|
||||
):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entity_not_found",
|
||||
translation_placeholders={
|
||||
ATTR_ENTITY_ID: call.data[ATTR_ENTITY_ID],
|
||||
},
|
||||
)
|
||||
config_entry = get_config_entry(hass, entity.config_entry_id)
|
||||
|
||||
coordinator = config_entry.runtime_data.data
|
||||
|
||||
list_uuid = entity.unique_id.split("_")[1]
|
||||
|
||||
activity = state.attributes[ATTR_EVENT_TYPE]
|
||||
|
||||
reaction: ReactionType = call.data[ATTR_REACTION]
|
||||
|
||||
if not activity:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="activity_not_found",
|
||||
)
|
||||
try:
|
||||
await coordinator.bring.notify(
|
||||
list_uuid,
|
||||
BringNotificationType.LIST_ACTIVITY_STREAM_REACTION,
|
||||
receiver=state.attributes[ATTR_RECEIVER],
|
||||
activity=state.attributes[ATTR_ACTIVITY],
|
||||
activity_type=ActivityType(activity.upper()),
|
||||
reaction=reaction,
|
||||
)
|
||||
except (BringRequestException, BringAuthException) as e:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="reaction_request_failed",
|
||||
) from e
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_ACTIVITY_STREAM_REACTION,
|
||||
async_send_activity_stream_reaction,
|
||||
SERVICE_ACTIVITY_STREAM_REACTION_SCHEMA,
|
||||
)
|
||||
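A hedged example of triggering the new send_reaction action from within Home Assistant; the entity ID matches the example in the services.yaml hunk below and must belong to a Bring! activities event entity:

await hass.services.async_call(
    "bring",
    "send_reaction",
    {
        "entity_id": "event.shopping_list",  # Bring! activities event entity
        "reaction": "thumbs_up",  # one of thumbs_up, monocle, drooling, heart
    },
    blocking=True,
)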
@@ -21,3 +21,28 @@ send_message:
|
||||
required: false
|
||||
selector:
|
||||
text:
|
||||
send_reaction:
|
||||
fields:
|
||||
entity_id:
|
||||
required: true
|
||||
selector:
|
||||
entity:
|
||||
filter:
|
||||
- integration: bring
|
||||
domain: event
|
||||
example: event.shopping_list
|
||||
reaction:
|
||||
required: true
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- label: 👍🏼
|
||||
value: thumbs_up
|
||||
- label: 🧐
|
||||
value: monocle
|
||||
- label: 🤤
|
||||
value: drooling
|
||||
- label: ❤️
|
||||
value: heart
|
||||
mode: dropdown
|
||||
example: thumbs_up
|
||||
|
||||
@@ -144,6 +144,19 @@
|
||||
},
|
||||
"notify_request_failed": {
|
||||
"message": "Failed to send push notification for Bring! due to a connection error, try again later"
|
||||
},
|
||||
"reaction_request_failed": {
|
||||
"message": "Failed to send reaction for Bring! due to a connection error, try again later"
|
||||
},
|
||||
"activity_not_found": {
|
||||
"message": "Failed to send reaction for Bring! — No recent activity found"
|
||||
},
|
||||
"entity_not_found": {
|
||||
"message": "Failed to send reaction for Bring! — Unknown entity {entity_id}"
|
||||
},
|
||||
|
||||
"entry_not_loaded": {
|
||||
"message": "The account associated with this Bring! list is either not loaded or disabled in Home Assistant."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
@@ -164,6 +177,20 @@
|
||||
"description": "Item name(s) to include in an urgent message e.g. 'Attention! Attention! - We still urgently need: [Items]'"
|
||||
}
|
||||
}
|
||||
},
|
||||
"send_reaction": {
|
||||
"name": "Send reaction",
|
||||
"description": "Sends a reaction to a recent activity on a Bring! list by a member of the shared list.",
|
||||
"fields": {
|
||||
"entity_id": {
|
||||
"name": "Activities",
|
||||
"description": "Select the Bring! activities event entity for reacting to its most recent event"
|
||||
},
|
||||
"reaction": {
|
||||
"name": "Reaction",
|
||||
"description": "Type of reaction to send in response."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
|
||||
@@ -11,6 +11,7 @@ DOMAINS_AND_TYPES = {
|
||||
Platform.SELECT: {"HYS"},
|
||||
Platform.SENSOR: {
|
||||
"A1",
|
||||
"A2",
|
||||
"MP1S",
|
||||
"RM4MINI",
|
||||
"RM4PRO",
|
||||
|
||||
@@ -10,6 +10,7 @@ from homeassistant.components.sensor import (
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
PERCENTAGE,
|
||||
UnitOfElectricCurrent,
|
||||
UnitOfElectricPotential,
|
||||
@@ -34,6 +35,24 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
key="air_quality",
|
||||
device_class=SensorDeviceClass.AQI,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="pm10",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM10,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="pm2_5",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM25,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="pm1",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM1,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="humidity",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
|
||||
@@ -25,6 +25,7 @@ def get_update_manager(device: BroadlinkDevice[_ApiT]) -> BroadlinkUpdateManager
|
||||
"""Return an update manager for a given Broadlink device."""
|
||||
update_managers: dict[str, type[BroadlinkUpdateManager]] = {
|
||||
"A1": BroadlinkA1UpdateManager,
|
||||
"A2": BroadlinkA2UpdateManager,
|
||||
"BG1": BroadlinkBG1UpdateManager,
|
||||
"HYS": BroadlinkThermostatUpdateManager,
|
||||
"LB1": BroadlinkLB1UpdateManager,
|
||||
@@ -118,6 +119,16 @@ class BroadlinkA1UpdateManager(BroadlinkUpdateManager[blk.a1]):
|
||||
return await self.device.async_request(self.device.api.check_sensors_raw)
|
||||
|
||||
|
||||
class BroadlinkA2UpdateManager(BroadlinkUpdateManager[blk.a2]):
|
||||
"""Manages updates for Broadlink A2 devices."""
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=10)
|
||||
|
||||
async def async_fetch_data(self) -> dict[str, Any]:
|
||||
"""Fetch data from the device."""
|
||||
return await self.device.async_request(self.device.api.check_sensors_raw)
|
||||
|
||||
|
||||
class BroadlinkMP1UpdateManager(BroadlinkUpdateManager[blk.mp1]):
|
||||
"""Manages updates for Broadlink MP1 devices."""
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
|
||||
"requirements": ["brother==4.3.1"],
|
||||
"requirements": ["brother==5.0.0"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"type": "_printer._tcp.local.",
|
||||
|
||||
@@ -12,6 +12,7 @@ from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNA
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import CONF_PASSKEY, DEFAULT_PORT, DOMAIN
|
||||
|
||||
@@ -21,12 +22,15 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
VERSION = 1
|
||||
|
||||
host: str
|
||||
port: int
|
||||
mac: str
|
||||
passkey: str | None = None
|
||||
username: str | None = None
|
||||
password: str | None = None
|
||||
def __init__(self) -> None:
|
||||
"""Initialize BSBLan flow."""
|
||||
self.host: str | None = None
|
||||
self.port: int = DEFAULT_PORT
|
||||
self.mac: str | None = None
|
||||
self.passkey: str | None = None
|
||||
self.username: str | None = None
|
||||
self.password: str | None = None
|
||||
self._auth_required = True
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -41,9 +45,111 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
self.username = user_input.get(CONF_USERNAME)
|
||||
self.password = user_input.get(CONF_PASSWORD)
|
||||
|
||||
return await self._validate_and_create()
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle Zeroconf discovery."""
|
||||
|
||||
self.host = str(discovery_info.ip_address)
|
||||
self.port = discovery_info.port or DEFAULT_PORT
|
||||
|
||||
# Get MAC from properties
|
||||
self.mac = discovery_info.properties.get("mac")
|
||||
|
||||
# If MAC was found in zeroconf, use it immediately
|
||||
if self.mac:
|
||||
await self.async_set_unique_id(format_mac(self.mac))
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={
|
||||
CONF_HOST: self.host,
|
||||
CONF_PORT: self.port,
|
||||
}
|
||||
)
|
||||
else:
|
||||
# MAC not available from zeroconf - check for existing host/port first
|
||||
self._async_abort_entries_match(
|
||||
{CONF_HOST: self.host, CONF_PORT: self.port}
|
||||
)
|
||||
|
||||
# Try to get device info without authentication to minimize discovery popup
|
||||
config = BSBLANConfig(host=self.host, port=self.port)
|
||||
session = async_get_clientsession(self.hass)
|
||||
bsblan = BSBLAN(config, session)
|
||||
try:
|
||||
device = await bsblan.device()
|
||||
except BSBLANError:
|
||||
# Device requires authentication - proceed to discovery confirm
|
||||
self.mac = None
|
||||
else:
|
||||
self.mac = device.MAC
|
||||
|
||||
# Got MAC without auth - set unique ID and check for existing device
|
||||
await self.async_set_unique_id(format_mac(self.mac))
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={
|
||||
CONF_HOST: self.host,
|
||||
CONF_PORT: self.port,
|
||||
}
|
||||
)
|
||||
# No auth needed, so we can proceed to a confirmation step without fields
|
||||
self._auth_required = False
|
||||
|
||||
# Proceed to get credentials
|
||||
self.context["title_placeholders"] = {"name": f"BSBLAN {self.host}"}
|
||||
return await self.async_step_discovery_confirm()
|
||||
|
||||
async def async_step_discovery_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle getting credentials for discovered device."""
|
||||
if user_input is None:
|
||||
data_schema = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_PASSKEY): str,
|
||||
vol.Optional(CONF_USERNAME): str,
|
||||
vol.Optional(CONF_PASSWORD): str,
|
||||
}
|
||||
)
|
||||
if not self._auth_required:
|
||||
data_schema = vol.Schema({})
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="discovery_confirm",
|
||||
data_schema=data_schema,
|
||||
description_placeholders={"host": str(self.host)},
|
||||
)
|
||||
|
||||
if not self._auth_required:
|
||||
return self._async_create_entry()
|
||||
|
||||
self.passkey = user_input.get(CONF_PASSKEY)
|
||||
self.username = user_input.get(CONF_USERNAME)
|
||||
self.password = user_input.get(CONF_PASSWORD)
|
||||
|
||||
return await self._validate_and_create(is_discovery=True)
|
||||
|
||||
async def _validate_and_create(
|
||||
self, is_discovery: bool = False
|
||||
) -> ConfigFlowResult:
|
||||
"""Validate device connection and create entry."""
|
||||
try:
|
||||
await self._get_bsblan_info()
|
||||
await self._get_bsblan_info(is_discovery=is_discovery)
|
||||
except BSBLANError:
|
||||
if is_discovery:
|
||||
return self.async_show_form(
|
||||
step_id="discovery_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_PASSKEY): str,
|
||||
vol.Optional(CONF_USERNAME): str,
|
||||
vol.Optional(CONF_PASSWORD): str,
|
||||
}
|
||||
),
|
||||
errors={"base": "cannot_connect"},
|
||||
description_placeholders={"host": str(self.host)},
|
||||
)
|
||||
return self._show_setup_form({"base": "cannot_connect"})
|
||||
|
||||
return self._async_create_entry()
|
||||
@@ -67,6 +173,7 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
@callback
|
||||
def _async_create_entry(self) -> ConfigFlowResult:
|
||||
"""Create the config entry."""
|
||||
return self.async_create_entry(
|
||||
title=format_mac(self.mac),
|
||||
data={
|
||||
@@ -78,8 +185,10 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
},
|
||||
)
|
||||
|
||||
async def _get_bsblan_info(self, raise_on_progress: bool = True) -> None:
|
||||
"""Get device information from an BSBLAN device."""
|
||||
async def _get_bsblan_info(
|
||||
self, raise_on_progress: bool = True, is_discovery: bool = False
|
||||
) -> None:
|
||||
"""Get device information from a BSBLAN device."""
|
||||
config = BSBLANConfig(
|
||||
host=self.host,
|
||||
passkey=self.passkey,
|
||||
@@ -90,11 +199,18 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
session = async_get_clientsession(self.hass)
|
||||
bsblan = BSBLAN(config, session)
|
||||
device = await bsblan.device()
|
||||
self.mac = device.MAC
|
||||
retrieved_mac = device.MAC
|
||||
|
||||
await self.async_set_unique_id(
|
||||
format_mac(self.mac), raise_on_progress=raise_on_progress
|
||||
)
|
||||
# Handle unique ID assignment based on whether MAC was available from zeroconf
|
||||
if not self.mac:
|
||||
# MAC wasn't available from zeroconf, now we have it from API
|
||||
self.mac = retrieved_mac
|
||||
await self.async_set_unique_id(
|
||||
format_mac(self.mac), raise_on_progress=raise_on_progress
|
||||
)
|
||||
|
||||
# Always allow updating host/port for both user and discovery flows
|
||||
# This ensures connectivity is maintained when devices change IP addresses
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={
|
||||
CONF_HOST: self.host,
|
||||
|
@@ -7,5 +7,11 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["bsblan"],
  "requirements": ["python-bsblan==2.1.0"]
  "requirements": ["python-bsblan==2.1.0"],
  "zeroconf": [
    {
      "type": "_http._tcp.local.",
      "name": "bsb-lan*"
    }
  ]
}

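The new zeroconf matcher makes Home Assistant start this config flow for any _http._tcp.local. service whose instance name starts with "bsb-lan". To inspect what such a device actually advertises on the network, a small standalone script with the zeroconf package can be used (a debugging sketch under that assumption, not part of the integration):

    from zeroconf import ServiceBrowser, ServiceListener, Zeroconf

    class BSBLanListener(ServiceListener):
        def add_service(self, zc: Zeroconf, type_: str, name: str) -> None:
            # Only instances whose name starts with "bsb-lan" match the manifest entry.
            if name.lower().startswith("bsb-lan"):
                info = zc.get_service_info(type_, name)
                print(name, info.parsed_addresses() if info else "no info")

        def remove_service(self, zc: Zeroconf, type_: str, name: str) -> None: ...

        def update_service(self, zc: Zeroconf, type_: str, name: str) -> None: ...

    zc = Zeroconf()
    browser = ServiceBrowser(zc, "_http._tcp.local.", BSBLanListener())
    input("Browsing, press Enter to stop\n")
    zc.close()
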
@@ -20,6 +20,8 @@ from . import BSBLanConfigEntry, BSBLanData
from .coordinator import BSBLanCoordinatorData
from .entity import BSBLanEntity

PARALLEL_UPDATES = 1


@dataclass(frozen=True, kw_only=True)
class BSBLanSensorEntityDescription(SensorEntityDescription):

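PARALLEL_UPDATES is read by Home Assistant at the platform-module level and caps how many entities of the platform are refreshed or serviced concurrently, so a value of 1 keeps the BSB-Lan device from receiving overlapping requests. Conceptually it behaves like a shared semaphore around entity updates (a simplified sketch of the idea, not core's actual implementation):

    import asyncio

    PARALLEL_UPDATES = 1
    _update_gate = asyncio.Semaphore(PARALLEL_UPDATES)

    async def refresh(entity) -> None:
        # With a semaphore of 1, only one entity update is in flight at a time.
        async with _update_gate:
            await entity.async_update()
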
@@ -13,7 +13,25 @@
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "host": "The hostname or IP address of your BSB-Lan device."
          "host": "The hostname or IP address of your BSB-Lan device.",
          "port": "The port number of your BSB-Lan device.",
          "passkey": "The passkey for your BSB-Lan device.",
          "username": "The username for your BSB-Lan device.",
          "password": "The password for your BSB-Lan device."
        }
      },
      "discovery_confirm": {
        "title": "BSB-Lan device discovered",
        "description": "A BSB-Lan device was discovered at {host}. Please provide credentials if required.",
        "data": {
          "passkey": "[%key:component::bsblan::config::step::user::data::passkey%]",
          "username": "[%key:common::config_flow::data::username%]",
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "passkey": "[%key:component::bsblan::config::step::user::data_description::passkey%]",
          "username": "[%key:component::bsblan::config::step::user::data_description::username%]",
          "password": "[%key:component::bsblan::config::step::user::data_description::password%]"
        }
      }
    },

@@ -45,7 +45,7 @@ class BTHomePassiveBluetoothProcessorCoordinator(
    @property
    def sleepy_device(self) -> bool:
        """Return True if the device is a sleepy device."""
        return self.entry.data.get(CONF_SLEEPY_DEVICE, self.device_data.sleepy_device)
        return self.entry.data.get(CONF_SLEEPY_DEVICE, self.device_data.sleepy_device)  # type: ignore[no-any-return]


class BTHomePassiveBluetoothDataProcessor[_T](

@@ -70,7 +70,7 @@ def get_event_classes_by_device_id(hass: HomeAssistant, device_id: str) -> list[
    bthome_config_entry = next(
        entry for entry in config_entries if entry and entry.domain == DOMAIN
    )
    return bthome_config_entry.data.get(CONF_DISCOVERED_EVENT_CLASSES, [])
    return bthome_config_entry.data.get(CONF_DISCOVERED_EVENT_CLASSES, [])  # type: ignore[no-any-return]


def get_event_types_by_event_class(event_class: str) -> set[str]:

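The two type: ignore[no-any-return] comments above are needed because a config entry's data is a mapping whose values are typed as Any, so .get(...) returns Any while the enclosing functions declare bool and list return types. A minimal reproduction of the mypy complaint (a sketch, unrelated to the actual bthome classes):

    from typing import Any

    def sleepy(data: dict[str, Any]) -> bool:
        # With --warn-return-any, mypy reports:
        # "Returning Any from function declared to return 'bool'  [no-any-return]"
        return data.get("sleepy_device", False)  # type: ignore[no-any-return]
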
@@ -10,14 +10,8 @@ import random
from typing import Any

from aiohttp import ClientError, ClientResponseError
from hass_nabucasa import Cloud, CloudError
from hass_nabucasa.api import CloudApiError, CloudApiNonRetryableError
from hass_nabucasa.cloud_api import (
    FilesHandlerListEntry,
    async_files_delete_file,
    async_files_list,
)
from hass_nabucasa.files import FilesError, StorageType, calculate_b64md5
from hass_nabucasa import Cloud, CloudApiError, CloudApiNonRetryableError, CloudError
from hass_nabucasa.files import FilesError, StorageType, StoredFile, calculate_b64md5

from homeassistant.components.backup import (
    AgentBackup,
@@ -186,8 +180,7 @@ class CloudBackupAgent(BackupAgent):
        """
        backup = await self._async_get_backup(backup_id)
        try:
            await async_files_delete_file(
                self._cloud,
            await self._cloud.files.delete(
                storage_type=StorageType.BACKUP,
                filename=backup["Key"],
            )
@@ -199,12 +192,10 @@ class CloudBackupAgent(BackupAgent):
        backups = await self._async_list_backups()
        return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups]

    async def _async_list_backups(self) -> list[FilesHandlerListEntry]:
    async def _async_list_backups(self) -> list[StoredFile]:
        """List backups."""
        try:
            backups = await async_files_list(
                self._cloud, storage_type=StorageType.BACKUP
            )
            backups = await self._cloud.files.list(storage_type=StorageType.BACKUP)
        except (ClientError, CloudError) as err:
            raise BackupAgentError("Failed to list backups") from err

@@ -220,7 +211,7 @@ class CloudBackupAgent(BackupAgent):
        backup = await self._async_get_backup(backup_id)
        return AgentBackup.from_dict(backup["Metadata"])

    async def _async_get_backup(self, backup_id: str) -> FilesHandlerListEntry:
    async def _async_get_backup(self, backup_id: str) -> StoredFile:
        """Return a backup."""
        backups = await self._async_list_backups()

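These backup-agent hunks move from the module-level async_files_list / async_files_delete_file helpers in hass_nabucasa.cloud_api to the files handler exposed on the Cloud object, and swap FilesHandlerListEntry for StoredFile. A condensed sketch of the new call pattern, using only the calls and keys visible in this diff (error handling trimmed; prune_backups is a hypothetical helper name):

    from hass_nabucasa.files import StorageType

    async def prune_backups(cloud, keep: set[str]) -> None:
        # List everything in cloud backup storage, then delete entries whose
        # object key is not in the keep set.
        stored = await cloud.files.list(storage_type=StorageType.BACKUP)
        for item in stored:
            if item["Key"] not in keep:
                await cloud.files.delete(
                    storage_type=StorageType.BACKUP,
                    filename=item["Key"],
                )
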
@@ -40,10 +40,11 @@ from .prefs import CloudPreferences
_LOGGER = logging.getLogger(__name__)

VALID_REPAIR_TRANSLATION_KEYS = {
    "connection_error",
    "no_subscription",
    "warn_bad_custom_domain_configuration",
    "reset_bad_custom_domain_configuration",
    "subscription_expired",
    "warn_bad_custom_domain_configuration",
}


@@ -71,7 +71,7 @@ _CLOUD_ERRORS: dict[
] = {
    TimeoutError: (
        HTTPStatus.BAD_GATEWAY,
        "Unable to reach the Home Assistant cloud.",
        "Unable to reach the Home Assistant Cloud.",
    ),
    aiohttp.ClientError: (
        HTTPStatus.INTERNAL_SERVER_ERROR,

@@ -13,6 +13,6 @@
  "integration_type": "system",
  "iot_class": "cloud_push",
  "loggers": ["acme", "hass_nabucasa", "snitun"],
  "requirements": ["hass-nabucasa==0.105.0"],
  "requirements": ["hass-nabucasa==0.108.0"],
  "single_config_entry": true
}

Some files were not shown because too many files have changed in this diff.