mirror of
https://github.com/home-assistant/core.git
synced 2026-03-24 08:18:29 +01:00
Compare commits
922 Commits
2025.1.0b8
...
gj-2024120
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e20bda33a4 | ||
|
|
f2125a1133 | ||
|
|
557caa7ecb | ||
|
|
d973e42c97 | ||
|
|
428c7ced94 | ||
|
|
7b14bd89a0 | ||
|
|
f1f45d6b5b | ||
|
|
50e4311416 | ||
|
|
1a32a2f9c2 | ||
|
|
f9cecc0cd5 | ||
|
|
6fe2612f1d | ||
|
|
43fbb2ab7b | ||
|
|
2295e3779a | ||
|
|
53f80e9759 | ||
|
|
f5d35bca72 | ||
|
|
77221f53b3 | ||
|
|
a2d76cac5a | ||
|
|
a69786f64f | ||
|
|
2900baac04 | ||
|
|
2092456c7e | ||
|
|
2bedb2cadb | ||
|
|
5329356f20 | ||
|
|
0c68854fdf | ||
|
|
8777dd9065 | ||
|
|
57294fa461 | ||
|
|
3a078d5414 | ||
|
|
568a27000d | ||
|
|
4612f4da19 | ||
|
|
ec45cb4939 | ||
|
|
ccd7b1c21a | ||
|
|
3ee2dc9790 | ||
|
|
889f699e5d | ||
|
|
5ffae140af | ||
|
|
04eb86e5a0 | ||
|
|
3077a4cdee | ||
|
|
02bf8447b3 | ||
|
|
cf29ef91ee | ||
|
|
439f22f584 | ||
|
|
b17c36eeff | ||
|
|
41fe863b72 | ||
|
|
dfc4cdf785 | ||
|
|
654e111c23 | ||
|
|
9d5fe77b71 | ||
|
|
958b1e7759 | ||
|
|
2f5545e7b8 | ||
|
|
15d57692d9 | ||
|
|
a55bd593af | ||
|
|
3978c4cdb3 | ||
|
|
4690aef8b8 | ||
|
|
6292d6c0dc | ||
|
|
af0f416497 | ||
|
|
acbb15a496 | ||
|
|
9f3b39a2d2 | ||
|
|
5a91562d1d | ||
|
|
ac58494b55 | ||
|
|
33d552e3f7 | ||
|
|
f3222045ae | ||
|
|
0d968267a2 | ||
|
|
85bea5b70e | ||
|
|
02347d5d36 | ||
|
|
754de6f998 | ||
|
|
32d7a23bff | ||
|
|
fe4e001fa5 | ||
|
|
725d835fab | ||
|
|
640da1cc67 | ||
|
|
6690b121c0 | ||
|
|
8a3ef101e6 | ||
|
|
09ae388f4e | ||
|
|
659450dac9 | ||
|
|
37c3a9546c | ||
|
|
b32c401c24 | ||
|
|
19e5b091c5 | ||
|
|
24c50e0988 | ||
|
|
fe8a93d62f | ||
|
|
b39c2719d7 | ||
|
|
0c9fd7c482 | ||
|
|
dedcef7230 | ||
|
|
595f49ee9f | ||
|
|
5a7b6cd7a0 | ||
|
|
f0c6b47522 | ||
|
|
d349c47694 | ||
|
|
f878465a9a | ||
|
|
81b7d01a7d | ||
|
|
f5dd3ef530 | ||
|
|
88f16807a0 | ||
|
|
76d9bcbdfb | ||
|
|
f01598aadd | ||
|
|
c56eee3639 | ||
|
|
06d8bc658f | ||
|
|
f724ae9a01 | ||
|
|
bbe897745e | ||
|
|
089c9c41ba | ||
|
|
43fe4ebbbe | ||
|
|
fc1b6292cd | ||
|
|
174f3ca755 | ||
|
|
51d277fc0c | ||
|
|
b98e1a1d2f | ||
|
|
a08e42399d | ||
|
|
2b0e383b2e | ||
|
|
9868138fc4 | ||
|
|
c601170b1d | ||
|
|
5ea5413064 | ||
|
|
abc256fb3e | ||
|
|
2ec971ad9d | ||
|
|
235fda55fe | ||
|
|
028a0d4eec | ||
|
|
14f3868c26 | ||
|
|
54e4e8a7bb | ||
|
|
a8cb618f96 | ||
|
|
ca5aca4ab9 | ||
|
|
ea7e53d10d | ||
|
|
c7de3112fb | ||
|
|
4a64c797d4 | ||
|
|
2a514ebc3f | ||
|
|
44b577cadb | ||
|
|
24bb623567 | ||
|
|
9e0df89bee | ||
|
|
fb309a3f98 | ||
|
|
829d3bf621 | ||
|
|
a2afc1b670 | ||
|
|
7b413b5faf | ||
|
|
734d1898cf | ||
|
|
c651e2b3c3 | ||
|
|
ef8b8fbbaa | ||
|
|
23e04ced9c | ||
|
|
13a7ad759c | ||
|
|
99d250f222 | ||
|
|
689d7d3cd9 | ||
|
|
b4f4b06f29 | ||
|
|
85b4be2f16 | ||
|
|
5e0bbf65e4 | ||
|
|
514b74096a | ||
|
|
b1d8994751 | ||
|
|
c215aee940 | ||
|
|
5f9457ab6e | ||
|
|
76cdfe861c | ||
|
|
bd91cc4bdc | ||
|
|
cde3ba5504 | ||
|
|
21256cab85 | ||
|
|
d62a66eaf2 | ||
|
|
46b17b539c | ||
|
|
6aed2dcc0f | ||
|
|
7430238c0a | ||
|
|
cd88913daf | ||
|
|
8e39c65759 | ||
|
|
0f8785d8bc | ||
|
|
566f514a75 | ||
|
|
f3683f0b5e | ||
|
|
a39137c3fc | ||
|
|
c2b6c4b4fc | ||
|
|
daac986e00 | ||
|
|
02ec1d1b71 | ||
|
|
632c166201 | ||
|
|
8b12f5270e | ||
|
|
b0d3aa1c34 | ||
|
|
e6c696933f | ||
|
|
e5164496cf | ||
|
|
88c3be4ecf | ||
|
|
619917c679 | ||
|
|
e433c2250c | ||
|
|
59429dea39 | ||
|
|
3e4d92f6a7 | ||
|
|
a3d24f2472 | ||
|
|
46c5591336 | ||
|
|
99f24ca59c | ||
|
|
1fee0a5aa2 | ||
|
|
ef34a33a7b | ||
|
|
bb505baae7 | ||
|
|
b446eaf2d0 | ||
|
|
60d51bf4ad | ||
|
|
9b66ba61a8 | ||
|
|
eb651a8a71 | ||
|
|
1b520e37e2 | ||
|
|
9331b1572c | ||
|
|
762bc7b8d1 | ||
|
|
6e255060c6 | ||
|
|
93b3d76ee2 | ||
|
|
e188d9a00c | ||
|
|
d908d2ab55 | ||
|
|
55bde60f1a | ||
|
|
5ca68cb273 | ||
|
|
2e189480a5 | ||
|
|
eb98f110d3 | ||
|
|
5cf56207fe | ||
|
|
9f7a38f189 | ||
|
|
476935050a | ||
|
|
27c2f2333e | ||
|
|
40a3e19ce5 | ||
|
|
9d7706c9be | ||
|
|
a67bc12bb8 | ||
|
|
6cbe18ebbd | ||
|
|
1cff45b8b7 | ||
|
|
fc39b6792c | ||
|
|
3638d25f6a | ||
|
|
421f9aa638 | ||
|
|
9a1b965c7f | ||
|
|
9db6be11f7 | ||
|
|
6fdccda225 | ||
|
|
1172887c80 | ||
|
|
f3b7317373 | ||
|
|
edddd6edfb | ||
|
|
016a274698 | ||
|
|
c89d60fb5d | ||
|
|
b5a7d0258a | ||
|
|
137666982d | ||
|
|
77a351f992 | ||
|
|
a8645ea4ed | ||
|
|
e886c9e054 | ||
|
|
79ee2e954b | ||
|
|
e736ca72f0 | ||
|
|
be06ef46c1 | ||
|
|
51e3bf42f2 | ||
|
|
c6cab3259c | ||
|
|
146d6bbc68 | ||
|
|
f36a10126c | ||
|
|
3622e8331b | ||
|
|
241fc2af67 | ||
|
|
9d7c917771 | ||
|
|
d3bedd693a | ||
|
|
082ef3f85f | ||
|
|
5e648ebb5c | ||
|
|
7a442af9fa | ||
|
|
406c00997f | ||
|
|
19a89ebcf3 | ||
|
|
bc8a2b58d3 | ||
|
|
6a50648223 | ||
|
|
e83ee00af8 | ||
|
|
8ae02aaba0 | ||
|
|
0eea265415 | ||
|
|
8c13daf6d9 | ||
|
|
4ccc686295 | ||
|
|
31c36beb2e | ||
|
|
9c5c1a35a4 | ||
|
|
b046ca9abe | ||
|
|
650e14379c | ||
|
|
1421f4c124 | ||
|
|
f0257fec88 | ||
|
|
8a35261fd8 | ||
|
|
f57640c2cd | ||
|
|
23a2b19ca0 | ||
|
|
6cbbfec5f5 | ||
|
|
65df8b946f | ||
|
|
4b37b367de | ||
|
|
c1520a9b20 | ||
|
|
239aa94b6f | ||
|
|
c4d8cda92b | ||
|
|
6e88c6570e | ||
|
|
ecc89fd9a9 | ||
|
|
18de735619 | ||
|
|
f80f6d9e3d | ||
|
|
c408bd6aad | ||
|
|
faf2c64cc4 | ||
|
|
60bdc13c94 | ||
|
|
fa96168488 | ||
|
|
526277da0f | ||
|
|
934f59449d | ||
|
|
026df07451 | ||
|
|
38d008bb66 | ||
|
|
406c3b5925 | ||
|
|
7cc61d1b86 | ||
|
|
421c4889bf | ||
|
|
d6ee7a2c1e | ||
|
|
6a032baa48 | ||
|
|
edc7c0ff2f | ||
|
|
8109efe810 | ||
|
|
5e50b11114 | ||
|
|
4f796174fd | ||
|
|
5fc3618b4a | ||
|
|
d970b728ce | ||
|
|
c66176cfa5 | ||
|
|
6f138c71b4 | ||
|
|
6e80ad505b | ||
|
|
8db63adc11 | ||
|
|
2b51ab1c75 | ||
|
|
f4e7c9d6c3 | ||
|
|
6359a75977 | ||
|
|
096c6b8575 | ||
|
|
959cea45b8 | ||
|
|
e3f03c9da1 | ||
|
|
1426c421f3 | ||
|
|
58df5f2394 | ||
|
|
d333fa320f | ||
|
|
6d7e9f10d9 | ||
|
|
0c144092c6 | ||
|
|
1de4d0efda | ||
|
|
440cd5bee0 | ||
|
|
09e2168f72 | ||
|
|
b897e6a85f | ||
|
|
3e9b410b7c | ||
|
|
3c825bb826 | ||
|
|
e8ad391df2 | ||
|
|
504ed83ffb | ||
|
|
eaaab4ccfe | ||
|
|
4ddb72314d | ||
|
|
c489f94026 | ||
|
|
38dcc782d1 | ||
|
|
2d2f4f5cec | ||
|
|
ca34541b04 | ||
|
|
984c380e13 | ||
|
|
1c053485a9 | ||
|
|
ab28115d2b | ||
|
|
d986fe7a07 | ||
|
|
6fd73730cc | ||
|
|
b93aa760c5 | ||
|
|
b84a4dc120 | ||
|
|
cdcc7dbbe8 | ||
|
|
8d38279993 | ||
|
|
153496b5f4 | ||
|
|
1fa3d90d73 | ||
|
|
1e4c7e832d | ||
|
|
275365a9d3 | ||
|
|
4709a3162c | ||
|
|
157548609b | ||
|
|
fc0a6c2ff3 | ||
|
|
0d116ec6a2 | ||
|
|
6060f637a8 | ||
|
|
ba9ad009e9 | ||
|
|
ec5759d3b9 | ||
|
|
c7a5c49a03 | ||
|
|
9b55faa879 | ||
|
|
6fd9476bb9 | ||
|
|
d33ee130bc | ||
|
|
e1ffd9380d | ||
|
|
fc6695b05c | ||
|
|
8f71d7a6f3 | ||
|
|
4dbf2b0320 | ||
|
|
3aa466806e | ||
|
|
7b63c17101 | ||
|
|
dae87db244 | ||
|
|
fba1b4be5b | ||
|
|
c15073cc27 | ||
|
|
25041aa02d | ||
|
|
96ad2b6ed8 | ||
|
|
a649ff4a91 | ||
|
|
1ceebd92a9 | ||
|
|
b009f11013 | ||
|
|
2d67aca550 | ||
|
|
98ef32c668 | ||
|
|
3a0072d42d | ||
|
|
86ea68eaec | ||
|
|
e67a131bd9 | ||
|
|
c36d73e469 | ||
|
|
ac279d9794 | ||
|
|
4e5bf5ac22 | ||
|
|
2e5e2c50dd | ||
|
|
c9a7afe439 | ||
|
|
0a444de39c | ||
|
|
559c411dd2 | ||
|
|
61ea732caa | ||
|
|
11ebc27bfe | ||
|
|
ccb94ac6a6 | ||
|
|
ab0dfe304c | ||
|
|
8b0be70fdd | ||
|
|
f7df214dd8 | ||
|
|
11fa6b2e4e | ||
|
|
52c57eb2e5 | ||
|
|
0d85f54e76 | ||
|
|
b3af12c9b1 | ||
|
|
6571ebf15b | ||
|
|
2237ed9af7 | ||
|
|
c442935fdd | ||
|
|
6dc9c6819f | ||
|
|
a745e079e9 | ||
|
|
19f460614e | ||
|
|
20d6ba4286 | ||
|
|
4cf7a51a05 | ||
|
|
8e2b284a7f | ||
|
|
74c3e9629f | ||
|
|
907f1e062a | ||
|
|
fd169affd7 | ||
|
|
81c390d3b8 | ||
|
|
d356d4bb82 | ||
|
|
4d93fbcb52 | ||
|
|
b9259b6f77 | ||
|
|
22b84450e8 | ||
|
|
9ef93517e7 | ||
|
|
cdc96fdf6f | ||
|
|
ab8af033c0 | ||
|
|
619dee5d93 | ||
|
|
00c3b8cc3e | ||
|
|
bf747bb733 | ||
|
|
560d15effb | ||
|
|
39aa0339ac | ||
|
|
675cc32534 | ||
|
|
31b45e6d3f | ||
|
|
6fd4d7acaa | ||
|
|
c4b4cad335 | ||
|
|
32d3fe714f | ||
|
|
6fd0760f25 | ||
|
|
59d61104d1 | ||
|
|
028c5349ac | ||
|
|
9388879b78 | ||
|
|
246a9f95a3 | ||
|
|
f31f6d7ed0 | ||
|
|
1f0eda8e47 | ||
|
|
bce7e9ba5e | ||
|
|
475a2fb828 | ||
|
|
24c70caf33 | ||
|
|
eba090c9ef | ||
|
|
b5971ec55d | ||
|
|
ad84490541 | ||
|
|
033064f832 | ||
|
|
a2d9920aa9 | ||
|
|
8386eaa92b | ||
|
|
aa741a9207 | ||
|
|
024b9ae414 | ||
|
|
02956f9a83 | ||
|
|
9d1989125f | ||
|
|
04d5cc8f79 | ||
|
|
e29ead2a36 | ||
|
|
5df7092f41 | ||
|
|
823feae0f9 | ||
|
|
3c6113e37c | ||
|
|
139b747a70 | ||
|
|
da30dbcfe4 | ||
|
|
0deb46295d | ||
|
|
1abcac5fb5 | ||
|
|
3b6f47e438 | ||
|
|
6e1a13f878 | ||
|
|
ee865d2f0f | ||
|
|
dd57c75e64 | ||
|
|
0cc586a3ac | ||
|
|
b6c0257c43 | ||
|
|
cabdae98e8 | ||
|
|
07482de4ab | ||
|
|
31719bc84c | ||
|
|
1ca5f79708 | ||
|
|
a5f70dec96 | ||
|
|
6e111d18ec | ||
|
|
ec37e1ff8d | ||
|
|
8705fd8546 | ||
|
|
050a17db4d | ||
|
|
9dc4597f59 | ||
|
|
9dd7021d63 | ||
|
|
6a4160bcc4 | ||
|
|
411d14c2ce | ||
|
|
d7315f4500 | ||
|
|
c4ac648a2b | ||
|
|
e9616f38d8 | ||
|
|
1550086dd6 | ||
|
|
8e28b7b49b | ||
|
|
4a33b1d936 | ||
|
|
8bfdbc173a | ||
|
|
3ce4c47cfc | ||
|
|
0d9ac25257 | ||
|
|
15e785b974 | ||
|
|
13527768cc | ||
|
|
071e675d9d | ||
|
|
316a61fcde | ||
|
|
9901f3c3dd | ||
|
|
c9d8c59b45 | ||
|
|
0184d8e954 | ||
|
|
2f892678f6 | ||
|
|
fe8cae8eb5 | ||
|
|
64752af4c2 | ||
|
|
c5f80dd01d | ||
|
|
2704090418 | ||
|
|
f01c860c44 | ||
|
|
bb4a497247 | ||
|
|
488c5a6b9f | ||
|
|
acbd501ede | ||
|
|
d06cd1ad3b | ||
|
|
4129697dd9 | ||
|
|
4086d092ff | ||
|
|
988a0639f4 | ||
|
|
c9c553047c | ||
|
|
f05cffea17 | ||
|
|
d2a188ad3c | ||
|
|
02e30edc6c | ||
|
|
0e52ea482f | ||
|
|
d46be61b6f | ||
|
|
f05e234c30 | ||
|
|
bc09e825a9 | ||
|
|
6f6d485530 | ||
|
|
63eb27df7b | ||
|
|
da29b2f711 | ||
|
|
c2f6f93f1d | ||
|
|
39143a2e79 | ||
|
|
99e65c38b0 | ||
|
|
ec7d2f3731 | ||
|
|
d43187327f | ||
|
|
8be01ac9d6 | ||
|
|
e052ab27f2 | ||
|
|
43ec63eabc | ||
|
|
7a2a6cf7d8 | ||
|
|
eff440d2a8 | ||
|
|
3fea4efb9f | ||
|
|
dc1928f3eb | ||
|
|
f8618e65f6 | ||
|
|
e99aaed7fa | ||
|
|
d000558227 | ||
|
|
7daf442271 | ||
|
|
b8f458458b | ||
|
|
85ecb04abf | ||
|
|
20db7fdc96 | ||
|
|
a1d43b9387 | ||
|
|
de9c05ad53 | ||
|
|
a01521b224 | ||
|
|
2413bb4f52 | ||
|
|
1496da8e94 | ||
|
|
802ad55493 | ||
|
|
48da88583f | ||
|
|
0ab66a4ed1 | ||
|
|
3b13c5bfdd | ||
|
|
42532e9695 | ||
|
|
0dd9845501 | ||
|
|
3a213b2d17 | ||
|
|
d155d93462 | ||
|
|
5888b83f22 | ||
|
|
471f77fea4 | ||
|
|
c684b06734 | ||
|
|
393551d696 | ||
|
|
24b81df0e6 | ||
|
|
a66cf62b09 | ||
|
|
901099325b | ||
|
|
30695cfef5 | ||
|
|
5d2a8e8208 | ||
|
|
4019045e7b | ||
|
|
ec2c8da1c5 | ||
|
|
d1e8a2a32d | ||
|
|
feeee2d15e | ||
|
|
8a052177a4 | ||
|
|
875727ed27 | ||
|
|
f1c62000e1 | ||
|
|
e38f21c4ef | ||
|
|
00c052bb22 | ||
|
|
111ef13a3f | ||
|
|
89c73f56b1 | ||
|
|
d13c14eedb | ||
|
|
9532e98166 | ||
|
|
6884d790ca | ||
|
|
6ab45f8c9e | ||
|
|
7009a96711 | ||
|
|
a47fa08a9b | ||
|
|
4eb23f3039 | ||
|
|
1c314b5c02 | ||
|
|
edee58f114 | ||
|
|
ef652e57d1 | ||
|
|
b956aa68da | ||
|
|
75ce89dc41 | ||
|
|
a9540e893f | ||
|
|
dd5625436b | ||
|
|
7a484ee0ae | ||
|
|
e5c5d1bcfd | ||
|
|
56a9cd010e | ||
|
|
b7b5577f0c | ||
|
|
0787257cc0 | ||
|
|
54263f1325 | ||
|
|
14d2f2c589 | ||
|
|
c533f63a87 | ||
|
|
cd30f75be9 | ||
|
|
527775a5f1 | ||
|
|
99d7f462a0 | ||
|
|
67e2379d2b | ||
|
|
fb0047ead0 | ||
|
|
9764d704bd | ||
|
|
3690d7c2b4 | ||
|
|
204b5989e0 | ||
|
|
3892f6d8f3 | ||
|
|
140ff50eaf | ||
|
|
5ef06b1f33 | ||
|
|
9638bee8de | ||
|
|
cd88a8cebd | ||
|
|
d896b4e66a | ||
|
|
e4eb414be8 | ||
|
|
fce5be928e | ||
|
|
c4455c709b | ||
|
|
2c7a1446b8 | ||
|
|
20cf21d88e | ||
|
|
eafbf1d1fd | ||
|
|
acd95975e4 | ||
|
|
bc22e34fc3 | ||
|
|
bf0cf1c30f | ||
|
|
e95bfe438b | ||
|
|
0a457979ec | ||
|
|
2f295efb3f | ||
|
|
74613ae0c4 | ||
|
|
4d4cfabfba | ||
|
|
7ae81bae4c | ||
|
|
7ec10bfd6f | ||
|
|
d662a4465c | ||
|
|
66b4b24612 | ||
|
|
a2077405e2 | ||
|
|
f0a1a6c2ad | ||
|
|
32b7b5aa66 | ||
|
|
871a7d0dc1 | ||
|
|
da807001ab | ||
|
|
a104799893 | ||
|
|
45d1624d70 | ||
|
|
1059cf3f07 | ||
|
|
dd34a10934 | ||
|
|
d4f3dd2335 | ||
|
|
0ecb1ea8cf | ||
|
|
3d5a42749d | ||
|
|
a2c2d37eb1 | ||
|
|
f68c16586d | ||
|
|
11d80065ef | ||
|
|
7012648bf8 | ||
|
|
d96b2499e2 | ||
|
|
a41bdfe0cc | ||
|
|
0d3872a4c7 | ||
|
|
65d8d071dd | ||
|
|
bb97a16756 | ||
|
|
c9a607aa45 | ||
|
|
c7993eff99 | ||
|
|
8a880d6134 | ||
|
|
cc0fb80481 | ||
|
|
276806d3e1 | ||
|
|
0589df7d95 | ||
|
|
aab676a313 | ||
|
|
7f473b8260 | ||
|
|
fea4a00424 | ||
|
|
7d146ddae0 | ||
|
|
8f06e0903f | ||
|
|
677ba3a6a6 | ||
|
|
a322deaab8 | ||
|
|
584439cade | ||
|
|
baa13debcc | ||
|
|
1d42890748 | ||
|
|
622d23cadd | ||
|
|
ebeb2ecb09 | ||
|
|
b3cb2928fc | ||
|
|
b639466453 | ||
|
|
69241e4ca6 | ||
|
|
80371a865e | ||
|
|
c9dbb205dd | ||
|
|
197ff932af | ||
|
|
287b7eec13 | ||
|
|
e6da6d9612 | ||
|
|
d4f38099ae | ||
|
|
9f2cb7bf56 | ||
|
|
8a84abd50f | ||
|
|
b15e08ca9c | ||
|
|
3fb980901e | ||
|
|
bd3a3fd26c | ||
|
|
dfcb977a1d | ||
|
|
94ad6ae814 | ||
|
|
97aa93f92b | ||
|
|
ee025198e8 | ||
|
|
90265e2afd | ||
|
|
a53554dad3 | ||
|
|
2b6ad84cf5 | ||
|
|
92655fd640 | ||
|
|
e43f72c452 | ||
|
|
9320ccfa4f | ||
|
|
336af8b551 | ||
|
|
8a2f8dc736 | ||
|
|
dc048bfcf5 | ||
|
|
fb474827b5 | ||
|
|
eec5fb2133 | ||
|
|
8ad7c522f4 | ||
|
|
c7f6630718 | ||
|
|
afa95293dc | ||
|
|
36582f9ac2 | ||
|
|
19852ecc24 | ||
|
|
5726d090b0 | ||
|
|
add401ffcf | ||
|
|
fd12ae2ccd | ||
|
|
e15eda3aa2 | ||
|
|
cc0adcf47f | ||
|
|
06580ce10f | ||
|
|
b78e39da2d | ||
|
|
46824a2a53 | ||
|
|
ee01289ee8 | ||
|
|
0bd22eabc7 | ||
|
|
c901352bef | ||
|
|
23ed62c1bc | ||
|
|
0ef254bc9a | ||
|
|
629d108078 | ||
|
|
6f3544fa47 | ||
|
|
cb389d29ea | ||
|
|
ac26ca2da5 | ||
|
|
d5bcb73d33 | ||
|
|
e6a18357db | ||
|
|
13ec0659ff | ||
|
|
a7fb20ab58 | ||
|
|
657da47458 | ||
|
|
a4708876a9 | ||
|
|
4239c5b557 | ||
|
|
836354bb99 | ||
|
|
a7af042e57 | ||
|
|
09476ade82 | ||
|
|
25937d7868 | ||
|
|
4e74d14beb | ||
|
|
309b7eb436 | ||
|
|
cf238cd8f7 | ||
|
|
ee46edffa3 | ||
|
|
876b3423ba | ||
|
|
2752a35e23 | ||
|
|
9e8df72c0d | ||
|
|
5439613bff | ||
|
|
3b5455bc49 | ||
|
|
104151d322 | ||
|
|
a329828bdf | ||
|
|
aa9e721e8b | ||
|
|
1b49f88be9 | ||
|
|
c345f2d548 | ||
|
|
1d731875ae | ||
|
|
0c3489c1b3 | ||
|
|
c5865c6d18 | ||
|
|
e1a0fb2f1a | ||
|
|
d725cdae13 | ||
|
|
e1bd82ea32 | ||
|
|
4bcc551b61 | ||
|
|
08019e76d8 | ||
|
|
0b32342bf0 | ||
|
|
add4e1a708 | ||
|
|
fb3105bdc0 | ||
|
|
3845acd0ce | ||
|
|
b45c68554c | ||
|
|
8a45aa4c42 | ||
|
|
51ccba12af | ||
|
|
c8699dc066 | ||
|
|
87454babfa | ||
|
|
c9ff575628 | ||
|
|
877d16273b | ||
|
|
dc5bfba902 | ||
|
|
5e7a405f34 | ||
|
|
5228f3d85c | ||
|
|
2efc75fdf5 | ||
|
|
a435fd12f0 | ||
|
|
a5d0c3528c | ||
|
|
5e981d00a4 | ||
|
|
97dc72a6e2 | ||
|
|
088b097a03 | ||
|
|
85c94e6403 | ||
|
|
a2ef1604af | ||
|
|
55dc4b0d2c | ||
|
|
18e8a3b185 | ||
|
|
3a68a0a67f | ||
|
|
7ab2d2e07a | ||
|
|
809629c0e2 | ||
|
|
2be578a33f | ||
|
|
5cff79ce50 | ||
|
|
513c8487c5 | ||
|
|
031de8da51 | ||
|
|
2e1463b9e9 | ||
|
|
9a58440296 | ||
|
|
26e0fcdb08 | ||
|
|
e835e41d59 | ||
|
|
c53c0a13be | ||
|
|
8098122dfe | ||
|
|
1d6ecbd1d5 | ||
|
|
c8276ec325 | ||
|
|
ddfad614ab | ||
|
|
8eb21749b5 | ||
|
|
a6ba25d3d4 | ||
|
|
1e70a0060b | ||
|
|
6c47f03d17 | ||
|
|
2054988790 | ||
|
|
f1ad3040b8 | ||
|
|
53ca31c112 | ||
|
|
23459a0355 | ||
|
|
a8bfe285bf | ||
|
|
0888d1a169 | ||
|
|
8b20272272 | ||
|
|
06b33e5589 | ||
|
|
9348569f90 | ||
|
|
4a9d545ffe | ||
|
|
277ee03145 | ||
|
|
6c9c17f129 | ||
|
|
bf59241dab | ||
|
|
57b7635b70 | ||
|
|
4b96266647 | ||
|
|
6266a4153d | ||
|
|
a9949a0aab | ||
|
|
428a74fa48 | ||
|
|
9f1023b195 | ||
|
|
256fc54aa1 | ||
|
|
94c1b9a434 | ||
|
|
275c15e2ae | ||
|
|
9cdbcd93cd | ||
|
|
f2e856b8a2 | ||
|
|
820f04e1e1 | ||
|
|
b7541f098c | ||
|
|
a345e80368 | ||
|
|
7a3d9a9345 | ||
|
|
a0fb6df5ba | ||
|
|
04020d5a56 | ||
|
|
f785b17314 | ||
|
|
6631c57cfb | ||
|
|
bc76dc3c34 | ||
|
|
ea4931ca3a | ||
|
|
dd20204bf0 | ||
|
|
ef46c62bc6 | ||
|
|
2bb6e03a36 | ||
|
|
2288f89415 | ||
|
|
e7ab5afc14 | ||
|
|
4db88dfaff | ||
|
|
906c95048c | ||
|
|
df38c1b1d7 | ||
|
|
af97bf1c5f | ||
|
|
a7c2d96ecf | ||
|
|
1b06b4e45b | ||
|
|
b74b9bc360 | ||
|
|
810689ce66 | ||
|
|
249d93574a | ||
|
|
e2c59f276a | ||
|
|
9804e8aa98 | ||
|
|
53e69af088 | ||
|
|
1530edbe20 | ||
|
|
7dbf32d693 | ||
|
|
49646ad994 | ||
|
|
1e652db37f | ||
|
|
88d366b0c5 | ||
|
|
65147f8d4c | ||
|
|
52b919101a | ||
|
|
24fd74d839 | ||
|
|
2599faa622 | ||
|
|
3df91cfba5 | ||
|
|
d3fab42c85 | ||
|
|
beb881492a | ||
|
|
9d7c7f9fcf | ||
|
|
419307a7c4 | ||
|
|
409dc4ad48 | ||
|
|
7704ef95a4 | ||
|
|
0db07a033b | ||
|
|
4717eb3142 | ||
|
|
c23f5c9f2c | ||
|
|
873b078bb3 | ||
|
|
0dd93a18c5 | ||
|
|
da96e2077b | ||
|
|
1d69cf11a5 | ||
|
|
adb1fbbbc4 | ||
|
|
645f2e44b9 | ||
|
|
b3aede611a | ||
|
|
72a96249b1 | ||
|
|
80dbce14ec | ||
|
|
0376f75ee3 | ||
|
|
e58bd62c68 | ||
|
|
6dbcd130b0 | ||
|
|
4639f57014 | ||
|
|
4080455c12 | ||
|
|
df7d518f38 | ||
|
|
47adfb574f | ||
|
|
4c5d0c2ec4 | ||
|
|
4febe43021 | ||
|
|
af13979855 | ||
|
|
d9f2140df3 | ||
|
|
cc80108629 | ||
|
|
16af76b968 | ||
|
|
590f0ce61f | ||
|
|
14059c6df8 | ||
|
|
268c21addd | ||
|
|
565fa4ea1f | ||
|
|
28cd7f2473 | ||
|
|
aceb1b39ba | ||
|
|
6edf06f8a4 | ||
|
|
07ae9b15d0 | ||
|
|
d676169b04 | ||
|
|
24ce3d7daa | ||
|
|
417e736746 | ||
|
|
bb8d4ca255 | ||
|
|
375af6cb1c | ||
|
|
263e0acd3a | ||
|
|
da531d0e4e | ||
|
|
844e36c8fe | ||
|
|
9976c07f89 | ||
|
|
7df9d2e938 | ||
|
|
52318f5f37 | ||
|
|
b9c2b3f7e3 | ||
|
|
a9ff5b8007 | ||
|
|
7076ba7c9d | ||
|
|
5e0088feaa | ||
|
|
f8399b2c0f | ||
|
|
415fdf4956 | ||
|
|
ad89004189 | ||
|
|
b6afbe4b29 | ||
|
|
402340955e | ||
|
|
b2a160d926 | ||
|
|
9840785363 | ||
|
|
a53c92d4b5 | ||
|
|
adc97b6c15 | ||
|
|
7b2a5d0684 | ||
|
|
acb511d395 | ||
|
|
c025390c6c | ||
|
|
942fbdedcf | ||
|
|
3bfb6707e9 | ||
|
|
5172139579 | ||
|
|
cfb43c7b58 | ||
|
|
45657ece7c | ||
|
|
f7fe2f2122 | ||
|
|
c75222e63c | ||
|
|
299250ebec | ||
|
|
ed8e242049 | ||
|
|
95e4a40ad5 | ||
|
|
e61717ce7a | ||
|
|
73b6bd8bd3 | ||
|
|
60774c69cd | ||
|
|
c383b41a12 | ||
|
|
05a8b773b9 | ||
|
|
1bee423c22 | ||
|
|
687afd23bc | ||
|
|
0020c48a15 | ||
|
|
760cbcc596 | ||
|
|
da8f4e5b57 | ||
|
|
5c0659c8df | ||
|
|
15806c2af6 | ||
|
|
97d8d16cc5 | ||
|
|
33435fa36f | ||
|
|
6fc1cfded9 | ||
|
|
a9d6a42781 | ||
|
|
f2a706ecf7 | ||
|
|
4a2ae7f6fd | ||
|
|
771ead9d7b | ||
|
|
2d5e2aa4b4 | ||
|
|
6f11524b84 | ||
|
|
561f319e3b | ||
|
|
0c9ec4b699 | ||
|
|
cbb2930805 | ||
|
|
aa29a93fbe | ||
|
|
ff4ba553c4 | ||
|
|
2f101c5054 | ||
|
|
72e2b835d9 | ||
|
|
8f6e4cd294 | ||
|
|
bd0edd4996 | ||
|
|
3f441e7090 | ||
|
|
253098d79c | ||
|
|
53ebf84339 | ||
|
|
7cfbc3eeae | ||
|
|
8d32531bc1 | ||
|
|
30d95f37d8 |
@@ -62,7 +62,7 @@
|
||||
"json.schemas": [
|
||||
{
|
||||
"fileMatch": ["homeassistant/components/*/manifest.json"],
|
||||
"url": "./script/json_schemas/manifest_schema.json"
|
||||
"url": "${containerWorkspaceFolder}/script/json_schemas/manifest_schema.json"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
11
.gitattributes
vendored
11
.gitattributes
vendored
@@ -11,3 +11,14 @@
|
||||
*.pcm binary
|
||||
|
||||
Dockerfile.dev linguist-language=Dockerfile
|
||||
|
||||
# Generated files
|
||||
CODEOWNERS linguist-generated=true
|
||||
Dockerfile linguist-generated=true
|
||||
homeassistant/generated/*.py linguist-generated=true
|
||||
mypy.ini linguist-generated=true
|
||||
requirements.txt linguist-generated=true
|
||||
requirements_all.txt linguist-generated=true
|
||||
requirements_test_all.txt linguist-generated=true
|
||||
requirements_test_pre_commit.txt linguist-generated=true
|
||||
script/hassfest/docker/Dockerfile linguist-generated=true
|
||||
|
||||
6
.github/workflows/builder.yml
vendored
6
.github/workflows/builder.yml
vendored
@@ -69,7 +69,7 @@ jobs:
|
||||
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
|
||||
|
||||
- name: Upload translations
|
||||
uses: actions/upload-artifact@v4.5.0
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: translations
|
||||
path: translations.tar.gz
|
||||
@@ -509,7 +509,7 @@ jobs:
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build Docker image
|
||||
uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0
|
||||
uses: docker/build-push-action@b32b51a8eda65d6793cd0494a773d4f6bcef32dc # v6.11.0
|
||||
with:
|
||||
context: . # So action will not pull the repository again
|
||||
file: ./script/hassfest/docker/Dockerfile
|
||||
@@ -522,7 +522,7 @@ jobs:
|
||||
- name: Push Docker image
|
||||
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
|
||||
id: push
|
||||
uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0
|
||||
uses: docker/build-push-action@b32b51a8eda65d6793cd0494a773d4f6bcef32dc # v6.11.0
|
||||
with:
|
||||
context: . # So action will not pull the repository again
|
||||
file: ./script/hassfest/docker/Dockerfile
|
||||
|
||||
28
.github/workflows/ci.yaml
vendored
28
.github/workflows/ci.yaml
vendored
@@ -40,9 +40,9 @@ env:
|
||||
CACHE_VERSION: 11
|
||||
UV_CACHE_VERSION: 1
|
||||
MYPY_CACHE_VERSION: 9
|
||||
HA_SHORT_VERSION: "2025.1"
|
||||
DEFAULT_PYTHON: "3.12"
|
||||
ALL_PYTHON_VERSIONS: "['3.12', '3.13']"
|
||||
HA_SHORT_VERSION: "2025.2"
|
||||
DEFAULT_PYTHON: "3.13"
|
||||
ALL_PYTHON_VERSIONS: "['3.13']"
|
||||
# 10.3 is the oldest supported version
|
||||
# - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
|
||||
# 10.6 is the current long-term-support
|
||||
@@ -537,7 +537,7 @@ jobs:
|
||||
python --version
|
||||
uv pip freeze >> pip_freeze.txt
|
||||
- name: Upload pip_freeze artifact
|
||||
uses: actions/upload-artifact@v4.5.0
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: pip-freeze-${{ matrix.python-version }}
|
||||
path: pip_freeze.txt
|
||||
@@ -661,7 +661,7 @@ jobs:
|
||||
. venv/bin/activate
|
||||
python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
|
||||
- name: Upload licenses
|
||||
uses: actions/upload-artifact@v4.5.0
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
|
||||
path: licenses-${{ matrix.python-version }}.json
|
||||
@@ -877,7 +877,7 @@ jobs:
|
||||
. venv/bin/activate
|
||||
python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
|
||||
- name: Upload pytest_buckets
|
||||
uses: actions/upload-artifact@v4.5.0
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: pytest_buckets
|
||||
path: pytest_buckets.txt
|
||||
@@ -979,14 +979,14 @@ jobs:
|
||||
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-full.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.5.0
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: pytest-*.txt
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.5.0
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: coverage.xml
|
||||
@@ -1106,7 +1106,7 @@ jobs:
|
||||
2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.5.0
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.mariadb }}
|
||||
@@ -1114,7 +1114,7 @@ jobs:
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.5.0
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.mariadb }}
|
||||
@@ -1236,7 +1236,7 @@ jobs:
|
||||
2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.5.0
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.postgresql }}
|
||||
@@ -1244,7 +1244,7 @@ jobs:
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.5.0
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.postgresql }}
|
||||
@@ -1378,14 +1378,14 @@ jobs:
|
||||
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.5.0
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: pytest-*.txt
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.5.0
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: coverage.xml
|
||||
|
||||
4
.github/workflows/codeql.yml
vendored
4
.github/workflows/codeql.yml
vendored
@@ -24,11 +24,11 @@ jobs:
|
||||
uses: actions/checkout@v4.2.2
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3.28.0
|
||||
uses: github/codeql-action/init@v3.28.1
|
||||
with:
|
||||
languages: python
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3.28.0
|
||||
uses: github/codeql-action/analyze@v3.28.1
|
||||
with:
|
||||
category: "/language:python"
|
||||
|
||||
2
.github/workflows/translations.yml
vendored
2
.github/workflows/translations.yml
vendored
@@ -10,7 +10,7 @@ on:
|
||||
- "**strings.json"
|
||||
|
||||
env:
|
||||
DEFAULT_PYTHON: "3.12"
|
||||
DEFAULT_PYTHON: "3.13"
|
||||
|
||||
jobs:
|
||||
upload:
|
||||
|
||||
45
.github/workflows/wheels.yml
vendored
45
.github/workflows/wheels.yml
vendored
@@ -17,7 +17,7 @@ on:
|
||||
- "script/gen_requirements_all.py"
|
||||
|
||||
env:
|
||||
DEFAULT_PYTHON: "3.12"
|
||||
DEFAULT_PYTHON: "3.13"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name}}
|
||||
@@ -76,18 +76,37 @@ jobs:
|
||||
|
||||
# Use C-Extension for SQLAlchemy
|
||||
echo "REQUIRE_SQLALCHEMY_CEXT=1"
|
||||
|
||||
# Add additional pip wheel build constraints
|
||||
echo "PIP_CONSTRAINT=build_constraints.txt"
|
||||
) > .env_file
|
||||
|
||||
- name: Write pip wheel build constraints
|
||||
run: |
|
||||
(
|
||||
# ninja 1.11.1.2 + 1.11.1.3 seem to be broken on at least armhf
|
||||
# this caused the numpy builds to fail
|
||||
# https://github.com/scikit-build/ninja-python-distributions/issues/274
|
||||
echo "ninja==1.11.1.1"
|
||||
) > build_constraints.txt
|
||||
|
||||
- name: Upload env_file
|
||||
uses: actions/upload-artifact@v4.5.0
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: env_file
|
||||
path: ./.env_file
|
||||
include-hidden-files: true
|
||||
overwrite: true
|
||||
|
||||
- name: Upload build_constraints
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: build_constraints
|
||||
path: ./build_constraints.txt
|
||||
overwrite: true
|
||||
|
||||
- name: Upload requirements_diff
|
||||
uses: actions/upload-artifact@v4.5.0
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: requirements_diff
|
||||
path: ./requirements_diff.txt
|
||||
@@ -99,7 +118,7 @@ jobs:
|
||||
python -m script.gen_requirements_all ci
|
||||
|
||||
- name: Upload requirements_all_wheels
|
||||
uses: actions/upload-artifact@v4.5.0
|
||||
uses: actions/upload-artifact@v4.6.0
|
||||
with:
|
||||
name: requirements_all_wheels
|
||||
path: ./requirements_all_wheels_*.txt
|
||||
@@ -123,6 +142,11 @@ jobs:
|
||||
with:
|
||||
name: env_file
|
||||
|
||||
- name: Download build_constraints
|
||||
uses: actions/download-artifact@v4.1.8
|
||||
with:
|
||||
name: build_constraints
|
||||
|
||||
- name: Download requirements_diff
|
||||
uses: actions/download-artifact@v4.1.8
|
||||
with:
|
||||
@@ -142,7 +166,7 @@ jobs:
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
|
||||
apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-ng-dev"
|
||||
skip-binary: aiohttp;multidict;propcache;yarl;SQLAlchemy
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
@@ -167,6 +191,11 @@ jobs:
|
||||
with:
|
||||
name: env_file
|
||||
|
||||
- name: Download build_constraints
|
||||
uses: actions/download-artifact@v4.1.8
|
||||
with:
|
||||
name: build_constraints
|
||||
|
||||
- name: Download requirements_diff
|
||||
uses: actions/download-artifact@v4.1.8
|
||||
with:
|
||||
@@ -205,7 +234,7 @@ jobs:
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
@@ -219,7 +248,7 @@ jobs:
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
@@ -233,7 +262,7 @@ jobs:
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.8.3
|
||||
rev: v0.9.1
|
||||
hooks:
|
||||
- id: ruff
|
||||
args:
|
||||
|
||||
@@ -224,6 +224,7 @@ homeassistant.components.gpsd.*
|
||||
homeassistant.components.greeneye_monitor.*
|
||||
homeassistant.components.group.*
|
||||
homeassistant.components.guardian.*
|
||||
homeassistant.components.habitica.*
|
||||
homeassistant.components.hardkernel.*
|
||||
homeassistant.components.hardware.*
|
||||
homeassistant.components.here_travel_time.*
|
||||
@@ -291,6 +292,7 @@ homeassistant.components.lcn.*
|
||||
homeassistant.components.ld2410_ble.*
|
||||
homeassistant.components.led_ble.*
|
||||
homeassistant.components.lektrico.*
|
||||
homeassistant.components.letpot.*
|
||||
homeassistant.components.lidarr.*
|
||||
homeassistant.components.lifx.*
|
||||
homeassistant.components.light.*
|
||||
@@ -311,6 +313,7 @@ homeassistant.components.manual.*
|
||||
homeassistant.components.mastodon.*
|
||||
homeassistant.components.matrix.*
|
||||
homeassistant.components.matter.*
|
||||
homeassistant.components.mcp_server.*
|
||||
homeassistant.components.mealie.*
|
||||
homeassistant.components.media_extractor.*
|
||||
homeassistant.components.media_player.*
|
||||
@@ -362,11 +365,14 @@ homeassistant.components.openuv.*
|
||||
homeassistant.components.oralb.*
|
||||
homeassistant.components.otbr.*
|
||||
homeassistant.components.overkiz.*
|
||||
homeassistant.components.overseerr.*
|
||||
homeassistant.components.p1_monitor.*
|
||||
homeassistant.components.pandora.*
|
||||
homeassistant.components.panel_custom.*
|
||||
homeassistant.components.peblar.*
|
||||
homeassistant.components.peco.*
|
||||
homeassistant.components.persistent_notification.*
|
||||
homeassistant.components.person.*
|
||||
homeassistant.components.pi_hole.*
|
||||
homeassistant.components.ping.*
|
||||
homeassistant.components.plugwise.*
|
||||
@@ -380,6 +386,8 @@ homeassistant.components.pure_energie.*
|
||||
homeassistant.components.purpleair.*
|
||||
homeassistant.components.pushbullet.*
|
||||
homeassistant.components.pvoutput.*
|
||||
homeassistant.components.python_script.*
|
||||
homeassistant.components.qbus.*
|
||||
homeassistant.components.qnap_qsw.*
|
||||
homeassistant.components.rabbitair.*
|
||||
homeassistant.components.radarr.*
|
||||
|
||||
3
.vscode/settings.default.json
vendored
3
.vscode/settings.default.json
vendored
@@ -1,5 +1,5 @@
|
||||
{
|
||||
// Please keep this file in sync with settings in home-assistant/.devcontainer/devcontainer.json
|
||||
// Please keep this file (mostly!) in sync with settings in home-assistant/.devcontainer/devcontainer.json
|
||||
// Added --no-cov to work around TypeError: message must be set
|
||||
// https://github.com/microsoft/vscode-python/issues/14067
|
||||
"python.testing.pytestArgs": ["--no-cov"],
|
||||
@@ -12,6 +12,7 @@
|
||||
"fileMatch": [
|
||||
"homeassistant/components/*/manifest.json"
|
||||
],
|
||||
// This value differs between working with devcontainer and locally, therefor this value should NOT be in sync!
|
||||
"url": "./script/json_schemas/manifest_schema.json"
|
||||
}
|
||||
]
|
||||
|
||||
46
CODEOWNERS
generated
46
CODEOWNERS
generated
@@ -637,6 +637,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/homeassistant_sky_connect/ @home-assistant/core
|
||||
/homeassistant/components/homeassistant_yellow/ @home-assistant/core
|
||||
/tests/components/homeassistant_yellow/ @home-assistant/core
|
||||
/homeassistant/components/homee/ @Taraman17
|
||||
/tests/components/homee/ @Taraman17
|
||||
/homeassistant/components/homekit/ @bdraco
|
||||
/tests/components/homekit/ @bdraco
|
||||
/homeassistant/components/homekit_controller/ @Jc2k @bdraco
|
||||
@@ -686,6 +688,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/icloud/ @Quentame @nzapponi
|
||||
/homeassistant/components/idasen_desk/ @abmantis
|
||||
/tests/components/idasen_desk/ @abmantis
|
||||
/homeassistant/components/igloohome/ @keithle888
|
||||
/tests/components/igloohome/ @keithle888
|
||||
/homeassistant/components/ign_sismologia/ @exxamalte
|
||||
/tests/components/ign_sismologia/ @exxamalte
|
||||
/homeassistant/components/image/ @home-assistant/core
|
||||
@@ -827,6 +831,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/led_ble/ @bdraco
|
||||
/homeassistant/components/lektrico/ @lektrico
|
||||
/tests/components/lektrico/ @lektrico
|
||||
/homeassistant/components/letpot/ @jpelgrom
|
||||
/tests/components/letpot/ @jpelgrom
|
||||
/homeassistant/components/lg_netcast/ @Drafteed @splinter98
|
||||
/tests/components/lg_netcast/ @Drafteed @splinter98
|
||||
/homeassistant/components/lg_thinq/ @LG-ThinQ-Integration
|
||||
@@ -887,6 +893,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/matrix/ @PaarthShah
|
||||
/homeassistant/components/matter/ @home-assistant/matter
|
||||
/tests/components/matter/ @home-assistant/matter
|
||||
/homeassistant/components/mcp_server/ @allenporter
|
||||
/tests/components/mcp_server/ @allenporter
|
||||
/homeassistant/components/mealie/ @joostlek @andrew-codechimp
|
||||
/tests/components/mealie/ @joostlek @andrew-codechimp
|
||||
/homeassistant/components/meater/ @Sotolotl @emontnemery
|
||||
@@ -1016,7 +1024,6 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/nina/ @DeerMaximum
|
||||
/tests/components/nina/ @DeerMaximum
|
||||
/homeassistant/components/nissan_leaf/ @filcole
|
||||
/homeassistant/components/nmbs/ @thibmaek
|
||||
/homeassistant/components/noaa_tides/ @jdelaney72
|
||||
/homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
|
||||
/tests/components/nobo_hub/ @echoromeo @oyvindwe
|
||||
@@ -1068,8 +1075,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/onewire/ @garbled1 @epenet
|
||||
/homeassistant/components/onkyo/ @arturpragacz @eclair4151
|
||||
/tests/components/onkyo/ @arturpragacz @eclair4151
|
||||
/homeassistant/components/onvif/ @hunterjm
|
||||
/tests/components/onvif/ @hunterjm
|
||||
/homeassistant/components/onvif/ @hunterjm @jterrace
|
||||
/tests/components/onvif/ @hunterjm @jterrace
|
||||
/homeassistant/components/open_meteo/ @frenck
|
||||
/tests/components/open_meteo/ @frenck
|
||||
/homeassistant/components/openai_conversation/ @balloob
|
||||
@@ -1103,8 +1110,10 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/otbr/ @home-assistant/core
|
||||
/homeassistant/components/ourgroceries/ @OnFreund
|
||||
/tests/components/ourgroceries/ @OnFreund
|
||||
/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
|
||||
/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
|
||||
/homeassistant/components/overkiz/ @imicknl
|
||||
/tests/components/overkiz/ @imicknl
|
||||
/homeassistant/components/overseerr/ @joostlek
|
||||
/tests/components/overseerr/ @joostlek
|
||||
/homeassistant/components/ovo_energy/ @timmo001
|
||||
/tests/components/ovo_energy/ @timmo001
|
||||
/homeassistant/components/p1_monitor/ @klaasnicolaas
|
||||
@@ -1135,8 +1144,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/plaato/ @JohNan
|
||||
/homeassistant/components/plex/ @jjlawren
|
||||
/tests/components/plex/ @jjlawren
|
||||
/homeassistant/components/plugwise/ @CoMPaTech @bouwew @frenck
|
||||
/tests/components/plugwise/ @CoMPaTech @bouwew @frenck
|
||||
/homeassistant/components/plugwise/ @CoMPaTech @bouwew
|
||||
/tests/components/plugwise/ @CoMPaTech @bouwew
|
||||
/homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
|
||||
/tests/components/plum_lightpad/ @ColinHarrington @prystupa
|
||||
/homeassistant/components/point/ @fredrike
|
||||
@@ -1182,6 +1191,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/pyload/ @tr4nt0r
|
||||
/homeassistant/components/qbittorrent/ @geoffreylagaisse @finder39
|
||||
/tests/components/qbittorrent/ @geoffreylagaisse @finder39
|
||||
/homeassistant/components/qbus/ @Qbus-iot @thomasddn
|
||||
/tests/components/qbus/ @Qbus-iot @thomasddn
|
||||
/homeassistant/components/qingping/ @bdraco
|
||||
/tests/components/qingping/ @bdraco
|
||||
/homeassistant/components/qld_bushfire/ @exxamalte
|
||||
@@ -1278,6 +1289,7 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
|
||||
/homeassistant/components/russound_rio/ @noahhusby
|
||||
/tests/components/russound_rio/ @noahhusby
|
||||
/homeassistant/components/russound_rnet/ @noahhusby
|
||||
/homeassistant/components/ruuvi_gateway/ @akx
|
||||
/tests/components/ruuvi_gateway/ @akx
|
||||
/homeassistant/components/ruuvitag_ble/ @akx
|
||||
@@ -1371,8 +1383,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/slide_local/ @dontinelli
|
||||
/homeassistant/components/slimproto/ @marcelveldt
|
||||
/tests/components/slimproto/ @marcelveldt
|
||||
/homeassistant/components/sma/ @kellerza @rklomp
|
||||
/tests/components/sma/ @kellerza @rklomp
|
||||
/homeassistant/components/sma/ @kellerza @rklomp @erwindouna
|
||||
/tests/components/sma/ @kellerza @rklomp @erwindouna
|
||||
/homeassistant/components/smappee/ @bsmappee
|
||||
/tests/components/smappee/ @bsmappee
|
||||
/homeassistant/components/smart_meter_texas/ @grahamwetzler
|
||||
@@ -1478,8 +1490,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/system_bridge/ @timmo001
|
||||
/homeassistant/components/systemmonitor/ @gjohansson-ST
|
||||
/tests/components/systemmonitor/ @gjohansson-ST
|
||||
/homeassistant/components/tado/ @chiefdragon @erwindouna
|
||||
/tests/components/tado/ @chiefdragon @erwindouna
|
||||
/homeassistant/components/tado/ @erwindouna
|
||||
/tests/components/tado/ @erwindouna
|
||||
/homeassistant/components/tag/ @balloob @dmulcahey
|
||||
/tests/components/tag/ @balloob @dmulcahey
|
||||
/homeassistant/components/tailscale/ @frenck
|
||||
@@ -1573,8 +1585,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/triggercmd/ @rvmey
|
||||
/homeassistant/components/tts/ @home-assistant/core
|
||||
/tests/components/tts/ @home-assistant/core
|
||||
/homeassistant/components/tuya/ @Tuya @zlinoliver @frenck
|
||||
/tests/components/tuya/ @Tuya @zlinoliver @frenck
|
||||
/homeassistant/components/tuya/ @Tuya @zlinoliver
|
||||
/tests/components/tuya/ @Tuya @zlinoliver
|
||||
/homeassistant/components/twentemilieu/ @frenck
|
||||
/tests/components/twentemilieu/ @frenck
|
||||
/homeassistant/components/twinkly/ @dr1rrb @Robbie1221 @Olen
|
||||
@@ -1618,15 +1630,15 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/valve/ @home-assistant/core
|
||||
/homeassistant/components/velbus/ @Cereal2nd @brefra
|
||||
/tests/components/velbus/ @Cereal2nd @brefra
|
||||
/homeassistant/components/velux/ @Julius2342 @DeerMaximum
|
||||
/tests/components/velux/ @Julius2342 @DeerMaximum
|
||||
/homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio
|
||||
/tests/components/velux/ @Julius2342 @DeerMaximum @pawlizio
|
||||
/homeassistant/components/venstar/ @garbled1 @jhollowe
|
||||
/tests/components/venstar/ @garbled1 @jhollowe
|
||||
/homeassistant/components/versasense/ @imstevenxyz
|
||||
/homeassistant/components/version/ @ludeeus
|
||||
/tests/components/version/ @ludeeus
|
||||
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja
|
||||
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja
|
||||
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
|
||||
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
|
||||
/homeassistant/components/vicare/ @CFenner
|
||||
/tests/components/vicare/ @CFenner
|
||||
/homeassistant/components/vilfo/ @ManneW
|
||||
|
||||
4
Dockerfile
generated
4
Dockerfile
generated
@@ -13,7 +13,7 @@ ENV \
|
||||
ARG QEMU_CPU
|
||||
|
||||
# Install uv
|
||||
RUN pip3 install uv==0.5.8
|
||||
RUN pip3 install uv==0.5.18
|
||||
|
||||
WORKDIR /usr/src
|
||||
|
||||
@@ -55,7 +55,7 @@ RUN \
|
||||
"armv7") go2rtc_suffix='arm' ;; \
|
||||
*) go2rtc_suffix=${BUILD_ARCH} ;; \
|
||||
esac \
|
||||
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.7/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
|
||||
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.8/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
|
||||
&& chmod +x /bin/go2rtc \
|
||||
# Verify go2rtc can be executed
|
||||
&& go2rtc --version
|
||||
|
||||
@@ -308,7 +308,7 @@ class AuthStore:
|
||||
credentials.data = data
|
||||
self._async_schedule_save()
|
||||
|
||||
async def async_load(self) -> None: # noqa: C901
|
||||
async def async_load(self) -> None:
|
||||
"""Load the users."""
|
||||
if self._loaded:
|
||||
raise RuntimeError("Auth storage is already loaded")
|
||||
|
||||
@@ -4,9 +4,8 @@ from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import types
|
||||
from typing import Any, Generic
|
||||
from typing import Any
|
||||
|
||||
from typing_extensions import TypeVar
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
@@ -35,12 +34,6 @@ DATA_REQS: HassKey[set[str]] = HassKey("mfa_auth_module_reqs_processed")
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_MultiFactorAuthModuleT = TypeVar(
|
||||
"_MultiFactorAuthModuleT",
|
||||
bound="MultiFactorAuthModule",
|
||||
default="MultiFactorAuthModule",
|
||||
)
|
||||
|
||||
|
||||
class MultiFactorAuthModule:
|
||||
"""Multi-factor Auth Module of validation function."""
|
||||
@@ -102,7 +95,9 @@ class MultiFactorAuthModule:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class SetupFlow(data_entry_flow.FlowHandler, Generic[_MultiFactorAuthModuleT]):
|
||||
class SetupFlow[_MultiFactorAuthModuleT: MultiFactorAuthModule = MultiFactorAuthModule](
|
||||
data_entry_flow.FlowHandler
|
||||
):
|
||||
"""Handler for the setup flow."""
|
||||
|
||||
def __init__(
|
||||
|
||||
@@ -17,12 +17,12 @@ POLICY_SCHEMA = vol.Schema({vol.Optional(CAT_ENTITIES): ENTITY_POLICY_SCHEMA})
|
||||
|
||||
__all__ = [
|
||||
"POLICY_SCHEMA",
|
||||
"merge_policies",
|
||||
"PermissionLookup",
|
||||
"PolicyType",
|
||||
"AbstractPermissions",
|
||||
"PolicyPermissions",
|
||||
"OwnerPermissions",
|
||||
"PermissionLookup",
|
||||
"PolicyPermissions",
|
||||
"PolicyType",
|
||||
"merge_policies",
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -5,9 +5,8 @@ from __future__ import annotations
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
import types
|
||||
from typing import Any, Generic
|
||||
from typing import Any
|
||||
|
||||
from typing_extensions import TypeVar
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
@@ -47,8 +46,6 @@ AUTH_PROVIDER_SCHEMA = vol.Schema(
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
_AuthProviderT = TypeVar("_AuthProviderT", bound="AuthProvider", default="AuthProvider")
|
||||
|
||||
|
||||
class AuthProvider:
|
||||
"""Provider of user authentication."""
|
||||
@@ -195,9 +192,8 @@ async def load_auth_provider_module(
|
||||
return module
|
||||
|
||||
|
||||
class LoginFlow(
|
||||
class LoginFlow[_AuthProviderT: AuthProvider = AuthProvider](
|
||||
FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]],
|
||||
Generic[_AuthProviderT],
|
||||
):
|
||||
"""Handler for the login flow."""
|
||||
|
||||
|
||||
@@ -119,7 +119,7 @@ def _extract_backup(
|
||||
Path(
|
||||
tempdir,
|
||||
"extracted",
|
||||
f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
|
||||
f"homeassistant.tar{'.gz' if backup_meta['compressed'] else ''}",
|
||||
),
|
||||
gzip=backup_meta["compressed"],
|
||||
key=password_to_key(restore_content.password)
|
||||
|
||||
@@ -31,7 +31,7 @@ def _check_import_call_allowed(mapped_args: dict[str, Any]) -> bool:
|
||||
def _check_file_allowed(mapped_args: dict[str, Any]) -> bool:
|
||||
# If the file is in /proc we can ignore it.
|
||||
args = mapped_args["args"]
|
||||
path = args[0] if type(args[0]) is str else str(args[0]) # noqa: E721
|
||||
path = args[0] if type(args[0]) is str else str(args[0])
|
||||
return path.startswith(ALLOWED_FILE_PREFIXES)
|
||||
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
"domain": "microsoft",
|
||||
"name": "Microsoft",
|
||||
"integrations": [
|
||||
"azure_data_explorer",
|
||||
"azure_devops",
|
||||
"azure_event_hub",
|
||||
"azure_service_bus",
|
||||
|
||||
@@ -34,17 +34,17 @@
|
||||
"services": {
|
||||
"capture_image": {
|
||||
"name": "Capture image",
|
||||
"description": "Request a new image capture from a camera device.",
|
||||
"description": "Requests a new image capture from a camera device.",
|
||||
"fields": {
|
||||
"entity_id": {
|
||||
"name": "Entity",
|
||||
"description": "Entity id of the camera to request an image."
|
||||
"description": "Entity ID of the camera to request an image from."
|
||||
}
|
||||
}
|
||||
},
|
||||
"change_setting": {
|
||||
"name": "Change setting",
|
||||
"description": "Change an Abode system setting.",
|
||||
"description": "Changes an Abode system setting.",
|
||||
"fields": {
|
||||
"setting": {
|
||||
"name": "Setting",
|
||||
@@ -58,11 +58,11 @@
|
||||
},
|
||||
"trigger_automation": {
|
||||
"name": "Trigger automation",
|
||||
"description": "Trigger an Abode automation.",
|
||||
"description": "Triggers an Abode automation.",
|
||||
"fields": {
|
||||
"entity_id": {
|
||||
"name": "Entity",
|
||||
"description": "Entity id of the automation to trigger."
|
||||
"description": "Entity ID of the automation to trigger."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -70,7 +70,7 @@ class PulseHub:
|
||||
|
||||
async def async_notify_update(self, update_type: aiopulse.UpdateType) -> None:
|
||||
"""Evaluate entities when hub reports that update has occurred."""
|
||||
LOGGER.debug("Hub {update_type.name} updated")
|
||||
LOGGER.debug("Hub %s updated", update_type.name)
|
||||
|
||||
if update_type == aiopulse.UpdateType.rollers:
|
||||
await update_devices(self.hass, self.config_entry, self.api.rollers)
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import telnetlib # pylint: disable=deprecated-module
|
||||
from typing import Final
|
||||
|
||||
import telnetlib # pylint: disable=deprecated-module
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.device_tracker import (
|
||||
|
||||
@@ -34,9 +34,12 @@ from .const import (
|
||||
SERVICE_REMOVE_URL,
|
||||
)
|
||||
|
||||
SERVICE_URL_SCHEMA = vol.Schema({vol.Required(CONF_URL): cv.url})
|
||||
SERVICE_URL_SCHEMA = vol.Schema({vol.Required(CONF_URL): vol.Any(cv.url, cv.path)})
|
||||
SERVICE_ADD_URL_SCHEMA = vol.Schema(
|
||||
{vol.Required(CONF_NAME): cv.string, vol.Required(CONF_URL): cv.url}
|
||||
{
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
vol.Required(CONF_URL): vol.Any(cv.url, cv.path),
|
||||
}
|
||||
)
|
||||
SERVICE_REFRESH_SCHEMA = vol.Schema(
|
||||
{vol.Optional(CONF_FORCE, default=False): cv.boolean}
|
||||
|
||||
@@ -66,7 +66,7 @@ class AdvantageAirZoneMotion(AdvantageAirZoneEntity, BinarySensorEntity):
|
||||
def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None:
|
||||
"""Initialize an Advantage Air Zone Motion sensor."""
|
||||
super().__init__(instance, ac_key, zone_key)
|
||||
self._attr_name = f'{self._zone["name"]} motion'
|
||||
self._attr_name = f"{self._zone['name']} motion"
|
||||
self._attr_unique_id += "-motion"
|
||||
|
||||
@property
|
||||
@@ -84,7 +84,7 @@ class AdvantageAirZoneMyZone(AdvantageAirZoneEntity, BinarySensorEntity):
|
||||
def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None:
|
||||
"""Initialize an Advantage Air Zone MyZone sensor."""
|
||||
super().__init__(instance, ac_key, zone_key)
|
||||
self._attr_name = f'{self._zone["name"]} myZone'
|
||||
self._attr_name = f"{self._zone['name']} myZone"
|
||||
self._attr_unique_id += "-myzone"
|
||||
|
||||
@property
|
||||
|
||||
@@ -103,7 +103,7 @@ class AdvantageAirZoneVent(AdvantageAirZoneEntity, SensorEntity):
|
||||
def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None:
|
||||
"""Initialize an Advantage Air Zone Vent Sensor."""
|
||||
super().__init__(instance, ac_key, zone_key=zone_key)
|
||||
self._attr_name = f'{self._zone["name"]} vent'
|
||||
self._attr_name = f"{self._zone['name']} vent"
|
||||
self._attr_unique_id += "-vent"
|
||||
|
||||
@property
|
||||
@@ -131,7 +131,7 @@ class AdvantageAirZoneSignal(AdvantageAirZoneEntity, SensorEntity):
|
||||
def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None:
|
||||
"""Initialize an Advantage Air Zone wireless signal sensor."""
|
||||
super().__init__(instance, ac_key, zone_key)
|
||||
self._attr_name = f'{self._zone["name"]} signal'
|
||||
self._attr_name = f"{self._zone['name']} signal"
|
||||
self._attr_unique_id += "-signal"
|
||||
|
||||
@property
|
||||
@@ -165,7 +165,7 @@ class AdvantageAirZoneTemp(AdvantageAirZoneEntity, SensorEntity):
|
||||
def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None:
|
||||
"""Initialize an Advantage Air Zone Temp Sensor."""
|
||||
super().__init__(instance, ac_key, zone_key)
|
||||
self._attr_name = f'{self._zone["name"]} temperature'
|
||||
self._attr_name = f"{self._zone['name']} temperature"
|
||||
self._attr_unique_id += "-temp"
|
||||
|
||||
@property
|
||||
|
||||
@@ -11,10 +11,10 @@ from airgradient import (
|
||||
from awesomeversion import AwesomeVersion
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import zeroconf
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_MODEL
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
@@ -37,7 +37,7 @@ class AirGradientConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
await self.client.set_configuration_control(ConfigurationControl.LOCAL)
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: zeroconf.ZeroconfServiceInfo
|
||||
self, discovery_info: ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle zeroconf discovery."""
|
||||
self.data[CONF_HOST] = host = discovery_info.host
|
||||
|
||||
@@ -137,6 +137,15 @@ MEASUREMENT_SENSOR_TYPES: tuple[AirGradientMeasurementSensorEntityDescription, .
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda status: status.raw_total_volatile_organic_component,
|
||||
),
|
||||
AirGradientMeasurementSensorEntityDescription(
|
||||
key="pm02_raw",
|
||||
translation_key="raw_pm02",
|
||||
device_class=SensorDeviceClass.PM25,
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda status: status.raw_pm02,
|
||||
),
|
||||
)
|
||||
|
||||
CONFIG_SENSOR_TYPES: tuple[AirGradientConfigSensorEntityDescription, ...] = (
|
||||
|
||||
@@ -119,6 +119,9 @@
|
||||
"raw_nitrogen": {
|
||||
"name": "Raw NOx"
|
||||
},
|
||||
"raw_pm02": {
|
||||
"name": "Raw PM2.5"
|
||||
},
|
||||
"display_pm_standard": {
|
||||
"name": "[%key:component::airgradient::entity::select::display_pm_standard::name%]",
|
||||
"state": {
|
||||
|
||||
@@ -39,45 +39,54 @@ SENSORS: dict[str, SensorEntityDescription] = {
|
||||
key="temp",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"humidity": SensorEntityDescription(
|
||||
key="humidity",
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"pressure": SensorEntityDescription(
|
||||
key="pressure",
|
||||
device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
|
||||
native_unit_of_measurement=UnitOfPressure.MBAR,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"battery": SensorEntityDescription(
|
||||
key="battery",
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"co2": SensorEntityDescription(
|
||||
key="co2",
|
||||
device_class=SensorDeviceClass.CO2,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"voc": SensorEntityDescription(
|
||||
key="voc",
|
||||
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"light": SensorEntityDescription(
|
||||
key="light",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
translation_key="light",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"virusRisk": SensorEntityDescription(
|
||||
key="virusRisk",
|
||||
translation_key="virus_risk",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"mold": SensorEntityDescription(
|
||||
key="mold",
|
||||
translation_key="mold",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"rssi": SensorEntityDescription(
|
||||
key="rssi",
|
||||
@@ -85,16 +94,19 @@ SENSORS: dict[str, SensorEntityDescription] = {
|
||||
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"pm1": SensorEntityDescription(
|
||||
key="pm1",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM1,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"pm25": SensorEntityDescription(
|
||||
key="pm25",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM25,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
}
|
||||
|
||||
@@ -143,8 +155,7 @@ class AirthingsHeaterEnergySensor(
|
||||
self._id = airthings_device.device_id
|
||||
self._attr_device_info = DeviceInfo(
|
||||
configuration_url=(
|
||||
"https://dashboard.airthings.com/devices/"
|
||||
f"{airthings_device.device_id}"
|
||||
f"https://dashboard.airthings.com/devices/{airthings_device.device_id}"
|
||||
),
|
||||
identifiers={(DOMAIN, airthings_device.device_id)},
|
||||
name=airthings_device.name,
|
||||
|
||||
@@ -67,18 +67,21 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
"humidity": SensorEntityDescription(
|
||||
key="humidity",
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
"pressure": SensorEntityDescription(
|
||||
key="pressure",
|
||||
device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
|
||||
native_unit_of_measurement=UnitOfPressure.MBAR,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
"battery": SensorEntityDescription(
|
||||
key="battery",
|
||||
@@ -86,24 +89,28 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
"co2": SensorEntityDescription(
|
||||
key="co2",
|
||||
device_class=SensorDeviceClass.CO2,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
"voc": SensorEntityDescription(
|
||||
key="voc",
|
||||
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
"illuminance": SensorEntityDescription(
|
||||
key="illuminance",
|
||||
translation_key="illuminance",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@@ -50,7 +50,7 @@ SENSOR_DESCRIPTIONS = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda settings, status, measurements, history: int(
|
||||
history.get(
|
||||
f'Outdoor {"AQI(US)" if settings["is_aqi_usa"] else "AQI(CN)"}', -1
|
||||
f"Outdoor {'AQI(US)' if settings['is_aqi_usa'] else 'AQI(CN)'}", -1
|
||||
)
|
||||
),
|
||||
translation_key="outdoor_air_quality_index",
|
||||
|
||||
@@ -5,7 +5,14 @@ from __future__ import annotations
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aioairzone.const import AZD_MAC, AZD_WEBSERVER, DEFAULT_SYSTEM_ID
|
||||
from aioairzone.const import (
|
||||
AZD_FIRMWARE,
|
||||
AZD_FULL_NAME,
|
||||
AZD_MAC,
|
||||
AZD_MODEL,
|
||||
AZD_WEBSERVER,
|
||||
DEFAULT_SYSTEM_ID,
|
||||
)
|
||||
from aioairzone.localapi import AirzoneLocalApi, ConnectionOptions
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -17,6 +24,7 @@ from homeassistant.helpers import (
|
||||
entity_registry as er,
|
||||
)
|
||||
|
||||
from .const import DOMAIN, MANUFACTURER
|
||||
from .coordinator import AirzoneUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
@@ -88,6 +96,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirzoneConfigEntry) -> b
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
device_registry = dr.async_get(hass)
|
||||
|
||||
ws_data: dict[str, Any] | None = coordinator.data.get(AZD_WEBSERVER)
|
||||
if ws_data is not None:
|
||||
mac = ws_data.get(AZD_MAC, "")
|
||||
|
||||
device_registry.async_get_or_create(
|
||||
config_entry_id=entry.entry_id,
|
||||
connections={(dr.CONNECTION_NETWORK_MAC, mac)},
|
||||
identifiers={(DOMAIN, f"{entry.entry_id}_ws")},
|
||||
manufacturer=MANUFACTURER,
|
||||
model=ws_data.get(AZD_MODEL),
|
||||
name=ws_data.get(AZD_FULL_NAME),
|
||||
sw_version=ws_data.get(AZD_FIRMWARE),
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
@@ -10,12 +10,12 @@ from aioairzone.exceptions import AirzoneError, InvalidSystem
|
||||
from aioairzone.localapi import AirzoneLocalApi, ConnectionOptions
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import dhcp
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_ID, CONF_PORT
|
||||
from homeassistant.data_entry_flow import AbortFlow
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
@@ -93,7 +93,7 @@ class AirZoneConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
async def async_step_dhcp(
|
||||
self, discovery_info: dhcp.DhcpServiceInfo
|
||||
self, discovery_info: DhcpServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle DHCP discovery."""
|
||||
self._discovered_ip = discovery_info.ip
|
||||
|
||||
@@ -68,8 +68,9 @@ class AirzoneSystemEntity(AirzoneEntity):
|
||||
model=self.get_airzone_value(AZD_MODEL),
|
||||
name=f"System {self.system_id}",
|
||||
sw_version=self.get_airzone_value(AZD_FIRMWARE),
|
||||
via_device=(DOMAIN, f"{entry.entry_id}_ws"),
|
||||
)
|
||||
if AZD_WEBSERVER in self.coordinator.data:
|
||||
self._attr_device_info["via_device"] = (DOMAIN, f"{entry.entry_id}_ws")
|
||||
self._attr_unique_id = entry.unique_id or entry.entry_id
|
||||
|
||||
@property
|
||||
@@ -102,8 +103,9 @@ class AirzoneHotWaterEntity(AirzoneEntity):
|
||||
manufacturer=MANUFACTURER,
|
||||
model="DHW",
|
||||
name=self.get_airzone_value(AZD_NAME),
|
||||
via_device=(DOMAIN, f"{entry.entry_id}_ws"),
|
||||
)
|
||||
if AZD_WEBSERVER in self.coordinator.data:
|
||||
self._attr_device_info["via_device"] = (DOMAIN, f"{entry.entry_id}_ws")
|
||||
self._attr_unique_id = entry.unique_id or entry.entry_id
|
||||
|
||||
def get_airzone_value(self, key: str) -> Any:
|
||||
|
||||
@@ -11,5 +11,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/airzone",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioairzone"],
|
||||
"requirements": ["aioairzone==0.9.7"]
|
||||
"requirements": ["aioairzone==0.9.9"]
|
||||
}
|
||||
|
||||
@@ -474,25 +474,30 @@ class ClimateCapabilities(AlexaEntity):
|
||||
# If we support two modes, one being off, we allow turning on too.
|
||||
supported_features = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
|
||||
if (
|
||||
self.entity.domain == climate.DOMAIN
|
||||
and climate.HVACMode.OFF
|
||||
in (self.entity.attributes.get(climate.ATTR_HVAC_MODES) or [])
|
||||
or self.entity.domain == climate.DOMAIN
|
||||
and (
|
||||
supported_features
|
||||
& (
|
||||
climate.ClimateEntityFeature.TURN_ON
|
||||
| climate.ClimateEntityFeature.TURN_OFF
|
||||
(
|
||||
self.entity.domain == climate.DOMAIN
|
||||
and climate.HVACMode.OFF
|
||||
in (self.entity.attributes.get(climate.ATTR_HVAC_MODES) or [])
|
||||
)
|
||||
or (
|
||||
self.entity.domain == climate.DOMAIN
|
||||
and (
|
||||
supported_features
|
||||
& (
|
||||
climate.ClimateEntityFeature.TURN_ON
|
||||
| climate.ClimateEntityFeature.TURN_OFF
|
||||
)
|
||||
)
|
||||
)
|
||||
or self.entity.domain == water_heater.DOMAIN
|
||||
and (supported_features & water_heater.WaterHeaterEntityFeature.ON_OFF)
|
||||
or (
|
||||
self.entity.domain == water_heater.DOMAIN
|
||||
and (supported_features & water_heater.WaterHeaterEntityFeature.ON_OFF)
|
||||
)
|
||||
):
|
||||
yield AlexaPowerController(self.entity)
|
||||
|
||||
if (
|
||||
self.entity.domain == climate.DOMAIN
|
||||
or self.entity.domain == water_heater.DOMAIN
|
||||
if self.entity.domain == climate.DOMAIN or (
|
||||
self.entity.domain == water_heater.DOMAIN
|
||||
and (
|
||||
supported_features
|
||||
& water_heater.WaterHeaterEntityFeature.OPERATION_MODE
|
||||
|
||||
@@ -317,9 +317,8 @@ async def async_enable_proactive_mode(
|
||||
|
||||
if should_doorbell:
|
||||
old_state = data["old_state"]
|
||||
if (
|
||||
new_state.domain == event.DOMAIN
|
||||
or new_state.state == STATE_ON
|
||||
if new_state.domain == event.DOMAIN or (
|
||||
new_state.state == STATE_ON
|
||||
and (old_state is None or old_state.state != STATE_ON)
|
||||
):
|
||||
await async_send_doorbell_event_message(
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"invalid_unique_id": "Impossible to determine a valid unique id for the device"
|
||||
"invalid_unique_id": "Impossible to determine a valid unique ID for the device"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
@@ -38,17 +38,17 @@
|
||||
}
|
||||
},
|
||||
"apps": {
|
||||
"title": "Configure Android Apps",
|
||||
"description": "Configure application id {app_id}",
|
||||
"title": "Configure Android apps",
|
||||
"description": "Configure application ID {app_id}",
|
||||
"data": {
|
||||
"app_name": "Application Name",
|
||||
"app_name": "Application name",
|
||||
"app_id": "Application ID",
|
||||
"app_delete": "Check to delete this application"
|
||||
}
|
||||
},
|
||||
"rules": {
|
||||
"title": "Configure Android state detection rules",
|
||||
"description": "Configure detection rule for application id {rule_id}",
|
||||
"description": "Configure detection rule for application ID {rule_id}",
|
||||
"data": {
|
||||
"rule_id": "[%key:component::androidtv::options::step::apps::data::app_id%]",
|
||||
"rule_values": "List of state detection rules (see documentation)",
|
||||
|
||||
@@ -14,7 +14,6 @@ from androidtvremote2 import (
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import zeroconf
|
||||
from homeassistant.config_entries import (
|
||||
SOURCE_REAUTH,
|
||||
ConfigEntry,
|
||||
@@ -31,6 +30,7 @@ from homeassistant.helpers.selector import (
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
)
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import CONF_APP_ICON, CONF_APP_NAME, CONF_APPS, CONF_ENABLE_IME, DOMAIN
|
||||
from .helpers import create_api, get_enable_ime
|
||||
@@ -142,7 +142,7 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: zeroconf.ZeroconfServiceInfo
|
||||
self, discovery_info: ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle zeroconf discovery."""
|
||||
_LOGGER.debug("Android TV device found via zeroconf: %s", discovery_info)
|
||||
|
||||
@@ -44,12 +44,12 @@
|
||||
}
|
||||
},
|
||||
"apps": {
|
||||
"title": "Configure Android Apps",
|
||||
"description": "Configure application id {app_id}",
|
||||
"title": "Configure Android apps",
|
||||
"description": "Configure application ID {app_id}",
|
||||
"data": {
|
||||
"app_name": "Application Name",
|
||||
"app_name": "Application name",
|
||||
"app_id": "Application ID",
|
||||
"app_icon": "Application Icon",
|
||||
"app_icon": "Application icon",
|
||||
"app_delete": "Check to delete this application"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,6 +34,7 @@ from homeassistant.helpers.schema_config_entry_flow import (
|
||||
SchemaFlowFormStep,
|
||||
SchemaOptionsFlowHandler,
|
||||
)
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import CONF_CREDENTIALS, CONF_IDENTIFIERS, CONF_START_OFF, DOMAIN
|
||||
|
||||
@@ -204,7 +205,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: zeroconf.ZeroconfServiceInfo
|
||||
self, discovery_info: ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle device found via zeroconf."""
|
||||
if discovery_info.ip_address.version == 6:
|
||||
|
||||
@@ -38,7 +38,7 @@ from homeassistant.loader import (
|
||||
from homeassistant.util import slugify
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
__all__ = ["ClientCredential", "AuthorizationServer", "async_import_client_credential"]
|
||||
__all__ = ["AuthorizationServer", "ClientCredential", "async_import_client_credential"]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -50,7 +50,7 @@ async def async_setup_entry(
|
||||
|
||||
descriptions: list[AprilaireHumidifierDescription] = []
|
||||
|
||||
if coordinator.data.get(Attribute.HUMIDIFICATION_AVAILABLE) in (0, 1, 2):
|
||||
if coordinator.data.get(Attribute.HUMIDIFICATION_AVAILABLE) in (1, 2):
|
||||
descriptions.append(
|
||||
AprilaireHumidifierDescription(
|
||||
key="humidifier",
|
||||
@@ -67,7 +67,7 @@ async def async_setup_entry(
|
||||
)
|
||||
)
|
||||
|
||||
if coordinator.data.get(Attribute.DEHUMIDIFICATION_AVAILABLE) in (0, 1):
|
||||
if coordinator.data.get(Attribute.DEHUMIDIFICATION_AVAILABLE) == 1:
|
||||
descriptions.append(
|
||||
AprilaireHumidifierDescription(
|
||||
key="dehumidifier",
|
||||
|
||||
@@ -29,6 +29,8 @@ class ApSystemsSensorData:
|
||||
class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
|
||||
"""Coordinator used for all sensors."""
|
||||
|
||||
device_version: str
|
||||
|
||||
def __init__(self, hass: HomeAssistant, api: APsystemsEZ1M) -> None:
|
||||
"""Initialize my coordinator."""
|
||||
super().__init__(
|
||||
@@ -46,6 +48,7 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
|
||||
raise UpdateFailed from None
|
||||
self.api.max_power = device_info.maxPower
|
||||
self.api.min_power = device_info.minPower
|
||||
self.device_version = device_info.devVer
|
||||
|
||||
async def _async_update_data(self) -> ApSystemsSensorData:
|
||||
try:
|
||||
|
||||
@@ -21,7 +21,8 @@ class ApSystemsEntity(Entity):
|
||||
"""Initialize the APsystems entity."""
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, data.device_id)},
|
||||
serial_number=data.device_id,
|
||||
manufacturer="APsystems",
|
||||
model="EZ1-M",
|
||||
serial_number=data.device_id,
|
||||
sw_version=data.coordinator.device_version.split(" ")[1],
|
||||
)
|
||||
|
||||
@@ -19,5 +19,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/aranet",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["aranet4==2.4.0"]
|
||||
"requirements": ["aranet4==2.5.0"]
|
||||
}
|
||||
|
||||
@@ -22,6 +22,7 @@ from homeassistant.components.sensor import (
|
||||
)
|
||||
from homeassistant.const import (
|
||||
ATTR_MANUFACTURER,
|
||||
ATTR_MODEL,
|
||||
ATTR_NAME,
|
||||
ATTR_SW_VERSION,
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
@@ -142,6 +143,7 @@ def _sensor_device_info_to_hass(
|
||||
if adv.readings and adv.readings.name:
|
||||
hass_device_info[ATTR_NAME] = adv.readings.name
|
||||
hass_device_info[ATTR_MANUFACTURER] = ARANET_MANUFACTURER_NAME
|
||||
hass_device_info[ATTR_MODEL] = adv.readings.type.model
|
||||
if adv.manufacturer_data:
|
||||
hass_device_info[ATTR_SW_VERSION] = str(adv.manufacturer_data.version)
|
||||
return hass_device_info
|
||||
|
||||
@@ -9,10 +9,10 @@ from arcam.fmj.client import Client, ConnectionFailed
|
||||
from arcam.fmj.utils import get_uniqueid_from_host, get_uniqueid_from_udn
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import ssdp
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.service_info.ssdp import ATTR_UPNP_UDN, SsdpServiceInfo
|
||||
|
||||
from .const import DEFAULT_NAME, DEFAULT_PORT, DOMAIN
|
||||
|
||||
@@ -88,12 +88,12 @@ class ArcamFmjFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
async def async_step_ssdp(
|
||||
self, discovery_info: ssdp.SsdpServiceInfo
|
||||
self, discovery_info: SsdpServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a discovered device."""
|
||||
host = str(urlparse(discovery_info.ssdp_location).hostname)
|
||||
port = DEFAULT_PORT
|
||||
uuid = get_uniqueid_from_udn(discovery_info.upnp[ssdp.ATTR_UPNP_UDN])
|
||||
uuid = get_uniqueid_from_udn(discovery_info.upnp[ATTR_UPNP_UDN])
|
||||
if not uuid:
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
|
||||
|
||||
@@ -90,7 +90,7 @@ class ArubaDeviceScanner(DeviceScanner):
|
||||
"""Retrieve data from Aruba Access Point and return parsed result."""
|
||||
|
||||
connect = f"ssh {self.username}@{self.host} -o HostKeyAlgorithms=ssh-rsa"
|
||||
ssh = pexpect.spawn(connect)
|
||||
ssh: pexpect.spawn[str] = pexpect.spawn(connect, encoding="utf-8")
|
||||
query = ssh.expect(
|
||||
[
|
||||
"password:",
|
||||
@@ -125,12 +125,12 @@ class ArubaDeviceScanner(DeviceScanner):
|
||||
ssh.expect("#")
|
||||
ssh.sendline("show clients")
|
||||
ssh.expect("#")
|
||||
devices_result = ssh.before.split(b"\r\n")
|
||||
devices_result = (ssh.before or "").splitlines()
|
||||
ssh.sendline("exit")
|
||||
|
||||
devices: dict[str, dict[str, str]] = {}
|
||||
for device in devices_result:
|
||||
if match := _DEVICES_REGEX.search(device.decode("utf-8")):
|
||||
if match := _DEVICES_REGEX.search(device):
|
||||
devices[match.group("ip")] = {
|
||||
"ip": match.group("ip"),
|
||||
"mac": match.group("mac").upper(),
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pexpect", "ptyprocess"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["pexpect==4.6.0"]
|
||||
"requirements": ["pexpect==4.9.0"]
|
||||
}
|
||||
|
||||
@@ -46,24 +46,24 @@ from .websocket_api import async_register_websocket_api
|
||||
|
||||
__all__ = (
|
||||
"DOMAIN",
|
||||
"async_create_default_pipeline",
|
||||
"async_get_pipelines",
|
||||
"async_migrate_engine",
|
||||
"async_setup",
|
||||
"async_pipeline_from_audio_stream",
|
||||
"async_update_pipeline",
|
||||
"EVENT_RECORDING",
|
||||
"OPTION_PREFERRED",
|
||||
"SAMPLES_PER_CHUNK",
|
||||
"SAMPLE_CHANNELS",
|
||||
"SAMPLE_RATE",
|
||||
"SAMPLE_WIDTH",
|
||||
"AudioSettings",
|
||||
"Pipeline",
|
||||
"PipelineEvent",
|
||||
"PipelineEventType",
|
||||
"PipelineNotFound",
|
||||
"WakeWordSettings",
|
||||
"EVENT_RECORDING",
|
||||
"OPTION_PREFERRED",
|
||||
"SAMPLES_PER_CHUNK",
|
||||
"SAMPLE_RATE",
|
||||
"SAMPLE_WIDTH",
|
||||
"SAMPLE_CHANNELS",
|
||||
"async_create_default_pipeline",
|
||||
"async_get_pipelines",
|
||||
"async_migrate_engine",
|
||||
"async_pipeline_from_audio_stream",
|
||||
"async_setup",
|
||||
"async_update_pipeline",
|
||||
)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
@@ -108,6 +108,7 @@ async def async_pipeline_from_audio_stream(
|
||||
device_id: str | None = None,
|
||||
start_stage: PipelineStage = PipelineStage.STT,
|
||||
end_stage: PipelineStage = PipelineStage.TTS,
|
||||
conversation_extra_system_prompt: str | None = None,
|
||||
) -> None:
|
||||
"""Create an audio pipeline from an audio stream.
|
||||
|
||||
@@ -119,6 +120,7 @@ async def async_pipeline_from_audio_stream(
|
||||
stt_metadata=stt_metadata,
|
||||
stt_stream=stt_stream,
|
||||
wake_word_phrase=wake_word_phrase,
|
||||
conversation_extra_system_prompt=conversation_extra_system_prompt,
|
||||
run=PipelineRun(
|
||||
hass,
|
||||
context=context,
|
||||
|
||||
@@ -50,6 +50,7 @@ from homeassistant.util import (
|
||||
language as language_util,
|
||||
ulid as ulid_util,
|
||||
)
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
from homeassistant.util.limited_size_dict import LimitedSizeDict
|
||||
|
||||
from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer
|
||||
@@ -91,6 +92,8 @@ ENGINE_LANGUAGE_PAIRS = (
|
||||
("tts_engine", "tts_language"),
|
||||
)
|
||||
|
||||
KEY_ASSIST_PIPELINE: HassKey[PipelineData] = HassKey(DOMAIN)
|
||||
|
||||
|
||||
def validate_language(data: dict[str, Any]) -> Any:
|
||||
"""Validate language settings."""
|
||||
@@ -248,7 +251,7 @@ async def async_create_default_pipeline(
|
||||
The default pipeline will use the homeassistant conversation agent and the
|
||||
specified stt / tts engines.
|
||||
"""
|
||||
pipeline_data: PipelineData = hass.data[DOMAIN]
|
||||
pipeline_data = hass.data[KEY_ASSIST_PIPELINE]
|
||||
pipeline_store = pipeline_data.pipeline_store
|
||||
pipeline_settings = _async_resolve_default_pipeline_settings(
|
||||
hass,
|
||||
@@ -283,7 +286,7 @@ def _async_get_pipeline_from_conversation_entity(
|
||||
@callback
|
||||
def async_get_pipeline(hass: HomeAssistant, pipeline_id: str | None = None) -> Pipeline:
|
||||
"""Get a pipeline by id or the preferred pipeline."""
|
||||
pipeline_data: PipelineData = hass.data[DOMAIN]
|
||||
pipeline_data = hass.data[KEY_ASSIST_PIPELINE]
|
||||
|
||||
if pipeline_id is None:
|
||||
# A pipeline was not specified, use the preferred one
|
||||
@@ -306,7 +309,7 @@ def async_get_pipeline(hass: HomeAssistant, pipeline_id: str | None = None) -> P
|
||||
@callback
|
||||
def async_get_pipelines(hass: HomeAssistant) -> list[Pipeline]:
|
||||
"""Get all pipelines."""
|
||||
pipeline_data: PipelineData = hass.data[DOMAIN]
|
||||
pipeline_data = hass.data[KEY_ASSIST_PIPELINE]
|
||||
|
||||
return list(pipeline_data.pipeline_store.data.values())
|
||||
|
||||
@@ -329,7 +332,7 @@ async def async_update_pipeline(
|
||||
prefer_local_intents: bool | UndefinedType = UNDEFINED,
|
||||
) -> None:
|
||||
"""Update a pipeline."""
|
||||
pipeline_data: PipelineData = hass.data[DOMAIN]
|
||||
pipeline_data = hass.data[KEY_ASSIST_PIPELINE]
|
||||
|
||||
updates: dict[str, Any] = pipeline.to_json()
|
||||
updates.pop("id")
|
||||
@@ -587,7 +590,7 @@ class PipelineRun:
|
||||
):
|
||||
raise InvalidPipelineStagesError(self.start_stage, self.end_stage)
|
||||
|
||||
pipeline_data: PipelineData = self.hass.data[DOMAIN]
|
||||
pipeline_data = self.hass.data[KEY_ASSIST_PIPELINE]
|
||||
if self.pipeline.id not in pipeline_data.pipeline_debug:
|
||||
pipeline_data.pipeline_debug[self.pipeline.id] = LimitedSizeDict(
|
||||
size_limit=STORED_PIPELINE_RUNS
|
||||
@@ -615,7 +618,7 @@ class PipelineRun:
|
||||
def process_event(self, event: PipelineEvent) -> None:
|
||||
"""Log an event and call listener."""
|
||||
self.event_callback(event)
|
||||
pipeline_data: PipelineData = self.hass.data[DOMAIN]
|
||||
pipeline_data = self.hass.data[KEY_ASSIST_PIPELINE]
|
||||
if self.id not in pipeline_data.pipeline_debug[self.pipeline.id]:
|
||||
# This run has been evicted from the logged pipeline runs already
|
||||
return
|
||||
@@ -650,7 +653,7 @@ class PipelineRun:
|
||||
)
|
||||
)
|
||||
|
||||
pipeline_data: PipelineData = self.hass.data[DOMAIN]
|
||||
pipeline_data = self.hass.data[KEY_ASSIST_PIPELINE]
|
||||
pipeline_data.pipeline_runs.remove_run(self)
|
||||
|
||||
async def prepare_wake_word_detection(self) -> None:
|
||||
@@ -1010,16 +1013,29 @@ class PipelineRun:
|
||||
self.intent_agent = agent_info.id
|
||||
|
||||
async def recognize_intent(
|
||||
self, intent_input: str, conversation_id: str | None, device_id: str | None
|
||||
self,
|
||||
intent_input: str,
|
||||
conversation_id: str | None,
|
||||
device_id: str | None,
|
||||
conversation_extra_system_prompt: str | None,
|
||||
) -> str:
|
||||
"""Run intent recognition portion of pipeline. Returns text to speak."""
|
||||
if self.intent_agent is None:
|
||||
raise RuntimeError("Recognize intent was not prepared")
|
||||
|
||||
if self.pipeline.conversation_language == MATCH_ALL:
|
||||
# LLMs support all languages ('*') so use pipeline language for
|
||||
# intent fallback.
|
||||
input_language = self.pipeline.language
|
||||
# LLMs support all languages ('*') so use languages from the
|
||||
# pipeline for intent fallback.
|
||||
#
|
||||
# We prioritize the STT and TTS languages because they may be more
|
||||
# specific, such as "zh-CN" instead of just "zh". This is necessary
|
||||
# for languages whose intents are split out by region when
|
||||
# preferring local intent matching.
|
||||
input_language = (
|
||||
self.pipeline.stt_language
|
||||
or self.pipeline.tts_language
|
||||
or self.pipeline.language
|
||||
)
|
||||
else:
|
||||
input_language = self.pipeline.conversation_language
|
||||
|
||||
@@ -1045,10 +1061,12 @@ class PipelineRun:
|
||||
device_id=device_id,
|
||||
language=input_language,
|
||||
agent_id=self.intent_agent,
|
||||
extra_system_prompt=conversation_extra_system_prompt,
|
||||
)
|
||||
processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT
|
||||
|
||||
conversation_result: conversation.ConversationResult | None = None
|
||||
agent_id = user_input.agent_id
|
||||
intent_response: intent.IntentResponse | None = None
|
||||
if user_input.agent_id != conversation.HOME_ASSISTANT_AGENT:
|
||||
# Sentence triggers override conversation agent
|
||||
if (
|
||||
@@ -1058,14 +1076,12 @@ class PipelineRun:
|
||||
)
|
||||
) is not None:
|
||||
# Sentence trigger matched
|
||||
trigger_response = intent.IntentResponse(
|
||||
agent_id = "sentence_trigger"
|
||||
intent_response = intent.IntentResponse(
|
||||
self.pipeline.conversation_language
|
||||
)
|
||||
trigger_response.async_set_speech(trigger_response_text)
|
||||
conversation_result = conversation.ConversationResult(
|
||||
response=trigger_response,
|
||||
conversation_id=user_input.conversation_id,
|
||||
)
|
||||
intent_response.async_set_speech(trigger_response_text)
|
||||
|
||||
# Try local intents first, if preferred.
|
||||
elif self.pipeline.prefer_local_intents and (
|
||||
intent_response := await conversation.async_handle_intents(
|
||||
@@ -1073,13 +1089,31 @@ class PipelineRun:
|
||||
)
|
||||
):
|
||||
# Local intent matched
|
||||
conversation_result = conversation.ConversationResult(
|
||||
response=intent_response,
|
||||
conversation_id=user_input.conversation_id,
|
||||
)
|
||||
agent_id = conversation.HOME_ASSISTANT_AGENT
|
||||
processed_locally = True
|
||||
|
||||
if conversation_result is None:
|
||||
# It was already handled, create response and add to chat history
|
||||
if intent_response is not None:
|
||||
async with conversation.async_get_chat_session(
|
||||
self.hass, user_input
|
||||
) as chat_session:
|
||||
speech: str = intent_response.speech.get("plain", {}).get(
|
||||
"speech", ""
|
||||
)
|
||||
chat_session.async_add_message(
|
||||
conversation.ChatMessage(
|
||||
role="assistant",
|
||||
agent_id=agent_id,
|
||||
content=speech,
|
||||
native=intent_response,
|
||||
)
|
||||
)
|
||||
conversation_result = conversation.ConversationResult(
|
||||
response=intent_response,
|
||||
conversation_id=chat_session.conversation_id,
|
||||
)
|
||||
|
||||
else:
|
||||
# Fall back to pipeline conversation agent
|
||||
conversation_result = await conversation.async_converse(
|
||||
hass=self.hass,
|
||||
@@ -1090,6 +1124,10 @@ class PipelineRun:
|
||||
language=user_input.language,
|
||||
agent_id=user_input.agent_id,
|
||||
)
|
||||
speech = conversation_result.response.speech.get("plain", {}).get(
|
||||
"speech", ""
|
||||
)
|
||||
|
||||
except Exception as src_error:
|
||||
_LOGGER.exception("Unexpected error during intent recognition")
|
||||
raise IntentRecognitionError(
|
||||
@@ -1109,10 +1147,6 @@ class PipelineRun:
|
||||
)
|
||||
)
|
||||
|
||||
speech: str = conversation_result.response.speech.get("plain", {}).get(
|
||||
"speech", ""
|
||||
)
|
||||
|
||||
return speech
|
||||
|
||||
async def prepare_text_to_speech(self) -> None:
|
||||
@@ -1213,7 +1247,7 @@ class PipelineRun:
|
||||
return
|
||||
|
||||
# Forward to device audio capture
|
||||
pipeline_data: PipelineData = self.hass.data[DOMAIN]
|
||||
pipeline_data = self.hass.data[KEY_ASSIST_PIPELINE]
|
||||
audio_queue = pipeline_data.device_audio_queues.get(self._device_id)
|
||||
if audio_queue is None:
|
||||
return
|
||||
@@ -1392,8 +1426,13 @@ class PipelineInput:
|
||||
"""Input for text-to-speech. Required when start_stage = tts."""
|
||||
|
||||
conversation_id: str | None = None
|
||||
"""Identifier for the conversation."""
|
||||
|
||||
conversation_extra_system_prompt: str | None = None
|
||||
"""Extra prompt information for the conversation agent."""
|
||||
|
||||
device_id: str | None = None
|
||||
"""Identifier of the device that is processing the input/output of the pipeline."""
|
||||
|
||||
async def execute(self) -> None:
|
||||
"""Run pipeline."""
|
||||
@@ -1453,9 +1492,9 @@ class PipelineInput:
|
||||
if stt_audio_buffer:
|
||||
# Send audio in the buffer first to speech-to-text, then move on to stt_stream.
|
||||
# This is basically an async itertools.chain.
|
||||
async def buffer_then_audio_stream() -> (
|
||||
AsyncGenerator[EnhancedAudioChunk]
|
||||
):
|
||||
async def buffer_then_audio_stream() -> AsyncGenerator[
|
||||
EnhancedAudioChunk
|
||||
]:
|
||||
# Buffered audio
|
||||
for chunk in stt_audio_buffer:
|
||||
yield chunk
|
||||
@@ -1483,6 +1522,7 @@ class PipelineInput:
|
||||
intent_input,
|
||||
self.conversation_id,
|
||||
self.device_id,
|
||||
self.conversation_extra_system_prompt,
|
||||
)
|
||||
if tts_input.strip():
|
||||
current_stage = PipelineStage.TTS
|
||||
@@ -1864,7 +1904,7 @@ class PipelineStore(Store[SerializedPipelineStorageCollection]):
|
||||
return old_data
|
||||
|
||||
|
||||
@singleton(DOMAIN)
|
||||
@singleton(KEY_ASSIST_PIPELINE, async_=True)
|
||||
async def async_setup_pipeline_store(hass: HomeAssistant) -> PipelineData:
|
||||
"""Set up the pipeline storage collection."""
|
||||
pipeline_store = PipelineStorageCollection(
|
||||
|
||||
@@ -9,8 +9,8 @@ from homeassistant.const import EntityCategory, Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import collection, entity_registry as er, restore_state
|
||||
|
||||
from .const import DOMAIN, OPTION_PREFERRED
|
||||
from .pipeline import AssistDevice, PipelineData, PipelineStorageCollection
|
||||
from .const import OPTION_PREFERRED
|
||||
from .pipeline import KEY_ASSIST_PIPELINE, AssistDevice
|
||||
from .vad import VadSensitivity
|
||||
|
||||
|
||||
@@ -30,7 +30,7 @@ def get_chosen_pipeline(
|
||||
if state is None or state.state == OPTION_PREFERRED:
|
||||
return None
|
||||
|
||||
pipeline_store: PipelineStorageCollection = hass.data[DOMAIN].pipeline_store
|
||||
pipeline_store = hass.data[KEY_ASSIST_PIPELINE].pipeline_store
|
||||
return next(
|
||||
(item.id for item in pipeline_store.async_items() if item.name == state.state),
|
||||
None,
|
||||
@@ -80,7 +80,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
|
||||
"""When entity is added to Home Assistant."""
|
||||
await super().async_added_to_hass()
|
||||
|
||||
pipeline_data: PipelineData = self.hass.data[DOMAIN]
|
||||
pipeline_data = self.hass.data[KEY_ASSIST_PIPELINE]
|
||||
pipeline_store = pipeline_data.pipeline_store
|
||||
self.async_on_remove(
|
||||
pipeline_store.async_add_change_set_listener(self._pipelines_updated)
|
||||
@@ -116,9 +116,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
|
||||
@callback
|
||||
def _update_options(self) -> None:
|
||||
"""Handle pipeline update."""
|
||||
pipeline_store: PipelineStorageCollection = self.hass.data[
|
||||
DOMAIN
|
||||
].pipeline_store
|
||||
pipeline_store = self.hass.data[KEY_ASSIST_PIPELINE].pipeline_store
|
||||
options = [OPTION_PREFERRED]
|
||||
options.extend(sorted(item.name for item in pipeline_store.async_items()))
|
||||
self._attr_options = options
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
"""Assist pipeline Websocket API."""
|
||||
|
||||
import asyncio
|
||||
|
||||
# Suppressing disable=deprecated-module is needed for Python 3.11
|
||||
import audioop # pylint: disable=deprecated-module
|
||||
import base64
|
||||
from collections.abc import AsyncGenerator, Callable
|
||||
import contextlib
|
||||
@@ -11,6 +8,8 @@ import logging
|
||||
import math
|
||||
from typing import Any, Final
|
||||
|
||||
# Suppressing disable=deprecated-module is needed for Python 3.11
|
||||
import audioop # pylint: disable=deprecated-module
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import conversation, stt, tts, websocket_api
|
||||
@@ -22,7 +21,6 @@ from homeassistant.util import language as language_util
|
||||
from .const import (
|
||||
DEFAULT_PIPELINE_TIMEOUT,
|
||||
DEFAULT_WAKE_WORD_TIMEOUT,
|
||||
DOMAIN,
|
||||
EVENT_RECORDING,
|
||||
SAMPLE_CHANNELS,
|
||||
SAMPLE_RATE,
|
||||
@@ -30,9 +28,9 @@ from .const import (
|
||||
)
|
||||
from .error import PipelineNotFound
|
||||
from .pipeline import (
|
||||
KEY_ASSIST_PIPELINE,
|
||||
AudioSettings,
|
||||
DeviceAudioQueue,
|
||||
PipelineData,
|
||||
PipelineError,
|
||||
PipelineEvent,
|
||||
PipelineEventType,
|
||||
@@ -284,7 +282,7 @@ def websocket_list_runs(
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""List pipeline runs for which debug data is available."""
|
||||
pipeline_data: PipelineData = hass.data[DOMAIN]
|
||||
pipeline_data = hass.data[KEY_ASSIST_PIPELINE]
|
||||
pipeline_id = msg["pipeline_id"]
|
||||
|
||||
if pipeline_id not in pipeline_data.pipeline_debug:
|
||||
@@ -320,7 +318,7 @@ def websocket_list_devices(
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""List assist devices."""
|
||||
pipeline_data: PipelineData = hass.data[DOMAIN]
|
||||
pipeline_data = hass.data[KEY_ASSIST_PIPELINE]
|
||||
ent_reg = er.async_get(hass)
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
@@ -351,7 +349,7 @@ def websocket_get_run(
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Get debug data for a pipeline run."""
|
||||
pipeline_data: PipelineData = hass.data[DOMAIN]
|
||||
pipeline_data = hass.data[KEY_ASSIST_PIPELINE]
|
||||
pipeline_id = msg["pipeline_id"]
|
||||
pipeline_run_id = msg["pipeline_run_id"]
|
||||
|
||||
@@ -456,7 +454,7 @@ async def websocket_device_capture(
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Capture raw audio from a satellite device and forward to client."""
|
||||
pipeline_data: PipelineData = hass.data[DOMAIN]
|
||||
pipeline_data = hass.data[KEY_ASSIST_PIPELINE]
|
||||
device_id = msg["device_id"]
|
||||
|
||||
# Number of seconds to record audio in wall clock time
|
||||
|
||||
@@ -30,8 +30,8 @@ from .websocket_api import async_register_websocket_api
|
||||
__all__ = [
|
||||
"DOMAIN",
|
||||
"AssistSatelliteAnnouncement",
|
||||
"AssistSatelliteEntity",
|
||||
"AssistSatelliteConfiguration",
|
||||
"AssistSatelliteEntity",
|
||||
"AssistSatelliteEntityDescription",
|
||||
"AssistSatelliteEntityFeature",
|
||||
"AssistSatelliteWakeWord",
|
||||
|
||||
@@ -187,47 +187,10 @@ class AssistSatelliteEntity(entity.Entity):
|
||||
"""
|
||||
await self._cancel_running_pipeline()
|
||||
|
||||
media_id_source: Literal["url", "media_id", "tts"] | None = None
|
||||
|
||||
if message is None:
|
||||
message = ""
|
||||
|
||||
if not media_id:
|
||||
media_id_source = "tts"
|
||||
# Synthesize audio and get URL
|
||||
pipeline_id = self._resolve_pipeline()
|
||||
pipeline = async_get_pipeline(self.hass, pipeline_id)
|
||||
|
||||
tts_options: dict[str, Any] = {}
|
||||
if pipeline.tts_voice is not None:
|
||||
tts_options[tts.ATTR_VOICE] = pipeline.tts_voice
|
||||
|
||||
if self.tts_options is not None:
|
||||
tts_options.update(self.tts_options)
|
||||
|
||||
media_id = tts_generate_media_source_id(
|
||||
self.hass,
|
||||
message,
|
||||
engine=pipeline.tts_engine,
|
||||
language=pipeline.tts_language,
|
||||
options=tts_options,
|
||||
)
|
||||
|
||||
if media_source.is_media_source_id(media_id):
|
||||
if not media_id_source:
|
||||
media_id_source = "media_id"
|
||||
media = await media_source.async_resolve_media(
|
||||
self.hass,
|
||||
media_id,
|
||||
None,
|
||||
)
|
||||
media_id = media.url
|
||||
|
||||
if not media_id_source:
|
||||
media_id_source = "url"
|
||||
|
||||
# Resolve to full URL
|
||||
media_id = async_process_play_media_url(self.hass, media_id)
|
||||
announcement = await self._resolve_announcement_media_id(message, media_id)
|
||||
|
||||
if self._is_announcing:
|
||||
raise SatelliteBusyError
|
||||
@@ -237,9 +200,7 @@ class AssistSatelliteEntity(entity.Entity):
|
||||
|
||||
try:
|
||||
# Block until announcement is finished
|
||||
await self.async_announce(
|
||||
AssistSatelliteAnnouncement(message, media_id, media_id_source)
|
||||
)
|
||||
await self.async_announce(announcement)
|
||||
finally:
|
||||
self._is_announcing = False
|
||||
self._set_state(AssistSatelliteState.IDLE)
|
||||
@@ -428,3 +389,48 @@ class AssistSatelliteEntity(entity.Entity):
|
||||
vad_sensitivity = vad.VadSensitivity(vad_sensitivity_state.state)
|
||||
|
||||
return vad.VadSensitivity.to_seconds(vad_sensitivity)
|
||||
|
||||
async def _resolve_announcement_media_id(
|
||||
self, message: str, media_id: str | None
|
||||
) -> AssistSatelliteAnnouncement:
|
||||
"""Resolve the media ID."""
|
||||
media_id_source: Literal["url", "media_id", "tts"] | None = None
|
||||
|
||||
if not media_id:
|
||||
media_id_source = "tts"
|
||||
# Synthesize audio and get URL
|
||||
pipeline_id = self._resolve_pipeline()
|
||||
pipeline = async_get_pipeline(self.hass, pipeline_id)
|
||||
|
||||
tts_options: dict[str, Any] = {}
|
||||
if pipeline.tts_voice is not None:
|
||||
tts_options[tts.ATTR_VOICE] = pipeline.tts_voice
|
||||
|
||||
if self.tts_options is not None:
|
||||
tts_options.update(self.tts_options)
|
||||
|
||||
media_id = tts_generate_media_source_id(
|
||||
self.hass,
|
||||
message,
|
||||
engine=pipeline.tts_engine,
|
||||
language=pipeline.tts_language,
|
||||
options=tts_options,
|
||||
)
|
||||
|
||||
if media_source.is_media_source_id(media_id):
|
||||
if not media_id_source:
|
||||
media_id_source = "media_id"
|
||||
media = await media_source.async_resolve_media(
|
||||
self.hass,
|
||||
media_id,
|
||||
None,
|
||||
)
|
||||
media_id = media.url
|
||||
|
||||
if not media_id_source:
|
||||
media_id_source = "url"
|
||||
|
||||
# Resolve to full URL
|
||||
media_id = async_process_play_media_url(self.hass, media_id)
|
||||
|
||||
return AssistSatelliteAnnouncement(message, media_id, media_id_source)
|
||||
|
||||
69
homeassistant/components/assist_satellite/intent.py
Normal file
69
homeassistant/components/assist_satellite/intent.py
Normal file
@@ -0,0 +1,69 @@
|
||||
"""Assist Satellite intents."""
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er, intent
|
||||
|
||||
from .const import DOMAIN, AssistSatelliteEntityFeature
|
||||
|
||||
|
||||
async def async_setup_intents(hass: HomeAssistant) -> None:
|
||||
"""Set up the intents."""
|
||||
intent.async_register(hass, BroadcastIntentHandler())
|
||||
|
||||
|
||||
class BroadcastIntentHandler(intent.IntentHandler):
|
||||
"""Broadcast a message."""
|
||||
|
||||
intent_type = intent.INTENT_BROADCAST
|
||||
description = "Broadcast a message through the home"
|
||||
|
||||
@property
|
||||
def slot_schema(self) -> dict | None:
|
||||
"""Return a slot schema."""
|
||||
return {vol.Required("message"): str}
|
||||
|
||||
async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse:
|
||||
"""Broadcast a message."""
|
||||
hass = intent_obj.hass
|
||||
ent_reg = er.async_get(hass)
|
||||
|
||||
# Find all assist satellite entities that are not the one invoking the intent
|
||||
entities = {
|
||||
entity: entry
|
||||
for entity in hass.states.async_entity_ids(DOMAIN)
|
||||
if (entry := ent_reg.async_get(entity))
|
||||
and entry.supported_features & AssistSatelliteEntityFeature.ANNOUNCE
|
||||
}
|
||||
|
||||
if intent_obj.device_id:
|
||||
entities = {
|
||||
entity: entry
|
||||
for entity, entry in entities.items()
|
||||
if entry.device_id != intent_obj.device_id
|
||||
}
|
||||
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"announce",
|
||||
{"message": intent_obj.slots["message"]["value"]},
|
||||
blocking=True,
|
||||
context=intent_obj.context,
|
||||
target={"entity_id": list(entities)},
|
||||
)
|
||||
|
||||
response = intent_obj.create_response()
|
||||
response.async_set_speech("Done")
|
||||
response.response_type = intent.IntentResponseType.ACTION_DONE
|
||||
response.async_set_results(
|
||||
success_results=[
|
||||
intent.IntentResponseTarget(
|
||||
type=intent.IntentResponseTargetType.ENTITY,
|
||||
id=entity,
|
||||
name=state.name if (state := hass.states.get(entity)) else entity,
|
||||
)
|
||||
for entity in entities
|
||||
]
|
||||
)
|
||||
return response
|
||||
@@ -31,8 +31,8 @@
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"abort": {
|
||||
"invalid_unique_id": "Impossible to determine a valid unique id for the device",
|
||||
"no_unique_id": "A device without a valid unique id is already configured. Configuration of multiple instance is not possible"
|
||||
"invalid_unique_id": "Impossible to determine a valid unique ID for the device",
|
||||
"no_unique_id": "A device without a valid unique ID is already configured. Configuration of multiple instances is not possible"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
@@ -42,7 +42,7 @@
|
||||
"consider_home": "Seconds to wait before considering a device away",
|
||||
"track_unknown": "Track unknown / unnamed devices",
|
||||
"interface": "The interface that you want statistics from (e.g. eth0, eth1 etc)",
|
||||
"dnsmasq": "The location in the router of the dnsmasq.leases files",
|
||||
"dnsmasq": "The location of the dnsmasq.leases file in the router",
|
||||
"require_ip": "Devices must have IP (for access point mode)"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/aussie_broadband",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aussiebb"],
|
||||
"requirements": ["pyaussiebb==0.1.4"]
|
||||
"requirements": ["pyaussiebb==0.1.5"]
|
||||
}
|
||||
|
||||
@@ -636,9 +636,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
alias = ""
|
||||
if "trigger" in run_variables:
|
||||
if "description" in run_variables["trigger"]:
|
||||
reason = f' by {run_variables["trigger"]["description"]}'
|
||||
reason = f" by {run_variables['trigger']['description']}"
|
||||
if "alias" in run_variables["trigger"]:
|
||||
alias = f' trigger \'{run_variables["trigger"]["alias"]}\''
|
||||
alias = f" trigger '{run_variables['trigger']['alias']}'"
|
||||
self._logger.debug("Automation%s triggered%s", alias, reason)
|
||||
|
||||
# Create a new context referring to the old context.
|
||||
|
||||
@@ -11,11 +11,12 @@ from python_awair.exceptions import AuthError, AwairError
|
||||
from python_awair.user import AwairUser
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import onboarding, zeroconf
|
||||
from homeassistant.components import onboarding
|
||||
from homeassistant.config_entries import SOURCE_ZEROCONF, ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DEVICE, CONF_HOST
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import DOMAIN, LOGGER
|
||||
|
||||
@@ -29,7 +30,7 @@ class AwairFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
host: str
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: zeroconf.ZeroconfServiceInfo
|
||||
self, discovery_info: ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle zeroconf discovery."""
|
||||
|
||||
|
||||
@@ -10,7 +10,6 @@ from urllib.parse import urlsplit
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import dhcp, ssdp, zeroconf
|
||||
from homeassistant.config_entries import (
|
||||
SOURCE_IGNORE,
|
||||
SOURCE_REAUTH,
|
||||
@@ -32,6 +31,14 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
|
||||
from homeassistant.helpers.service_info.ssdp import (
|
||||
ATTR_UPNP_FRIENDLY_NAME,
|
||||
ATTR_UPNP_PRESENTATION_URL,
|
||||
ATTR_UPNP_SERIAL,
|
||||
SsdpServiceInfo,
|
||||
)
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
from homeassistant.helpers.typing import VolDictType
|
||||
from homeassistant.util.network import is_link_local
|
||||
|
||||
@@ -190,7 +197,7 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
|
||||
return await self.async_step_user()
|
||||
|
||||
async def async_step_dhcp(
|
||||
self, discovery_info: dhcp.DhcpServiceInfo
|
||||
self, discovery_info: DhcpServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Prepare configuration for a DHCP discovered Axis device."""
|
||||
return await self._process_discovered_device(
|
||||
@@ -203,21 +210,21 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
|
||||
)
|
||||
|
||||
async def async_step_ssdp(
|
||||
self, discovery_info: ssdp.SsdpServiceInfo
|
||||
self, discovery_info: SsdpServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Prepare configuration for a SSDP discovered Axis device."""
|
||||
url = urlsplit(discovery_info.upnp[ssdp.ATTR_UPNP_PRESENTATION_URL])
|
||||
url = urlsplit(discovery_info.upnp[ATTR_UPNP_PRESENTATION_URL])
|
||||
return await self._process_discovered_device(
|
||||
{
|
||||
CONF_HOST: url.hostname,
|
||||
CONF_MAC: format_mac(discovery_info.upnp[ssdp.ATTR_UPNP_SERIAL]),
|
||||
CONF_NAME: f"{discovery_info.upnp[ssdp.ATTR_UPNP_FRIENDLY_NAME]}",
|
||||
CONF_MAC: format_mac(discovery_info.upnp[ATTR_UPNP_SERIAL]),
|
||||
CONF_NAME: f"{discovery_info.upnp[ATTR_UPNP_FRIENDLY_NAME]}",
|
||||
CONF_PORT: url.port,
|
||||
}
|
||||
)
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: zeroconf.ZeroconfServiceInfo
|
||||
self, discovery_info: ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Prepare configuration for a Zeroconf discovered Axis device."""
|
||||
return await self._process_discovered_device(
|
||||
|
||||
@@ -2,10 +2,10 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"title": "Setup your Azure Data Explorer integration",
|
||||
"title": "Set up Azure Data Explorer",
|
||||
"description": "Enter connection details",
|
||||
"data": {
|
||||
"cluster_ingest_uri": "Cluster Ingest URI",
|
||||
"cluster_ingest_uri": "Cluster ingestion URI",
|
||||
"authority_id": "Authority ID",
|
||||
"client_id": "Client ID",
|
||||
"client_secret": "Client secret",
|
||||
@@ -14,7 +14,7 @@
|
||||
"use_queued_ingestion": "Use queued ingestion"
|
||||
},
|
||||
"data_description": {
|
||||
"cluster_ingest_uri": "Ingest-URI of the cluster",
|
||||
"cluster_ingest_uri": "Ingestion URI of the cluster",
|
||||
"use_queued_ingestion": "Must be enabled when using ADX free cluster"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,26 +2,26 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"title": "Set up your Azure Event Hub integration",
|
||||
"title": "Set up Azure Event Hub",
|
||||
"data": {
|
||||
"event_hub_instance_name": "Event Hub Instance Name",
|
||||
"use_connection_string": "Use Connection String"
|
||||
"event_hub_instance_name": "Event Hub instance name",
|
||||
"use_connection_string": "Use connection string"
|
||||
}
|
||||
},
|
||||
"conn_string": {
|
||||
"title": "Connection String method",
|
||||
"title": "Connection string method",
|
||||
"description": "Please enter the connection string for: {event_hub_instance_name}",
|
||||
"data": {
|
||||
"event_hub_connection_string": "Event Hub Connection String"
|
||||
"event_hub_connection_string": "Event Hub connection string"
|
||||
}
|
||||
},
|
||||
"sas": {
|
||||
"title": "SAS Credentials method",
|
||||
"title": "SAS credentials method",
|
||||
"description": "Please enter the SAS (shared access signature) credentials for: {event_hub_instance_name}",
|
||||
"data": {
|
||||
"event_hub_namespace": "Event Hub Namespace",
|
||||
"event_hub_sas_policy": "Event Hub SAS Policy",
|
||||
"event_hub_sas_key": "Event Hub SAS Key"
|
||||
"event_hub_namespace": "Event Hub namespace",
|
||||
"event_hub_sas_policy": "Event Hub SAS policy",
|
||||
"event_hub_sas_key": "Event Hub SAS key"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -38,7 +38,7 @@
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"title": "Options for the Azure Event Hub.",
|
||||
"title": "Options for Azure Event Hub.",
|
||||
"data": {
|
||||
"send_interval": "Interval between sending batches to the hub."
|
||||
}
|
||||
|
||||
@@ -35,7 +35,6 @@ from .websocket import async_register_websocket_handlers
|
||||
__all__ = [
|
||||
"AddonInfo",
|
||||
"AgentBackup",
|
||||
"ManagerBackup",
|
||||
"BackupAgent",
|
||||
"BackupAgentError",
|
||||
"BackupAgentPlatformProtocol",
|
||||
@@ -46,6 +45,7 @@ __all__ = [
|
||||
"Folder",
|
||||
"IncorrectPasswordError",
|
||||
"LocalBackupAgent",
|
||||
"ManagerBackup",
|
||||
"NewBackup",
|
||||
"WrittenBackup",
|
||||
]
|
||||
|
||||
@@ -7,6 +7,7 @@ from collections.abc import Callable
|
||||
from dataclasses import dataclass, field, replace
|
||||
from datetime import datetime, timedelta
|
||||
from enum import StrEnum
|
||||
import random
|
||||
from typing import TYPE_CHECKING, Self, TypedDict
|
||||
|
||||
from cronsim import CronSim
|
||||
@@ -28,6 +29,10 @@ if TYPE_CHECKING:
|
||||
CRON_PATTERN_DAILY = "45 4 * * *"
|
||||
CRON_PATTERN_WEEKLY = "45 4 * * {}"
|
||||
|
||||
# Randomize the start time of the backup by up to 60 minutes to avoid
|
||||
# all backups running at the same time.
|
||||
BACKUP_START_TIME_JITTER = 60 * 60
|
||||
|
||||
|
||||
class StoredBackupConfig(TypedDict):
|
||||
"""Represent the stored backup config."""
|
||||
@@ -329,6 +334,8 @@ class BackupSchedule:
|
||||
except Exception: # noqa: BLE001
|
||||
LOGGER.exception("Unexpected error creating automatic backup")
|
||||
|
||||
next_time += timedelta(seconds=random.randint(0, BACKUP_START_TIME_JITTER))
|
||||
LOGGER.debug("Scheduling next automatic backup at %s", next_time)
|
||||
manager.remove_next_backup_event = async_track_point_in_time(
|
||||
manager.hass, _create_backup, next_time
|
||||
)
|
||||
|
||||
@@ -4,18 +4,23 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from http import HTTPStatus
|
||||
from typing import cast
|
||||
import threading
|
||||
from typing import IO, cast
|
||||
|
||||
from aiohttp import BodyPartReader
|
||||
from aiohttp.hdrs import CONTENT_DISPOSITION
|
||||
from aiohttp.web import FileResponse, Request, Response, StreamResponse
|
||||
from multidict import istr
|
||||
|
||||
from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from . import util
|
||||
from .agent import BackupAgent
|
||||
from .const import DATA_MANAGER
|
||||
from .manager import BackupManager
|
||||
|
||||
|
||||
@callback
|
||||
@@ -43,8 +48,13 @@ class DownloadBackupView(HomeAssistantView):
|
||||
agent_id = request.query.getone("agent_id")
|
||||
except KeyError:
|
||||
return Response(status=HTTPStatus.BAD_REQUEST)
|
||||
try:
|
||||
password = request.query.getone("password")
|
||||
except KeyError:
|
||||
password = None
|
||||
|
||||
manager = request.app[KEY_HASS].data[DATA_MANAGER]
|
||||
hass = request.app[KEY_HASS]
|
||||
manager = hass.data[DATA_MANAGER]
|
||||
if agent_id not in manager.backup_agents:
|
||||
return Response(status=HTTPStatus.BAD_REQUEST)
|
||||
agent = manager.backup_agents[agent_id]
|
||||
@@ -58,6 +68,24 @@ class DownloadBackupView(HomeAssistantView):
|
||||
headers = {
|
||||
CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
|
||||
}
|
||||
|
||||
if not password:
|
||||
return await self._send_backup_no_password(
|
||||
request, headers, backup_id, agent_id, agent, manager
|
||||
)
|
||||
return await self._send_backup_with_password(
|
||||
hass, request, headers, backup_id, agent_id, password, agent, manager
|
||||
)
|
||||
|
||||
async def _send_backup_no_password(
|
||||
self,
|
||||
request: Request,
|
||||
headers: dict[istr, str],
|
||||
backup_id: str,
|
||||
agent_id: str,
|
||||
agent: BackupAgent,
|
||||
manager: BackupManager,
|
||||
) -> StreamResponse | FileResponse | Response:
|
||||
if agent_id in manager.local_backup_agents:
|
||||
local_agent = manager.local_backup_agents[agent_id]
|
||||
path = local_agent.get_backup_path(backup_id)
|
||||
@@ -70,6 +98,50 @@ class DownloadBackupView(HomeAssistantView):
|
||||
await response.write(chunk)
|
||||
return response
|
||||
|
||||
async def _send_backup_with_password(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
request: Request,
|
||||
headers: dict[istr, str],
|
||||
backup_id: str,
|
||||
agent_id: str,
|
||||
password: str,
|
||||
agent: BackupAgent,
|
||||
manager: BackupManager,
|
||||
) -> StreamResponse | FileResponse | Response:
|
||||
reader: IO[bytes]
|
||||
if agent_id in manager.local_backup_agents:
|
||||
local_agent = manager.local_backup_agents[agent_id]
|
||||
path = local_agent.get_backup_path(backup_id)
|
||||
try:
|
||||
reader = await hass.async_add_executor_job(open, path.as_posix(), "rb")
|
||||
except FileNotFoundError:
|
||||
return Response(status=HTTPStatus.NOT_FOUND)
|
||||
else:
|
||||
stream = await agent.async_download_backup(backup_id)
|
||||
reader = cast(IO[bytes], util.AsyncIteratorReader(hass, stream))
|
||||
|
||||
worker_done_event = asyncio.Event()
|
||||
|
||||
def on_done() -> None:
|
||||
"""Call by the worker thread when it's done."""
|
||||
hass.loop.call_soon_threadsafe(worker_done_event.set)
|
||||
|
||||
stream = util.AsyncIteratorWriter(hass)
|
||||
worker = threading.Thread(
|
||||
target=util.decrypt_backup, args=[reader, stream, password, on_done]
|
||||
)
|
||||
try:
|
||||
worker.start()
|
||||
response = StreamResponse(status=HTTPStatus.OK, headers=headers)
|
||||
await response.prepare(request)
|
||||
async for chunk in stream:
|
||||
await response.write(chunk)
|
||||
return response
|
||||
finally:
|
||||
reader.close()
|
||||
await worker_done_event.wait()
|
||||
|
||||
|
||||
class UploadBackupView(HomeAssistantView):
|
||||
"""Generate backup view."""
|
||||
|
||||
@@ -10,11 +10,11 @@ from enum import StrEnum
|
||||
import hashlib
|
||||
import io
|
||||
import json
|
||||
from pathlib import Path
|
||||
from pathlib import Path, PurePath
|
||||
import shutil
|
||||
import tarfile
|
||||
import time
|
||||
from typing import TYPE_CHECKING, Any, Protocol, TypedDict
|
||||
from typing import IO, TYPE_CHECKING, Any, Protocol, TypedDict, cast
|
||||
|
||||
import aiohttp
|
||||
from securetar import SecureTarFile, atomic_contents_add
|
||||
@@ -31,6 +31,7 @@ from homeassistant.helpers import (
|
||||
from homeassistant.helpers.json import json_bytes
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import util as backup_util
|
||||
from .agent import (
|
||||
BackupAgent,
|
||||
BackupAgentError,
|
||||
@@ -48,7 +49,13 @@ from .const import (
|
||||
)
|
||||
from .models import AgentBackup, BackupManagerError, Folder
|
||||
from .store import BackupStore
|
||||
from .util import make_backup_dir, read_backup, validate_password
|
||||
from .util import (
|
||||
AsyncIteratorReader,
|
||||
make_backup_dir,
|
||||
read_backup,
|
||||
validate_password,
|
||||
validate_password_stream,
|
||||
)
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True, slots=True)
|
||||
@@ -248,6 +255,14 @@ class BackupReaderWriterError(HomeAssistantError):
|
||||
class IncorrectPasswordError(BackupReaderWriterError):
|
||||
"""Raised when the password is incorrect."""
|
||||
|
||||
_message = "The password provided is incorrect."
|
||||
|
||||
|
||||
class DecryptOnDowloadNotSupported(BackupManagerError):
|
||||
"""Raised when on-the-fly decryption is not supported."""
|
||||
|
||||
_message = "On-the-fly decryption is not supported for this backup."
|
||||
|
||||
|
||||
class BackupManager:
|
||||
"""Define the format that backup managers can have."""
|
||||
@@ -430,17 +445,21 @@ class BackupManager:
|
||||
return_exceptions=True,
|
||||
)
|
||||
for idx, result in enumerate(sync_backup_results):
|
||||
agent_id = agent_ids[idx]
|
||||
if isinstance(result, BackupReaderWriterError):
|
||||
# writer errors will affect all agents
|
||||
# no point in continuing
|
||||
raise BackupManagerError(str(result)) from result
|
||||
if isinstance(result, BackupAgentError):
|
||||
agent_errors[agent_ids[idx]] = result
|
||||
agent_errors[agent_id] = result
|
||||
LOGGER.error("Upload failed for %s: %s", agent_id, result)
|
||||
continue
|
||||
if isinstance(result, Exception):
|
||||
# trap bugs from agents
|
||||
agent_errors[agent_ids[idx]] = result
|
||||
LOGGER.error("Unexpected error: %s", result, exc_info=result)
|
||||
agent_errors[agent_id] = result
|
||||
LOGGER.error(
|
||||
"Unexpected error for %s: %s", agent_id, result, exc_info=result
|
||||
)
|
||||
continue
|
||||
if isinstance(result, BaseException):
|
||||
raise result
|
||||
@@ -752,7 +771,7 @@ class BackupManager:
|
||||
|
||||
backup_name = (
|
||||
name
|
||||
or f"{"Automatic" if with_automatic_settings else "Custom"} backup {HAVERSION}"
|
||||
or f"{'Automatic' if with_automatic_settings else 'Custom'} backup {HAVERSION}"
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -800,12 +819,10 @@ class BackupManager:
|
||||
"""Finish a backup."""
|
||||
if TYPE_CHECKING:
|
||||
assert self._backup_task is not None
|
||||
backup_success = False
|
||||
try:
|
||||
written_backup = await self._backup_task
|
||||
except Exception as err:
|
||||
self.async_on_backup_event(
|
||||
CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
|
||||
)
|
||||
if with_automatic_settings:
|
||||
self._update_issue_backup_failed()
|
||||
|
||||
@@ -831,33 +848,15 @@ class BackupManager:
|
||||
agent_ids=agent_ids,
|
||||
open_stream=written_backup.open_stream,
|
||||
)
|
||||
except BaseException:
|
||||
self.async_on_backup_event(
|
||||
CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
|
||||
)
|
||||
raise # manager or unexpected error
|
||||
finally:
|
||||
try:
|
||||
await written_backup.release_stream()
|
||||
except Exception:
|
||||
self.async_on_backup_event(
|
||||
CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
|
||||
)
|
||||
raise
|
||||
await written_backup.release_stream()
|
||||
self.known_backups.add(written_backup.backup, agent_errors)
|
||||
if agent_errors:
|
||||
self.async_on_backup_event(
|
||||
CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
|
||||
)
|
||||
else:
|
||||
if not agent_errors:
|
||||
if with_automatic_settings:
|
||||
# create backup was successful, update last_completed_automatic_backup
|
||||
self.config.data.last_completed_automatic_backup = dt_util.now()
|
||||
self.store.save()
|
||||
|
||||
self.async_on_backup_event(
|
||||
CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED)
|
||||
)
|
||||
backup_success = True
|
||||
|
||||
if with_automatic_settings:
|
||||
self._update_issue_after_agent_upload(agent_errors)
|
||||
@@ -868,6 +867,14 @@ class BackupManager:
|
||||
finally:
|
||||
self._backup_task = None
|
||||
self._backup_finish_task = None
|
||||
self.async_on_backup_event(
|
||||
CreateBackupEvent(
|
||||
stage=None,
|
||||
state=CreateBackupState.COMPLETED
|
||||
if backup_success
|
||||
else CreateBackupState.FAILED,
|
||||
)
|
||||
)
|
||||
self.async_on_backup_event(IdleEvent())
|
||||
|
||||
async def async_restore_backup(
|
||||
@@ -998,6 +1005,41 @@ class BackupManager:
|
||||
translation_placeholders={"failed_agents": ", ".join(agent_errors)},
|
||||
)
|
||||
|
||||
async def async_can_decrypt_on_download(
|
||||
self,
|
||||
backup_id: str,
|
||||
*,
|
||||
agent_id: str,
|
||||
password: str | None,
|
||||
) -> None:
|
||||
"""Check if we are able to decrypt the backup on download."""
|
||||
try:
|
||||
agent = self.backup_agents[agent_id]
|
||||
except KeyError as err:
|
||||
raise BackupManagerError(f"Invalid agent selected: {agent_id}") from err
|
||||
if not await agent.async_get_backup(backup_id):
|
||||
raise BackupManagerError(
|
||||
f"Backup {backup_id} not found in agent {agent_id}"
|
||||
)
|
||||
reader: IO[bytes]
|
||||
if agent_id in self.local_backup_agents:
|
||||
local_agent = self.local_backup_agents[agent_id]
|
||||
path = local_agent.get_backup_path(backup_id)
|
||||
reader = await self.hass.async_add_executor_job(open, path.as_posix(), "rb")
|
||||
else:
|
||||
backup_stream = await agent.async_download_backup(backup_id)
|
||||
reader = cast(IO[bytes], AsyncIteratorReader(self.hass, backup_stream))
|
||||
try:
|
||||
validate_password_stream(reader, password)
|
||||
except backup_util.IncorrectPassword as err:
|
||||
raise IncorrectPasswordError from err
|
||||
except backup_util.UnsupportedSecureTarVersion as err:
|
||||
raise DecryptOnDowloadNotSupported from err
|
||||
except backup_util.DecryptError as err:
|
||||
raise BackupManagerError(str(err)) from err
|
||||
finally:
|
||||
reader.close()
|
||||
|
||||
|
||||
class KnownBackups:
|
||||
"""Track known backups."""
|
||||
@@ -1242,6 +1284,17 @@ class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
if not database_included:
|
||||
excludes = excludes + EXCLUDE_DATABASE_FROM_BACKUP
|
||||
|
||||
def is_excluded_by_filter(path: PurePath) -> bool:
|
||||
"""Filter to filter excludes."""
|
||||
|
||||
for exclude in excludes:
|
||||
if not path.match(exclude):
|
||||
continue
|
||||
LOGGER.debug("Ignoring %s because of %s", path, exclude)
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
outer_secure_tarfile = SecureTarFile(
|
||||
tar_file_path, "w", gzip=False, bufsize=BUF_SIZE
|
||||
)
|
||||
@@ -1260,7 +1313,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
atomic_contents_add(
|
||||
tar_file=core_tar,
|
||||
origin_path=Path(self._hass.config.path()),
|
||||
excludes=excludes,
|
||||
file_filter=is_excluded_by_filter,
|
||||
arcname="data",
|
||||
)
|
||||
return (tar_file_path, tar_file_path.stat().st_size)
|
||||
@@ -1369,7 +1422,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
validate_password, path, password
|
||||
)
|
||||
if not password_valid:
|
||||
raise IncorrectPasswordError("The password provided is incorrect.")
|
||||
raise IncorrectPasswordError
|
||||
|
||||
def _write_restore_file() -> None:
|
||||
"""Write the restore file."""
|
||||
@@ -1387,7 +1440,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
)
|
||||
|
||||
await self._hass.async_add_executor_job(_write_restore_file)
|
||||
await self._hass.services.async_call("homeassistant", "restart", {})
|
||||
await self._hass.services.async_call("homeassistant", "restart", blocking=True)
|
||||
|
||||
|
||||
def _generate_backup_id(date: str, name: str) -> str:
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "calculated",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["cronsim==2.6", "securetar==2024.11.0"]
|
||||
"requirements": ["cronsim==2.6", "securetar==2025.1.3"]
|
||||
}
|
||||
|
||||
@@ -3,22 +3,51 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
from collections.abc import AsyncIterator, Callable
|
||||
import copy
|
||||
from io import BytesIO
|
||||
import json
|
||||
from pathlib import Path, PurePath
|
||||
from queue import SimpleQueue
|
||||
import tarfile
|
||||
from typing import cast
|
||||
from typing import IO, Self, cast
|
||||
|
||||
import aiohttp
|
||||
from securetar import SecureTarFile
|
||||
from securetar import SecureTarError, SecureTarFile, SecureTarReadError
|
||||
|
||||
from homeassistant.backup_restore import password_to_key
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.util.json import JsonObjectType, json_loads_object
|
||||
|
||||
from .const import BUF_SIZE, LOGGER
|
||||
from .models import AddonInfo, AgentBackup, Folder
|
||||
|
||||
|
||||
class DecryptError(HomeAssistantError):
|
||||
"""Error during decryption."""
|
||||
|
||||
_message = "Unexpected error during decryption."
|
||||
|
||||
|
||||
class UnsupportedSecureTarVersion(DecryptError):
|
||||
"""Unsupported securetar version."""
|
||||
|
||||
_message = "Unsupported securetar version."
|
||||
|
||||
|
||||
class IncorrectPassword(DecryptError):
|
||||
"""Invalid password or corrupted backup."""
|
||||
|
||||
_message = "Invalid password or corrupted backup."
|
||||
|
||||
|
||||
class BackupEmpty(DecryptError):
|
||||
"""No tar files found in the backup."""
|
||||
|
||||
_message = "No tar files found in the backup."
|
||||
|
||||
|
||||
def make_backup_dir(path: Path) -> None:
|
||||
"""Create a backup directory if it does not exist."""
|
||||
path.mkdir(exist_ok=True)
|
||||
@@ -106,6 +135,159 @@ def validate_password(path: Path, password: str | None) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
class AsyncIteratorReader:
|
||||
"""Wrap an AsyncIterator."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None:
|
||||
"""Initialize the wrapper."""
|
||||
self._hass = hass
|
||||
self._stream = stream
|
||||
self._buffer: bytes | None = None
|
||||
self._pos: int = 0
|
||||
|
||||
async def _next(self) -> bytes | None:
|
||||
"""Get the next chunk from the iterator."""
|
||||
return await anext(self._stream, None)
|
||||
|
||||
def read(self, n: int = -1, /) -> bytes:
|
||||
"""Read data from the iterator."""
|
||||
result = bytearray()
|
||||
while n < 0 or len(result) < n:
|
||||
if not self._buffer:
|
||||
self._buffer = asyncio.run_coroutine_threadsafe(
|
||||
self._next(), self._hass.loop
|
||||
).result()
|
||||
self._pos = 0
|
||||
if not self._buffer:
|
||||
# The stream is exhausted
|
||||
break
|
||||
chunk = self._buffer[self._pos : self._pos + n]
|
||||
result.extend(chunk)
|
||||
n -= len(chunk)
|
||||
self._pos += len(chunk)
|
||||
if self._pos == len(self._buffer):
|
||||
self._buffer = None
|
||||
return bytes(result)
|
||||
|
||||
def close(self) -> None:
|
||||
"""Close the iterator."""
|
||||
|
||||
|
||||
class AsyncIteratorWriter:
|
||||
"""Wrap an AsyncIterator."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the wrapper."""
|
||||
self._hass = hass
|
||||
self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1)
|
||||
|
||||
def __aiter__(self) -> Self:
|
||||
"""Return the iterator."""
|
||||
return self
|
||||
|
||||
async def __anext__(self) -> bytes:
|
||||
"""Get the next chunk from the iterator."""
|
||||
if data := await self._queue.get():
|
||||
return data
|
||||
raise StopAsyncIteration
|
||||
|
||||
def write(self, s: bytes, /) -> int:
|
||||
"""Write data to the iterator."""
|
||||
asyncio.run_coroutine_threadsafe(self._queue.put(s), self._hass.loop).result()
|
||||
return len(s)
|
||||
|
||||
|
||||
def validate_password_stream(
|
||||
input_stream: IO[bytes],
|
||||
password: str | None,
|
||||
) -> None:
|
||||
"""Decrypt a backup."""
|
||||
with (
|
||||
tarfile.open(fileobj=input_stream, mode="r|", bufsize=BUF_SIZE) as input_tar,
|
||||
):
|
||||
for obj in input_tar:
|
||||
if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
|
||||
continue
|
||||
istf = SecureTarFile(
|
||||
None, # Not used
|
||||
gzip=False,
|
||||
key=password_to_key(password) if password is not None else None,
|
||||
mode="r",
|
||||
fileobj=input_tar.extractfile(obj),
|
||||
)
|
||||
with istf.decrypt(obj) as decrypted:
|
||||
if istf.securetar_header.plaintext_size is None:
|
||||
raise UnsupportedSecureTarVersion
|
||||
try:
|
||||
decrypted.read(1) # Read a single byte to trigger the decryption
|
||||
except SecureTarReadError as err:
|
||||
raise IncorrectPassword from err
|
||||
return
|
||||
raise BackupEmpty
|
||||
|
||||
|
||||
def decrypt_backup(
    input_stream: IO[bytes],
    output_stream: IO[bytes],
    password: str | None,
    on_done: Callable[[], None],
) -> None:
    """Decrypt a backup.

    Streams the outer tar from input_stream into output_stream with every
    inner tar decrypted. Errors are logged rather than raised; on_done is
    always invoked, and an empty chunk is always written so the consumer
    sees end-of-stream.
    """
    try:
        with (
            tarfile.open(
                fileobj=input_stream, mode="r|", bufsize=BUF_SIZE
            ) as input_tar,
            tarfile.open(
                fileobj=output_stream, mode="w|", bufsize=BUF_SIZE
            ) as output_tar,
        ):
            _decrypt_backup(input_tar, output_tar, password)
    except (DecryptError, SecureTarError, tarfile.TarError) as err:
        LOGGER.warning("Error decrypting backup: %s", err)
    finally:
        output_stream.write(b"")  # Write an empty chunk to signal the end of the stream
        on_done()
|
||||
|
||||
|
||||
def _decrypt_backup(
    input_tar: tarfile.TarFile,
    output_tar: tarfile.TarFile,
    password: str | None,
) -> None:
    """Decrypt a backup.

    Copies every member of input_tar to output_tar: backup.json is
    rewritten with protected=False, inner tar archives are decrypted
    with the supplied password, and all other members are copied as-is.
    """
    for obj in input_tar:
        # We compare with PurePath to avoid issues with different path separators,
        # for example when backup.json is added as "./backup.json"
        if PurePath(obj.name) == PurePath("backup.json"):
            # Rewrite the backup.json file to indicate that the backup is decrypted
            if not (reader := input_tar.extractfile(obj)):
                raise DecryptError
            metadata = json_loads_object(reader.read())
            metadata["protected"] = False
            updated_metadata_b = json.dumps(metadata).encode()
            # Copy the TarInfo so the size can be adjusted to the rewritten
            # payload without mutating the member owned by input_tar.
            metadata_obj = copy.deepcopy(obj)
            metadata_obj.size = len(updated_metadata_b)
            output_tar.addfile(metadata_obj, BytesIO(updated_metadata_b))
            continue
        if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
            # Not an encrypted inner archive: copy the member unchanged.
            output_tar.addfile(obj, input_tar.extractfile(obj))
            continue
        istf = SecureTarFile(
            None,  # Not used
            gzip=False,
            key=password_to_key(password) if password is not None else None,
            mode="r",
            fileobj=input_tar.extractfile(obj),
        )
        with istf.decrypt(obj) as decrypted:
            # Older securetar versions do not record the plaintext size,
            # which is needed to write a correct member header.
            if (plaintext_size := istf.securetar_header.plaintext_size) is None:
                raise UnsupportedSecureTarVersion
            decrypted_obj = copy.deepcopy(obj)
            decrypted_obj.size = plaintext_size
            output_tar.addfile(decrypted_obj, decrypted)
|
||||
|
||||
|
||||
async def receive_file(
|
||||
hass: HomeAssistant, contents: aiohttp.BodyPartReader, path: Path
|
||||
) -> None:
|
||||
|
||||
@@ -9,7 +9,11 @@ from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from .config import ScheduleState
|
||||
from .const import DATA_MANAGER, LOGGER
|
||||
from .manager import IncorrectPasswordError, ManagerStateEvent
|
||||
from .manager import (
|
||||
DecryptOnDowloadNotSupported,
|
||||
IncorrectPasswordError,
|
||||
ManagerStateEvent,
|
||||
)
|
||||
from .models import Folder
|
||||
|
||||
|
||||
@@ -24,6 +28,7 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) ->
|
||||
|
||||
websocket_api.async_register_command(hass, handle_details)
|
||||
websocket_api.async_register_command(hass, handle_info)
|
||||
websocket_api.async_register_command(hass, handle_can_decrypt_on_download)
|
||||
websocket_api.async_register_command(hass, handle_create)
|
||||
websocket_api.async_register_command(hass, handle_create_with_automatic_settings)
|
||||
websocket_api.async_register_command(hass, handle_delete)
|
||||
@@ -147,6 +152,38 @@ async def handle_restore(
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/can_decrypt_on_download",
        vol.Required("backup_id"): str,
        vol.Required("agent_id"): str,
        vol.Required("password"): str,
    }
)
@websocket_api.async_response
async def handle_can_decrypt_on_download(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Check if the supplied password is correct.

    Sends a websocket error with code "password_incorrect" or
    "decrypt_not_supported" on failure, otherwise a plain result.
    """
    try:
        await hass.data[DATA_MANAGER].async_can_decrypt_on_download(
            msg["backup_id"],
            agent_id=msg["agent_id"],
            # "password" is vol.Required in the schema, so index directly for
            # consistency with the other required fields (was msg.get()).
            password=msg["password"],
        )
    except IncorrectPasswordError:
        connection.send_error(msg["id"], "password_incorrect", "Incorrect password")
    # NOTE(review): the "Dowload" typo is in the imported exception name and
    # cannot be fixed here without renaming it at its definition site.
    except DecryptOnDowloadNotSupported:
        connection.send_error(
            msg["id"], "decrypt_not_supported", "Decrypt on download not supported"
        )
    else:
        connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
|
||||
@@ -10,9 +10,9 @@ from aiobafi6 import Device, Service
|
||||
from aiobafi6.discovery import PORT
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import zeroconf
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_IP_ADDRESS
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import DOMAIN, RUN_TIMEOUT
|
||||
from .models import BAFDiscovery
|
||||
@@ -44,7 +44,7 @@ class BAFFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
self.discovery: BAFDiscovery | None = None
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: zeroconf.ZeroconfServiceInfo
|
||||
self, discovery_info: ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle zeroconf discovery."""
|
||||
if discovery_info.ip_address.version == 6:
|
||||
|
||||
@@ -20,7 +20,7 @@ class BalboaEntity(Entity):
|
||||
"""Initialize the control."""
|
||||
mac = client.mac_address
|
||||
model = client.model
|
||||
self._attr_unique_id = f'{model}-{key}-{mac.replace(":","")[-6:]}'
|
||||
self._attr_unique_id = f"{model}-{key}-{mac.replace(':', '')[-6:]}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, mac)},
|
||||
name=model,
|
||||
|
||||
@@ -34,7 +34,7 @@ class BangOlufsenData:
|
||||
|
||||
type BangOlufsenConfigEntry = ConfigEntry[BangOlufsenData]
|
||||
|
||||
PLATFORMS = [Platform.MEDIA_PLAYER]
|
||||
PLATFORMS = [Platform.EVENT, Platform.MEDIA_PLAYER]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry) -> bool:
|
||||
|
||||
@@ -10,10 +10,10 @@ from mozart_api.exceptions import ApiException
|
||||
from mozart_api.mozart_client import MozartClient
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.zeroconf import ZeroconfServiceInfo
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_MODEL
|
||||
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
from homeassistant.util.ssl import get_default_context
|
||||
|
||||
from .const import (
|
||||
|
||||
@@ -79,6 +79,7 @@ class WebsocketNotification(StrEnum):
|
||||
"""Enum for WebSocket notification types."""
|
||||
|
||||
ACTIVE_LISTENING_MODE = "active_listening_mode"
|
||||
BUTTON = "button"
|
||||
PLAYBACK_ERROR = "playback_error"
|
||||
PLAYBACK_METADATA = "playback_metadata"
|
||||
PLAYBACK_PROGRESS = "playback_progress"
|
||||
@@ -203,14 +204,60 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
),
|
||||
]
|
||||
)
|
||||
# Map for storing compatibility of devices.
|
||||
|
||||
MODEL_SUPPORT_DEVICE_BUTTONS: Final[str] = "device_buttons"
|
||||
|
||||
MODEL_SUPPORT_MAP = {
|
||||
MODEL_SUPPORT_DEVICE_BUTTONS: (
|
||||
BangOlufsenModel.BEOLAB_8,
|
||||
BangOlufsenModel.BEOLAB_28,
|
||||
BangOlufsenModel.BEOSOUND_2,
|
||||
BangOlufsenModel.BEOSOUND_A5,
|
||||
BangOlufsenModel.BEOSOUND_A9,
|
||||
BangOlufsenModel.BEOSOUND_BALANCE,
|
||||
BangOlufsenModel.BEOSOUND_EMERGE,
|
||||
BangOlufsenModel.BEOSOUND_LEVEL,
|
||||
BangOlufsenModel.BEOSOUND_THEATRE,
|
||||
)
|
||||
}
|
||||
|
||||
# Device events
|
||||
BANG_OLUFSEN_WEBSOCKET_EVENT: Final[str] = f"{DOMAIN}_websocket_event"
|
||||
|
||||
# Dict used to translate native Bang & Olufsen event names to string.json compatible ones
|
||||
EVENT_TRANSLATION_MAP: dict[str, str] = {
|
||||
"shortPress (Release)": "short_press_release",
|
||||
"longPress (Timeout)": "long_press_timeout",
|
||||
"longPress (Release)": "long_press_release",
|
||||
"veryLongPress (Timeout)": "very_long_press_timeout",
|
||||
"veryLongPress (Release)": "very_long_press_release",
|
||||
}
|
||||
|
||||
CONNECTION_STATUS: Final[str] = "CONNECTION_STATUS"
|
||||
|
||||
DEVICE_BUTTONS: Final[list[str]] = [
|
||||
"Bluetooth",
|
||||
"Microphone",
|
||||
"Next",
|
||||
"PlayPause",
|
||||
"Preset1",
|
||||
"Preset2",
|
||||
"Preset3",
|
||||
"Preset4",
|
||||
"Previous",
|
||||
"Volume",
|
||||
]
|
||||
|
||||
|
||||
DEVICE_BUTTON_EVENTS: Final[list[str]] = [
|
||||
"short_press_release",
|
||||
"long_press_timeout",
|
||||
"long_press_release",
|
||||
"very_long_press_timeout",
|
||||
"very_long_press_release",
|
||||
]
|
||||
|
||||
# Beolink Converter NL/ML sources need to be transformed to upper case
|
||||
BEOLINK_JOIN_SOURCES_TO_UPPER = (
|
||||
"aux_a",
|
||||
|
||||
76
homeassistant/components/bang_olufsen/event.py
Normal file
76
homeassistant/components/bang_olufsen/event.py
Normal file
@@ -0,0 +1,76 @@
|
||||
"""Event entities for the Bang & Olufsen integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.components.event import EventDeviceClass, EventEntity
|
||||
from homeassistant.const import CONF_MODEL
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import BangOlufsenConfigEntry
|
||||
from .const import (
|
||||
CONNECTION_STATUS,
|
||||
DEVICE_BUTTON_EVENTS,
|
||||
DEVICE_BUTTONS,
|
||||
MODEL_SUPPORT_DEVICE_BUTTONS,
|
||||
MODEL_SUPPORT_MAP,
|
||||
WebsocketNotification,
|
||||
)
|
||||
from .entity import BangOlufsenEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: BangOlufsenConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up Sensor entities from config entry."""
    model = config_entry.data[CONF_MODEL]
    # Only models with physical device buttons get button event entities.
    if model in MODEL_SUPPORT_MAP[MODEL_SUPPORT_DEVICE_BUTTONS]:
        entities = [
            BangOlufsenButtonEvent(config_entry, button_type)
            for button_type in DEVICE_BUTTONS
        ]
        async_add_entities(entities)
|
||||
|
||||
|
||||
class BangOlufsenButtonEvent(BangOlufsenEntity, EventEntity):
    """Event class for Button events."""

    _attr_device_class = EventDeviceClass.BUTTON
    _attr_entity_registry_enabled_default = False
    _attr_event_types = DEVICE_BUTTON_EVENTS

    def __init__(self, config_entry: BangOlufsenConfigEntry, button_type: str) -> None:
        """Initialize Button."""
        super().__init__(config_entry, config_entry.runtime_data.client)
        self._button_type = button_type
        self._attr_unique_id = f"{self._unique_id}_{button_type}"
        # Make the native button name Home Assistant compatible
        self._attr_translation_key = button_type.lower()

    async def async_added_to_hass(self) -> None:
        """Listen to WebSocket button events."""
        dispatcher_subscriptions = (
            (
                f"{self._unique_id}_{CONNECTION_STATUS}",
                self._async_update_connection_state,
            ),
            (
                f"{self._unique_id}_{WebsocketNotification.BUTTON}_{self._button_type}",
                self._async_handle_event,
            ),
        )
        for signal, handler in dispatcher_subscriptions:
            self.async_on_remove(
                async_dispatcher_connect(self.hass, signal, handler)
            )

    @callback
    def _async_handle_event(self, event: str) -> None:
        """Handle event."""
        self._trigger_event(event)
        self.async_write_ha_state()
|
||||
@@ -1,7 +1,12 @@
|
||||
{
|
||||
"common": {
|
||||
"jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity.",
|
||||
"jid_options_name": "JID options",
|
||||
"jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity."
|
||||
"long_press_release": "Release of long press",
|
||||
"long_press_timeout": "Long press",
|
||||
"short_press_release": "Release of short press",
|
||||
"very_long_press_release": "Release of very long press",
|
||||
"very_long_press_timeout": "Very long press"
|
||||
},
|
||||
"config": {
|
||||
"error": {
|
||||
@@ -29,6 +34,150 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"event": {
|
||||
"bluetooth": {
|
||||
"name": "Bluetooth",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"microphone": {
|
||||
"name": "Microphone",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"next": {
|
||||
"name": "Next",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"playpause": {
|
||||
"name": "Play / Pause",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"preset1": {
|
||||
"name": "Favourite 1",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"preset2": {
|
||||
"name": "Favourite 2",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"preset3": {
|
||||
"name": "Favourite 3",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"preset4": {
|
||||
"name": "Favourite 4",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"previous": {
|
||||
"name": "Previous",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"volume": {
|
||||
"name": "Volume",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
|
||||
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
|
||||
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
|
||||
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
|
||||
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"source_ids": {
|
||||
"options": {
|
||||
|
||||
@@ -3,8 +3,10 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from mozart_api.models import (
|
||||
ButtonEvent,
|
||||
ListeningModeProps,
|
||||
PlaybackContentMetadata,
|
||||
PlaybackError,
|
||||
@@ -26,6 +28,7 @@ from homeassistant.util.enum import try_parse_enum
|
||||
from .const import (
|
||||
BANG_OLUFSEN_WEBSOCKET_EVENT,
|
||||
CONNECTION_STATUS,
|
||||
EVENT_TRANSLATION_MAP,
|
||||
WebsocketNotification,
|
||||
)
|
||||
from .entity import BangOlufsenBase
|
||||
@@ -54,6 +57,8 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
self._client.get_active_listening_mode_notifications(
|
||||
self.on_active_listening_mode
|
||||
)
|
||||
self._client.get_button_notifications(self.on_button_notification)
|
||||
|
||||
self._client.get_playback_error_notifications(
|
||||
self.on_playback_error_notification
|
||||
)
|
||||
@@ -104,6 +109,19 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
notification,
|
||||
)
|
||||
|
||||
def on_button_notification(self, notification: ButtonEvent) -> None:
|
||||
"""Send button dispatch."""
|
||||
# State is expected to always be available.
|
||||
if TYPE_CHECKING:
|
||||
assert notification.state
|
||||
|
||||
# Send to event entity
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.BUTTON}_{notification.button}",
|
||||
EVENT_TRANSLATION_MAP[notification.state],
|
||||
)
|
||||
|
||||
def on_notification_notification(
|
||||
self, notification: WebsocketNotificationTag
|
||||
) -> None:
|
||||
|
||||
@@ -131,7 +131,10 @@ def _no_overlapping(configs: list[dict]) -> list[dict]:
|
||||
for i, tup in enumerate(intervals):
|
||||
if len(intervals) > i + 1 and tup.below > intervals[i + 1].above:
|
||||
raise vol.Invalid(
|
||||
f"Ranges for bayesian numeric state entities must not overlap, but {ent_id} has overlapping ranges, above:{tup.above}, below:{tup.below} overlaps with above:{intervals[i+1].above}, below:{intervals[i+1].below}."
|
||||
"Ranges for bayesian numeric state entities must not overlap, "
|
||||
f"but {ent_id} has overlapping ranges, above:{tup.above}, "
|
||||
f"below:{tup.below} overlaps with above:{intervals[i + 1].above}, "
|
||||
f"below:{intervals[i + 1].below}."
|
||||
)
|
||||
return configs
|
||||
|
||||
@@ -206,7 +209,10 @@ async def async_setup_platform(
|
||||
broken_observations: list[dict[str, Any]] = []
|
||||
for observation in observations:
|
||||
if CONF_P_GIVEN_F not in observation:
|
||||
text: str = f"{name}/{observation.get(CONF_ENTITY_ID,'')}{observation.get(CONF_VALUE_TEMPLATE,'')}"
|
||||
text = (
|
||||
f"{name}/{observation.get(CONF_ENTITY_ID, '')}"
|
||||
f"{observation.get(CONF_VALUE_TEMPLATE, '')}"
|
||||
)
|
||||
raise_no_prob_given_false(hass, text)
|
||||
_LOGGER.error("Missing prob_given_false YAML entry for %s", text)
|
||||
broken_observations.append(observation)
|
||||
|
||||
@@ -15,10 +15,10 @@ from blebox_uniapi.error import (
|
||||
from blebox_uniapi.session import ApiHost
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import zeroconf
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from . import get_maybe_authenticated_session
|
||||
from .const import (
|
||||
@@ -84,7 +84,7 @@ class BleBoxConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: zeroconf.ZeroconfServiceInfo
|
||||
self, discovery_info: ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle zeroconf discovery."""
|
||||
hass = self.hass
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
"data": {
|
||||
"api_token": "[%key:common::config_flow::data::api_token%]"
|
||||
},
|
||||
"description": "Enter your Blue Current api token",
|
||||
"description": "Enter your Blue Current API token",
|
||||
"title": "Authentication"
|
||||
}
|
||||
},
|
||||
@@ -19,7 +19,7 @@
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"wrong_account": "Wrong account: Please authenticate with the api key for {email}."
|
||||
"wrong_account": "Wrong account: Please authenticate with the API token for {email}."
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
|
||||
@@ -136,7 +136,7 @@ def _extract_blueprint_from_community_topic(
|
||||
)
|
||||
|
||||
return ImportedBlueprint(
|
||||
f'{post["username"]}/{topic["slug"]}', block_content, blueprint
|
||||
f"{post['username']}/{topic['slug']}", block_content, blueprint
|
||||
)
|
||||
|
||||
|
||||
@@ -173,8 +173,7 @@ async def fetch_blueprint_from_github_url(
|
||||
|
||||
parsed_import_url = yarl.URL(import_url)
|
||||
suggested_filename = f"{parsed_import_url.parts[1]}/{parsed_import_url.parts[-1]}"
|
||||
if suggested_filename.endswith(".yaml"):
|
||||
suggested_filename = suggested_filename[:-5]
|
||||
suggested_filename = suggested_filename.removesuffix(".yaml")
|
||||
|
||||
return ImportedBlueprint(suggested_filename, raw_yaml, blueprint)
|
||||
|
||||
|
||||
@@ -14,10 +14,13 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import BluesoundCoordinator
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
PLATFORMS = [Platform.MEDIA_PLAYER]
|
||||
PLATFORMS = [
|
||||
Platform.MEDIA_PLAYER,
|
||||
]
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -26,6 +29,7 @@ class BluesoundRuntimeData:
|
||||
|
||||
player: Player
|
||||
sync_status: SyncStatus
|
||||
coordinator: BluesoundCoordinator
|
||||
|
||||
|
||||
type BluesoundConfigEntry = ConfigEntry[BluesoundRuntimeData]
|
||||
@@ -33,9 +37,6 @@ type BluesoundConfigEntry = ConfigEntry[BluesoundRuntimeData]
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Bluesound."""
|
||||
if DOMAIN not in hass.data:
|
||||
hass.data[DOMAIN] = []
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -46,13 +47,16 @@ async def async_setup_entry(
|
||||
host = config_entry.data[CONF_HOST]
|
||||
port = config_entry.data[CONF_PORT]
|
||||
session = async_get_clientsession(hass)
|
||||
async with Player(host, port, session=session, default_timeout=10) as player:
|
||||
try:
|
||||
sync_status = await player.sync_status(timeout=1)
|
||||
except PlayerUnreachableError as ex:
|
||||
raise ConfigEntryNotReady(f"Error connecting to {host}:{port}") from ex
|
||||
player = Player(host, port, session=session, default_timeout=10)
|
||||
try:
|
||||
sync_status = await player.sync_status(timeout=1)
|
||||
except PlayerUnreachableError as ex:
|
||||
raise ConfigEntryNotReady(f"Error connecting to {host}:{port}") from ex
|
||||
|
||||
config_entry.runtime_data = BluesoundRuntimeData(player, sync_status)
|
||||
coordinator = BluesoundCoordinator(hass, player, sync_status)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
config_entry.runtime_data = BluesoundRuntimeData(player, sync_status, coordinator)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
|
||||
|
||||
|
||||
@@ -7,10 +7,10 @@ from pyblu import Player, SyncStatus
|
||||
from pyblu.errors import PlayerUnreachableError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import zeroconf
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import DOMAIN
|
||||
from .media_player import DEFAULT_PORT
|
||||
@@ -71,29 +71,8 @@ class BluesoundConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
),
|
||||
)
|
||||
|
||||
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Import bluesound config entry from configuration.yaml."""
|
||||
session = async_get_clientsession(self.hass)
|
||||
async with Player(
|
||||
import_data[CONF_HOST], import_data[CONF_PORT], session=session
|
||||
) as player:
|
||||
try:
|
||||
sync_status = await player.sync_status(timeout=1)
|
||||
except PlayerUnreachableError:
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
|
||||
await self.async_set_unique_id(
|
||||
format_unique_id(sync_status.mac, import_data[CONF_PORT])
|
||||
)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=sync_status.name,
|
||||
data=import_data,
|
||||
)
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: zeroconf.ZeroconfServiceInfo
|
||||
self, discovery_info: ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by zeroconf discovery."""
|
||||
if discovery_info.port is not None:
|
||||
|
||||
160
homeassistant/components/bluesound/coordinator.py
Normal file
160
homeassistant/components/bluesound/coordinator.py
Normal file
@@ -0,0 +1,160 @@
|
||||
"""Define a base coordinator for Bluesound entities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Callable, Coroutine
|
||||
import contextlib
|
||||
from dataclasses import dataclass, replace
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from pyblu import Input, Player, Preset, Status, SyncStatus
|
||||
from pyblu.errors import PlayerUnreachableError
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
NODE_OFFLINE_CHECK_TIMEOUT = timedelta(minutes=3)
|
||||
PRESET_AND_INPUTS_INTERVAL = timedelta(minutes=15)
|
||||
|
||||
|
||||
@dataclass
class BluesoundData:
    """Define a class to hold Bluesound data."""

    # Player/group sync state as last reported by the device.
    sync_status: SyncStatus
    # Current playback status (track, state, etc.).
    status: Status
    # Configured presets; refreshed on the presets/inputs poll interval.
    presets: list[Preset]
    # Available inputs; refreshed on the presets/inputs poll interval.
    inputs: list[Input]
|
||||
|
||||
|
||||
def cancel_task(task: asyncio.Task) -> Callable[[], Coroutine[None, None, None]]:
    """Return a coroutine factory that cancels *task* and awaits its exit.

    The returned callable is suitable for use as an unload/cleanup hook.
    """

    async def _cancel_task() -> None:
        # Request cancellation, then await the task so the cancellation has
        # fully completed before returning; the expected CancelledError is
        # swallowed, any other exception from the task propagates.
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            pass

    return _cancel_task
|
||||
|
||||
|
||||
class BluesoundCoordinator(DataUpdateCoordinator[BluesoundData]):
|
||||
"""Define an object to hold Bluesound data."""
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, player: Player, sync_status: SyncStatus
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
self.player = player
|
||||
self._inital_sync_status = sync_status
|
||||
|
||||
super().__init__(
|
||||
hass,
|
||||
logger=_LOGGER,
|
||||
name=sync_status.name,
|
||||
)
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
assert self.config_entry is not None
|
||||
|
||||
preset = await self.player.presets()
|
||||
inputs = await self.player.inputs()
|
||||
status = await self.player.status()
|
||||
|
||||
self.async_set_updated_data(
|
||||
BluesoundData(
|
||||
sync_status=self._inital_sync_status,
|
||||
status=status,
|
||||
presets=preset,
|
||||
inputs=inputs,
|
||||
)
|
||||
)
|
||||
|
||||
status_loop_task = self.hass.async_create_background_task(
|
||||
self._poll_status_loop(),
|
||||
name=f"bluesound.poll_status_loop_{self.data.sync_status.id}",
|
||||
)
|
||||
self.config_entry.async_on_unload(cancel_task(status_loop_task))
|
||||
|
||||
sync_status_loop_task = self.hass.async_create_background_task(
|
||||
self._poll_sync_status_loop(),
|
||||
name=f"bluesound.poll_sync_status_loop_{self.data.sync_status.id}",
|
||||
)
|
||||
self.config_entry.async_on_unload(cancel_task(sync_status_loop_task))
|
||||
|
||||
presets_and_inputs_loop_task = self.hass.async_create_background_task(
|
||||
self._poll_presets_and_inputs_loop(),
|
||||
name=f"bluesound.poll_presets_and_inputs_loop_{self.data.sync_status.id}",
|
||||
)
|
||||
self.config_entry.async_on_unload(cancel_task(presets_and_inputs_loop_task))
|
||||
|
||||
async def _async_update_data(self) -> BluesoundData:
|
||||
return self.data
|
||||
|
||||
async def _poll_presets_and_inputs_loop(self) -> None:
|
||||
while True:
|
||||
await asyncio.sleep(PRESET_AND_INPUTS_INTERVAL.total_seconds())
|
||||
try:
|
||||
preset = await self.player.presets()
|
||||
inputs = await self.player.inputs()
|
||||
self.async_set_updated_data(
|
||||
replace(
|
||||
self.data,
|
||||
presets=preset,
|
||||
inputs=inputs,
|
||||
)
|
||||
)
|
||||
except PlayerUnreachableError as ex:
|
||||
self.async_set_update_error(ex)
|
||||
except asyncio.CancelledError:
|
||||
return
|
||||
except Exception as ex: # noqa: BLE001 - this loop should never stop
|
||||
self.async_set_update_error(ex)
|
||||
|
||||
async def _poll_status_loop(self) -> None:
|
||||
"""Loop which polls the status of the player."""
|
||||
while True:
|
||||
try:
|
||||
status = await self.player.status(
|
||||
etag=self.data.status.etag, poll_timeout=120, timeout=125
|
||||
)
|
||||
self.async_set_updated_data(
|
||||
replace(
|
||||
self.data,
|
||||
status=status,
|
||||
)
|
||||
)
|
||||
except PlayerUnreachableError as ex:
|
||||
self.async_set_update_error(ex)
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT.total_seconds())
|
||||
except asyncio.CancelledError:
|
||||
return
|
||||
except Exception as ex: # noqa: BLE001 - this loop should never stop
|
||||
self.async_set_update_error(ex)
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT.total_seconds())
|
||||
|
||||
async def _poll_sync_status_loop(self) -> None:
|
||||
"""Loop which polls the sync status of the player."""
|
||||
while True:
|
||||
try:
|
||||
sync_status = await self.player.sync_status(
|
||||
etag=self.data.sync_status.etag, poll_timeout=120, timeout=125
|
||||
)
|
||||
self.async_set_updated_data(
|
||||
replace(
|
||||
self.data,
|
||||
sync_status=sync_status,
|
||||
)
|
||||
)
|
||||
except PlayerUnreachableError as ex:
|
||||
self.async_set_update_error(ex)
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT.total_seconds())
|
||||
except asyncio.CancelledError:
|
||||
raise
|
||||
except Exception as ex: # noqa: BLE001 - this loop should never stop
|
||||
self.async_set_update_error(ex)
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT.total_seconds())
|
||||
@@ -2,20 +2,16 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from asyncio import CancelledError, Task
|
||||
from contextlib import suppress
|
||||
from asyncio import Task
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from pyblu import Input, Player, Preset, Status, SyncStatus
|
||||
from pyblu.errors import PlayerUnreachableError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import media_source
|
||||
from homeassistant.components.media_player import (
|
||||
PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA,
|
||||
BrowseMedia,
|
||||
MediaPlayerEntity,
|
||||
MediaPlayerEntityFeature,
|
||||
@@ -23,16 +19,10 @@ from homeassistant.components.media_player import (
|
||||
MediaType,
|
||||
async_process_play_media_url,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_IMPORT
|
||||
from homeassistant.const import CONF_HOST, CONF_HOSTS, CONF_NAME, CONF_PORT
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
entity_platform,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.device_registry import (
|
||||
CONNECTION_NETWORK_MAC,
|
||||
DeviceInfo,
|
||||
@@ -43,10 +33,11 @@ from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_send,
|
||||
)
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN, INTEGRATION_TITLE
|
||||
from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN
|
||||
from .coordinator import BluesoundCoordinator
|
||||
from .utils import dispatcher_join_signal, dispatcher_unjoin_signal, format_unique_id
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -64,71 +55,8 @@ SERVICE_JOIN = "join"
|
||||
SERVICE_SET_TIMER = "set_sleep_timer"
|
||||
SERVICE_UNJOIN = "unjoin"
|
||||
|
||||
NODE_OFFLINE_CHECK_TIMEOUT = 180
|
||||
NODE_RETRY_INITIATION = timedelta(minutes=3)
|
||||
|
||||
SYNC_STATUS_INTERVAL = timedelta(minutes=5)
|
||||
|
||||
POLL_TIMEOUT = 120
|
||||
|
||||
PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_HOSTS): vol.All(
|
||||
cv.ensure_list,
|
||||
[
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
}
|
||||
],
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def _async_import(hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Import config entry from configuration.yaml."""
|
||||
if not hass.config_entries.async_entries(DOMAIN):
|
||||
# Start import flow
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_IMPORT}, data=config
|
||||
)
|
||||
if (
|
||||
result["type"] == FlowResultType.ABORT
|
||||
and result["reason"] == "cannot_connect"
|
||||
):
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"deprecated_yaml_import_issue_{result['reason']}",
|
||||
breaks_in_ha_version="2025.2.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key=f"deprecated_yaml_import_issue_{result['reason']}",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": INTEGRATION_TITLE,
|
||||
},
|
||||
)
|
||||
return
|
||||
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_yaml_{DOMAIN}",
|
||||
breaks_in_ha_version="2025.2.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": INTEGRATION_TITLE,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -137,10 +65,10 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up the Bluesound entry."""
|
||||
bluesound_player = BluesoundPlayer(
|
||||
config_entry.runtime_data.coordinator,
|
||||
config_entry.data[CONF_HOST],
|
||||
config_entry.data[CONF_PORT],
|
||||
config_entry.runtime_data.player,
|
||||
config_entry.runtime_data.sync_status,
|
||||
)
|
||||
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
@@ -155,27 +83,10 @@ async def async_setup_entry(
|
||||
)
|
||||
platform.async_register_entity_service(SERVICE_UNJOIN, None, "async_unjoin")
|
||||
|
||||
hass.data[DATA_BLUESOUND].append(bluesound_player)
|
||||
async_add_entities([bluesound_player], update_before_add=True)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None,
|
||||
) -> None:
|
||||
"""Trigger import flows."""
|
||||
hosts = config.get(CONF_HOSTS, [])
|
||||
for host in hosts:
|
||||
import_data = {
|
||||
CONF_HOST: host[CONF_HOST],
|
||||
CONF_PORT: host.get(CONF_PORT, 11000),
|
||||
}
|
||||
hass.async_create_task(_async_import(hass, import_data))
|
||||
|
||||
|
||||
class BluesoundPlayer(MediaPlayerEntity):
|
||||
class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity):
|
||||
"""Representation of a Bluesound Player."""
|
||||
|
||||
_attr_media_content_type = MediaType.MUSIC
|
||||
@@ -184,12 +95,15 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: BluesoundCoordinator,
|
||||
host: str,
|
||||
port: int,
|
||||
player: Player,
|
||||
sync_status: SyncStatus,
|
||||
) -> None:
|
||||
"""Initialize the media player."""
|
||||
super().__init__(coordinator)
|
||||
sync_status = coordinator.data.sync_status
|
||||
|
||||
self.host = host
|
||||
self.port = port
|
||||
self._poll_status_loop_task: Task[None] | None = None
|
||||
@@ -197,15 +111,14 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
self._id = sync_status.id
|
||||
self._last_status_update: datetime | None = None
|
||||
self._sync_status = sync_status
|
||||
self._status: Status | None = None
|
||||
self._inputs: list[Input] = []
|
||||
self._presets: list[Preset] = []
|
||||
self._status: Status = coordinator.data.status
|
||||
self._inputs: list[Input] = coordinator.data.inputs
|
||||
self._presets: list[Preset] = coordinator.data.presets
|
||||
self._group_name: str | None = None
|
||||
self._group_list: list[str] = []
|
||||
self._bluesound_device_name = sync_status.name
|
||||
self._player = player
|
||||
self._is_leader = False
|
||||
self._leader: BluesoundPlayer | None = None
|
||||
self._last_status_update = dt_util.utcnow()
|
||||
|
||||
self._attr_unique_id = format_unique_id(sync_status.mac, port)
|
||||
# there should always be one player with the default port per mac
|
||||
@@ -228,52 +141,10 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
via_device=(DOMAIN, format_mac(sync_status.mac)),
|
||||
)
|
||||
|
||||
async def _poll_status_loop(self) -> None:
|
||||
"""Loop which polls the status of the player."""
|
||||
while True:
|
||||
try:
|
||||
await self.async_update_status()
|
||||
except PlayerUnreachableError:
|
||||
_LOGGER.error(
|
||||
"Node %s:%s is offline, retrying later", self.host, self.port
|
||||
)
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
|
||||
except CancelledError:
|
||||
_LOGGER.debug(
|
||||
"Stopping the polling of node %s:%s", self.host, self.port
|
||||
)
|
||||
return
|
||||
except: # noqa: E722 - this loop should never stop
|
||||
_LOGGER.exception(
|
||||
"Unexpected error for %s:%s, retrying later", self.host, self.port
|
||||
)
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
|
||||
|
||||
async def _poll_sync_status_loop(self) -> None:
|
||||
"""Loop which polls the sync status of the player."""
|
||||
while True:
|
||||
try:
|
||||
await self.update_sync_status()
|
||||
except PlayerUnreachableError:
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
|
||||
except CancelledError:
|
||||
raise
|
||||
except: # noqa: E722 - all errors must be caught for this loop
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Start the polling task."""
|
||||
await super().async_added_to_hass()
|
||||
|
||||
self._poll_status_loop_task = self.hass.async_create_background_task(
|
||||
self._poll_status_loop(),
|
||||
name=f"bluesound.poll_status_loop_{self.host}:{self.port}",
|
||||
)
|
||||
self._poll_sync_status_loop_task = self.hass.async_create_background_task(
|
||||
self._poll_sync_status_loop(),
|
||||
name=f"bluesound.poll_sync_status_loop_{self.host}:{self.port}",
|
||||
)
|
||||
|
||||
assert self._sync_status.id is not None
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
@@ -294,105 +165,24 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
"""Stop the polling task."""
|
||||
await super().async_will_remove_from_hass()
|
||||
|
||||
assert self._poll_status_loop_task is not None
|
||||
if self._poll_status_loop_task.cancel():
|
||||
# the sleeps in _poll_loop will raise CancelledError
|
||||
with suppress(CancelledError):
|
||||
await self._poll_status_loop_task
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self._sync_status = self.coordinator.data.sync_status
|
||||
self._status = self.coordinator.data.status
|
||||
self._inputs = self.coordinator.data.inputs
|
||||
self._presets = self.coordinator.data.presets
|
||||
|
||||
assert self._poll_sync_status_loop_task is not None
|
||||
if self._poll_sync_status_loop_task.cancel():
|
||||
# the sleeps in _poll_sync_status_loop will raise CancelledError
|
||||
with suppress(CancelledError):
|
||||
await self._poll_sync_status_loop_task
|
||||
|
||||
self.hass.data[DATA_BLUESOUND].remove(self)
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update internal status of the entity."""
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
with suppress(PlayerUnreachableError):
|
||||
await self.async_update_presets()
|
||||
await self.async_update_captures()
|
||||
|
||||
async def async_update_status(self) -> None:
|
||||
"""Use the poll session to always get the status of the player."""
|
||||
etag = None
|
||||
if self._status is not None:
|
||||
etag = self._status.etag
|
||||
|
||||
try:
|
||||
status = await self._player.status(
|
||||
etag=etag, poll_timeout=POLL_TIMEOUT, timeout=POLL_TIMEOUT + 5
|
||||
)
|
||||
|
||||
self._attr_available = True
|
||||
self._last_status_update = dt_util.utcnow()
|
||||
self._status = status
|
||||
|
||||
self.async_write_ha_state()
|
||||
except PlayerUnreachableError:
|
||||
self._attr_available = False
|
||||
self._last_status_update = None
|
||||
self._status = None
|
||||
self.async_write_ha_state()
|
||||
_LOGGER.error(
|
||||
"Client connection error, marking %s as offline",
|
||||
self._bluesound_device_name,
|
||||
)
|
||||
raise
|
||||
|
||||
async def update_sync_status(self) -> None:
|
||||
"""Update the internal status."""
|
||||
etag = None
|
||||
if self._sync_status:
|
||||
etag = self._sync_status.etag
|
||||
sync_status = await self._player.sync_status(
|
||||
etag=etag, poll_timeout=POLL_TIMEOUT, timeout=POLL_TIMEOUT + 5
|
||||
)
|
||||
|
||||
self._sync_status = sync_status
|
||||
self._last_status_update = dt_util.utcnow()
|
||||
|
||||
self._group_list = self.rebuild_bluesound_group()
|
||||
|
||||
if sync_status.leader is not None:
|
||||
self._is_leader = False
|
||||
leader_id = f"{sync_status.leader.ip}:{sync_status.leader.port}"
|
||||
leader_device = [
|
||||
device
|
||||
for device in self.hass.data[DATA_BLUESOUND]
|
||||
if device.id == leader_id
|
||||
]
|
||||
|
||||
if leader_device and leader_id != self.id:
|
||||
self._leader = leader_device[0]
|
||||
else:
|
||||
self._leader = None
|
||||
_LOGGER.error("Leader not found %s", leader_id)
|
||||
else:
|
||||
if self._leader is not None:
|
||||
self._leader = None
|
||||
followers = self._sync_status.followers
|
||||
self._is_leader = followers is not None
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_update_captures(self) -> None:
|
||||
"""Update Capture sources."""
|
||||
inputs = await self._player.inputs()
|
||||
self._inputs = inputs
|
||||
|
||||
async def async_update_presets(self) -> None:
|
||||
"""Update Presets."""
|
||||
presets = await self._player.presets()
|
||||
self._presets = presets
|
||||
|
||||
@property
|
||||
def state(self) -> MediaPlayerState:
|
||||
"""Return the state of the device."""
|
||||
if self._status is None:
|
||||
if self.available is False:
|
||||
return MediaPlayerState.OFF
|
||||
|
||||
if self.is_grouped and not self.is_leader:
|
||||
@@ -409,7 +199,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def media_title(self) -> str | None:
|
||||
"""Title of current playing media."""
|
||||
if self._status is None or (self.is_grouped and not self.is_leader):
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
return self._status.name
|
||||
@@ -417,7 +207,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def media_artist(self) -> str | None:
|
||||
"""Artist of current playing media (Music track only)."""
|
||||
if self._status is None:
|
||||
if self.available is False:
|
||||
return None
|
||||
|
||||
if self.is_grouped and not self.is_leader:
|
||||
@@ -428,7 +218,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def media_album_name(self) -> str | None:
|
||||
"""Artist of current playing media (Music track only)."""
|
||||
if self._status is None or (self.is_grouped and not self.is_leader):
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
return self._status.album
|
||||
@@ -436,7 +226,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def media_image_url(self) -> str | None:
|
||||
"""Image url of current playing media."""
|
||||
if self._status is None or (self.is_grouped and not self.is_leader):
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
url = self._status.image
|
||||
@@ -451,7 +241,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def media_position(self) -> int | None:
|
||||
"""Position of current playing media in seconds."""
|
||||
if self._status is None or (self.is_grouped and not self.is_leader):
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
mediastate = self.state
|
||||
@@ -470,7 +260,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def media_duration(self) -> int | None:
|
||||
"""Duration of current playing media in seconds."""
|
||||
if self._status is None or (self.is_grouped and not self.is_leader):
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
duration = self._status.total_seconds
|
||||
@@ -487,16 +277,11 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def volume_level(self) -> float | None:
|
||||
"""Volume level of the media player (0..1)."""
|
||||
volume = None
|
||||
volume = self._status.volume
|
||||
|
||||
if self._status is not None:
|
||||
volume = self._status.volume
|
||||
if self.is_grouped:
|
||||
volume = self._sync_status.volume
|
||||
|
||||
if volume is None:
|
||||
return None
|
||||
|
||||
return volume / 100
|
||||
|
||||
@property
|
||||
@@ -529,7 +314,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def source_list(self) -> list[str] | None:
|
||||
"""List of available input sources."""
|
||||
if self._status is None or (self.is_grouped and not self.is_leader):
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
sources = [x.text for x in self._inputs]
|
||||
@@ -540,7 +325,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def source(self) -> str | None:
|
||||
"""Name of the current input source."""
|
||||
if self._status is None or (self.is_grouped and not self.is_leader):
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
if self._status.input_id is not None:
|
||||
@@ -557,7 +342,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def supported_features(self) -> MediaPlayerEntityFeature:
|
||||
"""Flag of media commands that are supported."""
|
||||
if self._status is None:
|
||||
if self.available is False:
|
||||
return MediaPlayerEntityFeature(0)
|
||||
|
||||
if self.is_grouped and not self.is_leader:
|
||||
@@ -659,16 +444,21 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
if self.sync_status.leader is None and self.sync_status.followers is None:
|
||||
return []
|
||||
|
||||
player_entities: list[BluesoundPlayer] = self.hass.data[DATA_BLUESOUND]
|
||||
config_entries: list[BluesoundConfigEntry] = (
|
||||
self.hass.config_entries.async_entries(DOMAIN)
|
||||
)
|
||||
sync_status_list = [
|
||||
x.runtime_data.coordinator.data.sync_status for x in config_entries
|
||||
]
|
||||
|
||||
leader_sync_status: SyncStatus | None = None
|
||||
if self.sync_status.leader is None:
|
||||
leader_sync_status = self.sync_status
|
||||
else:
|
||||
required_id = f"{self.sync_status.leader.ip}:{self.sync_status.leader.port}"
|
||||
for x in player_entities:
|
||||
if x.sync_status.id == required_id:
|
||||
leader_sync_status = x.sync_status
|
||||
for sync_status in sync_status_list:
|
||||
if sync_status.id == required_id:
|
||||
leader_sync_status = sync_status
|
||||
break
|
||||
|
||||
if leader_sync_status is None or leader_sync_status.followers is None:
|
||||
@@ -676,9 +466,9 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
|
||||
follower_ids = [f"{x.ip}:{x.port}" for x in leader_sync_status.followers]
|
||||
follower_names = [
|
||||
x.sync_status.name
|
||||
for x in player_entities
|
||||
if x.sync_status.id in follower_ids
|
||||
sync_status.name
|
||||
for sync_status in sync_status_list
|
||||
if sync_status.id in follower_ids
|
||||
]
|
||||
follower_names.insert(0, leader_sync_status.name)
|
||||
return follower_names
|
||||
|
||||
@@ -22,6 +22,7 @@ from bluetooth_adapters import (
|
||||
adapter_model,
|
||||
adapter_unique_name,
|
||||
get_adapters,
|
||||
get_manufacturer_from_mac,
|
||||
)
|
||||
from bluetooth_data_tools import monotonic_time_coarse as MONOTONIC_TIME
|
||||
from habluetooth import (
|
||||
@@ -51,7 +52,7 @@ from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.issue_registry import async_delete_issue
|
||||
from homeassistant.loader import async_get_bluetooth
|
||||
|
||||
from . import passive_update_processor
|
||||
from . import passive_update_processor, websocket_api
|
||||
from .api import (
|
||||
_get_manager,
|
||||
async_address_present,
|
||||
@@ -66,6 +67,7 @@ from .api import (
|
||||
async_rediscover_address,
|
||||
async_register_callback,
|
||||
async_register_scanner,
|
||||
async_remove_scanner,
|
||||
async_scanner_by_source,
|
||||
async_scanner_count,
|
||||
async_scanner_devices_by_address,
|
||||
@@ -77,6 +79,9 @@ from .const import (
|
||||
CONF_ADAPTER,
|
||||
CONF_DETAILS,
|
||||
CONF_PASSIVE,
|
||||
CONF_SOURCE_CONFIG_ENTRY_ID,
|
||||
CONF_SOURCE_DOMAIN,
|
||||
CONF_SOURCE_MODEL,
|
||||
DOMAIN,
|
||||
FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS,
|
||||
LINUX_FIRMWARE_LOAD_FALLBACK_SECONDS,
|
||||
@@ -92,9 +97,24 @@ if TYPE_CHECKING:
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
__all__ = [
|
||||
"FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS",
|
||||
"MONOTONIC_TIME",
|
||||
"SOURCE_LOCAL",
|
||||
"BaseHaRemoteScanner",
|
||||
"BaseHaScanner",
|
||||
"BluetoothCallback",
|
||||
"BluetoothCallbackMatcher",
|
||||
"BluetoothChange",
|
||||
"BluetoothScannerDevice",
|
||||
"BluetoothScanningMode",
|
||||
"BluetoothServiceInfo",
|
||||
"BluetoothServiceInfoBleak",
|
||||
"HaBluetoothConnector",
|
||||
"HomeAssistantRemoteScanner",
|
||||
"async_address_present",
|
||||
"async_ble_device_from_address",
|
||||
"async_discovered_service_info",
|
||||
"async_get_advertisement_callback",
|
||||
"async_get_fallback_availability_interval",
|
||||
"async_get_learned_advertising_interval",
|
||||
"async_get_scanner",
|
||||
@@ -103,26 +123,12 @@ __all__ = [
|
||||
"async_rediscover_address",
|
||||
"async_register_callback",
|
||||
"async_register_scanner",
|
||||
"async_set_fallback_availability_interval",
|
||||
"async_track_unavailable",
|
||||
"async_remove_scanner",
|
||||
"async_scanner_by_source",
|
||||
"async_scanner_count",
|
||||
"async_scanner_devices_by_address",
|
||||
"async_get_advertisement_callback",
|
||||
"BaseHaScanner",
|
||||
"HomeAssistantRemoteScanner",
|
||||
"BluetoothCallbackMatcher",
|
||||
"BluetoothChange",
|
||||
"BluetoothServiceInfo",
|
||||
"BluetoothServiceInfoBleak",
|
||||
"BluetoothScanningMode",
|
||||
"BluetoothCallback",
|
||||
"BluetoothScannerDevice",
|
||||
"HaBluetoothConnector",
|
||||
"BaseHaRemoteScanner",
|
||||
"SOURCE_LOCAL",
|
||||
"FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS",
|
||||
"MONOTONIC_TIME",
|
||||
"async_set_fallback_availability_interval",
|
||||
"async_track_unavailable",
|
||||
]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -232,6 +238,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
set_manager(manager)
|
||||
await storage_setup_task
|
||||
await manager.async_setup()
|
||||
websocket_api.async_setup(hass)
|
||||
|
||||
hass.async_create_background_task(
|
||||
_async_start_adapter_discovery(hass, manager, bluetooth_adapters),
|
||||
@@ -312,6 +319,38 @@ async def async_update_device(
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a config entry for a bluetooth scanner."""
|
||||
if source_entry_id := entry.data.get(CONF_SOURCE_CONFIG_ENTRY_ID):
|
||||
if not (source_entry := hass.config_entries.async_get_entry(source_entry_id)):
|
||||
# Cleanup the orphaned entry using a call_soon to ensure
|
||||
# we can return before the entry is removed
|
||||
hass.loop.call_soon(
|
||||
hass_callback(
|
||||
lambda: hass.async_create_task(
|
||||
hass.config_entries.async_remove(entry.entry_id),
|
||||
"remove orphaned bluetooth entry {entry.entry_id}",
|
||||
)
|
||||
)
|
||||
)
|
||||
address = entry.unique_id
|
||||
assert address is not None
|
||||
assert source_entry is not None
|
||||
source_domain = entry.data[CONF_SOURCE_DOMAIN]
|
||||
if mac_manufacturer := await get_manufacturer_from_mac(address):
|
||||
manufacturer = f"{mac_manufacturer} ({source_domain})"
|
||||
else:
|
||||
manufacturer = source_domain
|
||||
details = AdapterDetails(
|
||||
address=address,
|
||||
product=entry.data.get(CONF_SOURCE_MODEL),
|
||||
manufacturer=manufacturer,
|
||||
)
|
||||
await async_update_device(
|
||||
hass,
|
||||
entry,
|
||||
source_entry.title,
|
||||
details,
|
||||
)
|
||||
return True
|
||||
manager = _get_manager(hass)
|
||||
address = entry.unique_id
|
||||
assert address is not None
|
||||
|
||||
@@ -132,7 +132,7 @@ class ActiveBluetoothDataUpdateCoordinator[_T](PassiveBluetoothDataUpdateCoordin
|
||||
)
|
||||
self.last_poll_successful = False
|
||||
return
|
||||
except Exception: # noqa: BLE001
|
||||
except Exception:
|
||||
if self.last_poll_successful:
|
||||
self.logger.exception("%s: Failure while polling", self.address)
|
||||
self.last_poll_successful = False
|
||||
|
||||
@@ -127,7 +127,7 @@ class ActiveBluetoothProcessorCoordinator[_DataT](
|
||||
)
|
||||
self.last_poll_successful = False
|
||||
return
|
||||
except Exception: # noqa: BLE001
|
||||
except Exception:
|
||||
if self.last_poll_successful:
|
||||
self.logger.exception("%s: Failure while polling", self.address)
|
||||
self.last_poll_successful = False
|
||||
|
||||
@@ -178,9 +178,20 @@ def async_register_scanner(
|
||||
hass: HomeAssistant,
|
||||
scanner: BaseHaScanner,
|
||||
connection_slots: int | None = None,
|
||||
source_domain: str | None = None,
|
||||
source_model: str | None = None,
|
||||
source_config_entry_id: str | None = None,
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Register a BleakScanner."""
|
||||
return _get_manager(hass).async_register_scanner(scanner, connection_slots)
|
||||
return _get_manager(hass).async_register_hass_scanner(
|
||||
scanner, connection_slots, source_domain, source_model, source_config_entry_id
|
||||
)
|
||||
|
||||
|
||||
@hass_callback
|
||||
def async_remove_scanner(hass: HomeAssistant, source: str) -> None:
|
||||
"""Permanently remove a BleakScanner by source address."""
|
||||
return _get_manager(hass).async_remove_scanner(source)
|
||||
|
||||
|
||||
@hass_callback
|
||||
|
||||
@@ -18,7 +18,12 @@ from habluetooth import get_manager
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import onboarding
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.schema_config_entry_flow import (
|
||||
SchemaFlowFormStep,
|
||||
@@ -26,7 +31,16 @@ from homeassistant.helpers.schema_config_entry_flow import (
|
||||
)
|
||||
from homeassistant.helpers.typing import DiscoveryInfoType
|
||||
|
||||
from .const import CONF_ADAPTER, CONF_DETAILS, CONF_PASSIVE, DOMAIN
|
||||
from .const import (
|
||||
CONF_ADAPTER,
|
||||
CONF_DETAILS,
|
||||
CONF_PASSIVE,
|
||||
CONF_SOURCE,
|
||||
CONF_SOURCE_CONFIG_ENTRY_ID,
|
||||
CONF_SOURCE_DOMAIN,
|
||||
CONF_SOURCE_MODEL,
|
||||
DOMAIN,
|
||||
)
|
||||
from .util import adapter_title
|
||||
|
||||
OPTIONS_SCHEMA = vol.Schema(
|
||||
@@ -63,6 +77,8 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self, discovery_info: DiscoveryInfoType
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by discovery."""
|
||||
if discovery_info and CONF_SOURCE in discovery_info:
|
||||
return await self.async_step_external_scanner(discovery_info)
|
||||
self._adapter = cast(str, discovery_info[CONF_ADAPTER])
|
||||
self._details = cast(AdapterDetails, discovery_info[CONF_DETAILS])
|
||||
await self.async_set_unique_id(self._details[ADAPTER_ADDRESS])
|
||||
@@ -167,6 +183,24 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
),
|
||||
)
|
||||
|
||||
async def async_step_external_scanner(
|
||||
self, user_input: dict[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by an external scanner."""
|
||||
source = user_input[CONF_SOURCE]
|
||||
await self.async_set_unique_id(source)
|
||||
data = {
|
||||
CONF_SOURCE: source,
|
||||
CONF_SOURCE_MODEL: user_input[CONF_SOURCE_MODEL],
|
||||
CONF_SOURCE_DOMAIN: user_input[CONF_SOURCE_DOMAIN],
|
||||
CONF_SOURCE_CONFIG_ENTRY_ID: user_input[CONF_SOURCE_CONFIG_ENTRY_ID],
|
||||
}
|
||||
self._abort_if_unique_id_configured(updates=data)
|
||||
manager = get_manager()
|
||||
scanner = manager.async_scanner_by_source(source)
|
||||
assert scanner is not None
|
||||
return self.async_create_entry(title=scanner.name, data=data)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -177,8 +211,10 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
@callback
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> SchemaOptionsFlowHandler:
|
||||
) -> SchemaOptionsFlowHandler | RemoteAdapterOptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
if CONF_SOURCE in config_entry.data:
|
||||
return RemoteAdapterOptionsFlowHandler()
|
||||
return SchemaOptionsFlowHandler(config_entry, OPTIONS_FLOW)
|
||||
|
||||
@classmethod
|
||||
@@ -186,3 +222,13 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
def async_supports_options_flow(cls, config_entry: ConfigEntry) -> bool:
|
||||
"""Return options flow support for this handler."""
|
||||
return bool((manager := get_manager()) and manager.supports_passive_scan)
|
||||
|
||||
|
||||
class RemoteAdapterOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle a option flow for remote adapters."""
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle options flow."""
|
||||
return self.async_abort(reason="remote_adapters_not_supported")
|
||||
|
||||
@@ -18,6 +18,12 @@ CONF_DETAILS = "details"
|
||||
CONF_PASSIVE = "passive"
|
||||
|
||||
|
||||
CONF_SOURCE: Final = "source"
|
||||
CONF_SOURCE_DOMAIN: Final = "source_domain"
|
||||
CONF_SOURCE_MODEL: Final = "source_model"
|
||||
CONF_SOURCE_CONFIG_ENTRY_ID: Final = "source_config_entry_id"
|
||||
|
||||
|
||||
SOURCE_LOCAL: Final = "local"
|
||||
|
||||
DATA_MANAGER: Final = "bluetooth_manager"
|
||||
|
||||
@@ -22,7 +22,13 @@ from homeassistant.core import (
|
||||
from homeassistant.helpers import discovery_flow
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import (
|
||||
CONF_SOURCE,
|
||||
CONF_SOURCE_CONFIG_ENTRY_ID,
|
||||
CONF_SOURCE_DOMAIN,
|
||||
CONF_SOURCE_MODEL,
|
||||
DOMAIN,
|
||||
)
|
||||
from .match import (
|
||||
ADDRESS,
|
||||
CALLBACK,
|
||||
@@ -44,11 +50,11 @@ class HomeAssistantBluetoothManager(BluetoothManager):
|
||||
"""Manage Bluetooth for Home Assistant."""
|
||||
|
||||
__slots__ = (
|
||||
"hass",
|
||||
"storage",
|
||||
"_integration_matcher",
|
||||
"_callback_index",
|
||||
"_cancel_logging_listener",
|
||||
"_integration_matcher",
|
||||
"hass",
|
||||
"storage",
|
||||
)
|
||||
|
||||
def __init__(
|
||||
@@ -240,6 +246,39 @@ class HomeAssistantBluetoothManager(BluetoothManager):
|
||||
unregister()
|
||||
self._async_save_scanner_history(scanner)
|
||||
|
||||
@hass_callback
|
||||
def async_register_hass_scanner(
|
||||
self,
|
||||
scanner: BaseHaScanner,
|
||||
connection_slots: int | None = None,
|
||||
source_domain: str | None = None,
|
||||
source_model: str | None = None,
|
||||
source_config_entry_id: str | None = None,
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Register a scanner."""
|
||||
cancel = self.async_register_scanner(scanner, connection_slots)
|
||||
if (
|
||||
isinstance(scanner, BaseHaRemoteScanner)
|
||||
and source_domain
|
||||
and source_config_entry_id
|
||||
and not self.hass.config_entries.async_entry_for_domain_unique_id(
|
||||
DOMAIN, scanner.source
|
||||
)
|
||||
):
|
||||
self.hass.async_create_task(
|
||||
self.hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY},
|
||||
data={
|
||||
CONF_SOURCE: scanner.source,
|
||||
CONF_SOURCE_DOMAIN: source_domain,
|
||||
CONF_SOURCE_MODEL: source_model,
|
||||
CONF_SOURCE_CONFIG_ENTRY_ID: source_config_entry_id,
|
||||
},
|
||||
)
|
||||
)
|
||||
return cancel
|
||||
|
||||
def async_register_scanner(
|
||||
self,
|
||||
scanner: BaseHaScanner,
|
||||
@@ -253,6 +292,18 @@ class HomeAssistantBluetoothManager(BluetoothManager):
|
||||
unregister = super().async_register_scanner(scanner, connection_slots)
|
||||
return partial(self._async_unregister_scanner, scanner, unregister)
|
||||
|
||||
@hass_callback
|
||||
def async_remove_scanner(self, source: str) -> None:
|
||||
"""Remove a scanner."""
|
||||
self.storage.async_remove_advertisement_history(source)
|
||||
if entry := self.hass.config_entries.async_entry_for_domain_unique_id(
|
||||
DOMAIN, source
|
||||
):
|
||||
self.hass.async_create_task(
|
||||
self.hass.config_entries.async_remove(entry.entry_id),
|
||||
f"Removing {source} Bluetooth config entry",
|
||||
)
|
||||
|
||||
@hass_callback
|
||||
def _handle_config_entry_removed(
|
||||
self,
|
||||
|
||||
@@ -15,11 +15,11 @@
|
||||
"quality_scale": "internal",
|
||||
"requirements": [
|
||||
"bleak==0.22.3",
|
||||
"bleak-retry-connector==3.6.0",
|
||||
"bluetooth-adapters==0.20.2",
|
||||
"bleak-retry-connector==3.7.0",
|
||||
"bluetooth-adapters==0.21.0",
|
||||
"bluetooth-auto-recovery==1.4.2",
|
||||
"bluetooth-data-tools==1.20.0",
|
||||
"dbus-fast==2.24.3",
|
||||
"habluetooth==3.6.0"
|
||||
"bluetooth-data-tools==1.22.0",
|
||||
"dbus-fast==2.30.2",
|
||||
"habluetooth==3.9.0"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -92,7 +92,7 @@ def seen_all_fields(
|
||||
class IntegrationMatcher:
|
||||
"""Integration matcher for the bluetooth integration."""
|
||||
|
||||
__slots__ = ("_integration_matchers", "_matched", "_matched_connectable", "_index")
|
||||
__slots__ = ("_index", "_integration_matchers", "_matched", "_matched_connectable")
|
||||
|
||||
def __init__(self, integration_matchers: list[BluetoothMatcher]) -> None:
|
||||
"""Initialize the matcher."""
|
||||
@@ -164,12 +164,12 @@ class BluetoothMatcherIndexBase[
|
||||
|
||||
__slots__ = (
|
||||
"local_name",
|
||||
"service_uuid",
|
||||
"service_data_uuid",
|
||||
"manufacturer_id",
|
||||
"service_uuid_set",
|
||||
"service_data_uuid_set",
|
||||
"manufacturer_id_set",
|
||||
"service_data_uuid",
|
||||
"service_data_uuid_set",
|
||||
"service_uuid",
|
||||
"service_uuid_set",
|
||||
)
|
||||
|
||||
def __init__(self) -> None:
|
||||
|
||||
@@ -4,8 +4,6 @@ from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from typing_extensions import TypeVar
|
||||
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.helpers.update_coordinator import (
|
||||
BaseCoordinatorEntity,
|
||||
@@ -20,12 +18,6 @@ if TYPE_CHECKING:
|
||||
|
||||
from . import BluetoothChange, BluetoothScanningMode, BluetoothServiceInfoBleak
|
||||
|
||||
_PassiveBluetoothDataUpdateCoordinatorT = TypeVar(
|
||||
"_PassiveBluetoothDataUpdateCoordinatorT",
|
||||
bound="PassiveBluetoothDataUpdateCoordinator",
|
||||
default="PassiveBluetoothDataUpdateCoordinator",
|
||||
)
|
||||
|
||||
|
||||
class PassiveBluetoothDataUpdateCoordinator(
|
||||
BasePassiveBluetoothCoordinator, BaseDataUpdateCoordinatorProtocol
|
||||
@@ -98,7 +90,9 @@ class PassiveBluetoothDataUpdateCoordinator(
|
||||
self.async_update_listeners()
|
||||
|
||||
|
||||
class PassiveBluetoothCoordinatorEntity( # pylint: disable=hass-enforce-class-module
|
||||
class PassiveBluetoothCoordinatorEntity[
|
||||
_PassiveBluetoothDataUpdateCoordinatorT: PassiveBluetoothDataUpdateCoordinator = PassiveBluetoothDataUpdateCoordinator
|
||||
]( # pylint: disable=hass-enforce-class-module
|
||||
BaseCoordinatorEntity[_PassiveBluetoothDataUpdateCoordinatorT]
|
||||
):
|
||||
"""A class for entities using DataUpdateCoordinator."""
|
||||
|
||||
@@ -38,6 +38,12 @@ class BluetoothStorage:
|
||||
"""Get all scanners."""
|
||||
return list(self._data.keys())
|
||||
|
||||
@callback
|
||||
def async_remove_advertisement_history(self, scanner: str) -> None:
|
||||
"""Remove discovered devices by scanner."""
|
||||
if self._data.pop(scanner, None):
|
||||
self._store.async_delay_save(self._async_get_data, SCANNER_SAVE_DELAY)
|
||||
|
||||
@callback
|
||||
def async_get_advertisement_history(
|
||||
self, scanner: str
|
||||
|
||||
@@ -33,6 +33,9 @@
|
||||
"passive": "Passive scanning"
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"remote_adapters_not_supported": "Bluetooth configuration for remote adapters is not supported."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
150
homeassistant/components/bluetooth/websocket_api.py
Normal file
150
homeassistant/components/bluetooth/websocket_api.py
Normal file
@@ -0,0 +1,150 @@
|
||||
"""The bluetooth integration websocket apis."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Iterable
|
||||
from functools import lru_cache, partial
|
||||
import time
|
||||
from typing import Any
|
||||
|
||||
from habluetooth import BluetoothScanningMode
|
||||
from home_assistant_bluetooth import BluetoothServiceInfoBleak
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.json import json_bytes
|
||||
|
||||
from .api import _get_manager, async_register_callback
|
||||
from .match import BluetoothCallbackMatcher
|
||||
from .models import BluetoothChange
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup(hass: HomeAssistant) -> None:
|
||||
"""Set up the bluetooth websocket API."""
|
||||
websocket_api.async_register_command(hass, ws_subscribe_advertisements)
|
||||
|
||||
|
||||
@lru_cache(maxsize=1024)
|
||||
def serialize_service_info(
|
||||
service_info: BluetoothServiceInfoBleak, time_diff: float
|
||||
) -> dict[str, Any]:
|
||||
"""Serialize a BluetoothServiceInfoBleak object."""
|
||||
return {
|
||||
"name": service_info.name,
|
||||
"address": service_info.address,
|
||||
"rssi": service_info.rssi,
|
||||
"manufacturer_data": {
|
||||
str(manufacturer_id): manufacturer_data.hex()
|
||||
for manufacturer_id, manufacturer_data in service_info.manufacturer_data.items()
|
||||
},
|
||||
"service_data": {
|
||||
service_uuid: service_data.hex()
|
||||
for service_uuid, service_data in service_info.service_data.items()
|
||||
},
|
||||
"service_uuids": service_info.service_uuids,
|
||||
"source": service_info.source,
|
||||
"connectable": service_info.connectable,
|
||||
"time": service_info.time + time_diff,
|
||||
"tx_power": service_info.tx_power,
|
||||
}
|
||||
|
||||
|
||||
class _AdvertisementSubscription:
|
||||
"""Class to hold and manage the subscription data."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
ws_msg_id: int,
|
||||
match_dict: BluetoothCallbackMatcher,
|
||||
) -> None:
|
||||
"""Initialize the subscription data."""
|
||||
self.hass = hass
|
||||
self.match_dict = match_dict
|
||||
self.pending_service_infos: list[BluetoothServiceInfoBleak] = []
|
||||
self.ws_msg_id = ws_msg_id
|
||||
self.connection = connection
|
||||
self.pending = True
|
||||
# Keep time_diff precise to 2 decimal places
|
||||
# so the cached serialization can be reused,
|
||||
# however we still want to calculate it each
|
||||
# subscription in case the system clock is wrong
|
||||
# and gets corrected.
|
||||
self.time_diff = round(time.time() - time.monotonic(), 2)
|
||||
|
||||
@callback
|
||||
def _async_unsubscribe(
|
||||
self, cancel_callbacks: tuple[Callable[[], None], ...]
|
||||
) -> None:
|
||||
"""Unsubscribe the callback."""
|
||||
for cancel_callback in cancel_callbacks:
|
||||
cancel_callback()
|
||||
|
||||
@callback
|
||||
def async_start(self) -> None:
|
||||
"""Start the subscription."""
|
||||
connection = self.connection
|
||||
cancel_adv_callback = async_register_callback(
|
||||
self.hass,
|
||||
self._async_on_advertisement,
|
||||
self.match_dict,
|
||||
BluetoothScanningMode.PASSIVE,
|
||||
)
|
||||
cancel_disappeared_callback = _get_manager(
|
||||
self.hass
|
||||
).async_register_disappeared_callback(self._async_removed)
|
||||
connection.subscriptions[self.ws_msg_id] = partial(
|
||||
self._async_unsubscribe, (cancel_adv_callback, cancel_disappeared_callback)
|
||||
)
|
||||
self.pending = False
|
||||
self.connection.send_message(
|
||||
json_bytes(websocket_api.result_message(self.ws_msg_id))
|
||||
)
|
||||
self._async_added(self.pending_service_infos)
|
||||
self.pending_service_infos.clear()
|
||||
|
||||
def _async_event_message(self, message: dict[str, Any]) -> None:
|
||||
self.connection.send_message(
|
||||
json_bytes(websocket_api.event_message(self.ws_msg_id, message))
|
||||
)
|
||||
|
||||
def _async_added(self, service_infos: Iterable[BluetoothServiceInfoBleak]) -> None:
|
||||
self._async_event_message(
|
||||
{
|
||||
"add": [
|
||||
serialize_service_info(service_info, self.time_diff)
|
||||
for service_info in service_infos
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
def _async_removed(self, address: str) -> None:
|
||||
self._async_event_message({"remove": [{"address": address}]})
|
||||
|
||||
@callback
|
||||
def _async_on_advertisement(
|
||||
self, service_info: BluetoothServiceInfoBleak, change: BluetoothChange
|
||||
) -> None:
|
||||
"""Handle the callback."""
|
||||
if self.pending:
|
||||
self.pending_service_infos.append(service_info)
|
||||
return
|
||||
self._async_added((service_info,))
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "bluetooth/subscribe_advertisements",
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def ws_subscribe_advertisements(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle subscribe advertisements websocket command."""
|
||||
_AdvertisementSubscription(
|
||||
hass, connection, msg["id"], BluetoothCallbackMatcher(connectable=False)
|
||||
).async_start()
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user