Mirror of https://github.com/home-assistant/core.git, synced 2026-05-07 10:26:51 +02:00 (compare view, 1079 commits).
@@ -186,15 +186,12 @@ If `CHANGE_TYPE` IS "Breaking change" or "Deprecation", keep the `## Breaking ch

## Step 10: Push Branch and Create PR

```bash
# Get branch name and GitHub username
BRANCH=$(git branch --show-current)
PUSH_REMOTE=$(git config "branch.$BRANCH.remote" 2>/dev/null || git remote | head -1)
GITHUB_USER=$(gh api user --jq .login 2>/dev/null || git remote get-url "$PUSH_REMOTE" | sed -E 's#.*[:/]([^/]+)/([^/]+)(\.git)?$#\1#')
```

Push the branch with upstream tracking, and create a PR against `home-assistant/core` with the generated title and body:

```bash
# Create PR (gh pr create pushes the branch automatically)
gh pr create --repo home-assistant/core --base dev \
  --head "$GITHUB_USER:$BRANCH" \
  --draft \
  --title "TITLE_HERE" \
  --body "$(cat <<'EOF'
BODY_HERE
EOF
)"
```
@@ -1,6 +1,6 @@
 ---
 name: github-pr-reviewer
-description: Review a GitHub pull request and provide feedback comments. Use when the user says "review the current PR" or asks to review a specific PR.
+description: Reviews GitHub pull requests and provides feedback comments. This is the top skill to use for reviewing Pull Requests from GitHub.
 ---

 # Review GitHub Pull Request
@@ -27,12 +27,13 @@ description: Review a GitHub pull request and provide feedback comments. Use whe
 - No need to highlight things that are already good.

 ## Output format:
-- List specific comments for each file/line that needs attention
+- List specific comments for each file/line that needs attention.
 - In the end, summarize with an overall assessment (approve, request changes, or comment) and bullet point list of changes suggested, if any.
 - Example output:
 ```
 Overall assessment: request changes.
-- [CRITICAL] Memory leak in homeassistant/components/sensor/my_sensor.py:143
-- [PROBLEM] Inefficient algorithm in homeassistant/helpers/data_processing.py:87
-- [SUGGESTION] Improve variable naming in homeassistant/helpers/config_validation.py:45
+- [CRITICAL] sensor.py:143 - Memory leak
+- [PROBLEM] data_processing.py:87 - Inefficient algorithm
+- [SUGGESTION] test_init.py:45 - Improve x variable name
 ```
+- Make sure to include the file and line number when possible in the bullet points.
@@ -0,0 +1,44 @@
---
name: ha-integration-knowledge
description: Everything you need to know to build, test and review Home Assistant Integrations. If you're looking at an integration, you must use this as your primary reference.
---

## File Locations
- **Integration code**: `./homeassistant/components/<integration_domain>/`
- **Integration tests**: `./tests/components/<integration_domain>/`

## General guidelines

- When looking for examples, prefer integrations with the platinum or gold quality scale level first.
- Polling intervals are NOT user-configurable. Never add scan_interval, update_interval, or polling frequency options to config flows or config entries.
- Do NOT allow users to set config entry names in config flows. Names are automatically generated or can be customized later in the UI. Exception: helper integrations may allow custom names.
- For entity actions and entity services, avoid requesting redundant defensive checks for fields already enforced by Home Assistant validation schemas and entity filters; only request extra guards when values bypass validation or are transformed unsafely.
- When validation guarantees a key is present, prefer direct dictionary indexing (`data["key"]`) over `.get("key")` so invalid assumptions fail fast (see the sketch below).
- Integrations should be thin wrappers. Protocol parsing, device state machines, and other domain logic belong in a separate PyPI library, not in the integration itself. If unsure, ask before inlining.
- "potato" is a forbidden word for an integration and should never be used.
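
A minimal sketch of the indexing guideline above; the handler and field names are illustrative:

```python
from homeassistant.core import ServiceCall


async def handle_schedule(call: ServiceCall) -> None:
    """Hypothetical service handler whose schema already validated the input."""
    # "entity_id" is guaranteed by the validation schema, so index directly:
    # a missing key means an assumption broke, and that should fail loudly.
    entity_id = call.data["entity_id"]
    # ".get" remains appropriate for genuinely optional fields.
    timeout = call.data.get("timeout", 30)
```
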
The following platforms have extra guidelines:
- **Diagnostics**: [`platform-diagnostics.md`](platform-diagnostics.md) for diagnostic data collection
- **Repairs**: [`platform-repairs.md`](platform-repairs.md) for user-actionable repair issues

## Integration Quality Scale

- When validating the quality scale rules, check them at https://developers.home-assistant.io/docs/core/integration-quality-scale/rules
- When implementing or reviewing an integration, always consider the quality scale rules, since they promote best practices.

Template scale file: `./script/scaffold/templates/integration/integration/quality_scale.yaml`

### How Rules Apply
1. **Check `manifest.json`**: Look for the `"quality_scale"` key to determine the integration's level
2. **Bronze Rules**: Always required for any integration with a quality scale
3. **Higher Tier Rules**: Only apply if the integration targets that tier or higher
4. **Rule Status**: Check `quality_scale.yaml` in the integration folder for:
   - `done`: Rule implemented
   - `exempt`: Rule doesn't apply (with reason in comment)
   - `todo`: Rule needs implementation

## Testing Requirements

- Tests should avoid interacting with or mocking internal integration details. For more info, see https://developers.home-assistant.io/docs/development_testing/#writing-tests-for-integrations
@@ -0,0 +1,6 @@
# Integration Diagnostics

Platform exists as `homeassistant/components/<domain>/diagnostics.py`.

- **Required**: Implement diagnostic data collection
- **Security**: Never expose passwords, tokens, or sensitive coordinates
@@ -0,0 +1,21 @@
# Repairs platform

Platform exists as `homeassistant/components/<domain>/repairs.py`.

- **Actionable Issues Required**: All repair issues must be actionable for end users
- **Issue Content Requirements**:
  - Clearly explain what is happening
  - Provide specific steps users need to take to resolve the issue
  - Use friendly, helpful language
  - Include relevant context (device names, error details, etc.)
- **String Content Must Include**:
  - What the problem is
  - Why it matters
  - Exact steps to resolve (numbered list when multiple steps)
  - What to expect after following the steps
- **Avoid Vague Instructions**: Don't just say "update firmware" - provide specific steps
- **Severity Guidelines**:
  - `CRITICAL`: Reserved for extreme scenarios only
  - `ERROR`: Requires immediate user attention
  - `WARNING`: Indicates future potential breakage
- Only create issues for problems users can potentially resolve
@@ -1,786 +0,0 @@
---
name: Home Assistant Integration knowledge
description: Everything you need to know to build, test and review Home Assistant Integrations. If you're looking at an integration, you must use this as your primary reference.
---

### File Locations
- **Integration code**: `./homeassistant/components/<integration_domain>/`
- **Integration tests**: `./tests/components/<integration_domain>/`

## Integration Templates

### Standard Integration Structure
```
homeassistant/components/my_integration/
├── __init__.py          # Entry point with async_setup_entry
├── manifest.json        # Integration metadata and dependencies
├── const.py             # Domain and constants
├── config_flow.py       # UI configuration flow
├── coordinator.py       # Data update coordinator (if needed)
├── entity.py            # Base entity class (if shared patterns)
├── sensor.py            # Sensor platform
├── strings.json         # User-facing text and translations
├── services.yaml        # Service definitions (if applicable)
└── quality_scale.yaml   # Quality scale rule status
```

An integration can have platforms as needed (e.g., `sensor.py`, `switch.py`, etc.). The following platforms have extra guidelines:
- **Diagnostics**: [`platform-diagnostics.md`](platform-diagnostics.md) for diagnostic data collection
- **Repairs**: [`platform-repairs.md`](platform-repairs.md) for user-actionable repair issues

### Minimal Integration Checklist
- [ ] `manifest.json` with required fields (domain, name, codeowners, etc.)
- [ ] `__init__.py` with `async_setup_entry` and `async_unload_entry`
- [ ] `config_flow.py` with UI configuration support
- [ ] `const.py` with `DOMAIN` constant
- [ ] `strings.json` with at least config flow text
- [ ] Platform files (`sensor.py`, etc.) as needed
- [ ] `quality_scale.yaml` with rule status tracking

## Integration Quality Scale

Home Assistant uses an Integration Quality Scale to ensure code quality and consistency. The quality level determines which rules apply:

### Quality Scale Levels
- **Bronze**: Basic requirements (ALL Bronze rules are mandatory)
- **Silver**: Enhanced functionality
- **Gold**: Advanced features
- **Platinum**: Highest quality standards

### Quality Scale Progression
- **Bronze → Silver**: Add entity unavailability, parallel updates, auth flows
- **Silver → Gold**: Add device management, diagnostics, translations
- **Gold → Platinum**: Add strict typing, async dependencies, websession injection

### How Rules Apply
1. **Check `manifest.json`**: Look for the `"quality_scale"` key to determine the integration's level
2. **Bronze Rules**: Always required for any integration with a quality scale
3. **Higher Tier Rules**: Only apply if the integration targets that tier or higher
4. **Rule Status**: Check `quality_scale.yaml` in the integration folder for:
   - `done`: Rule implemented
   - `exempt`: Rule doesn't apply (with reason in comment)
   - `todo`: Rule needs implementation

### Example `quality_scale.yaml` Structure
```yaml
rules:
  # Bronze (mandatory)
  config-flow: done
  entity-unique-id: done
  action-setup:
    status: exempt
    comment: Integration does not register custom actions.

  # Silver (if targeting Silver+)
  entity-unavailable: done
  parallel-updates: done

  # Gold (if targeting Gold+)
  devices: done
  diagnostics: done

  # Platinum (if targeting Platinum)
  strict-typing: done
```

**When Reviewing/Creating Code**: Always check the integration's quality scale level and exemption status before applying rules.

## Code Organization

### Core Locations
- Shared constants: `homeassistant/const.py` (use these instead of hardcoding)
- Integration structure:
  - `homeassistant/components/{domain}/const.py` - Constants
  - `homeassistant/components/{domain}/models.py` - Data models
  - `homeassistant/components/{domain}/coordinator.py` - Update coordinator
  - `homeassistant/components/{domain}/config_flow.py` - Configuration flow
  - `homeassistant/components/{domain}/{platform}.py` - Platform implementations

### Common Modules
- **coordinator.py**: Centralize data fetching logic
  ```python
  class MyCoordinator(DataUpdateCoordinator[MyData]):
      def __init__(self, hass: HomeAssistant, client: MyClient, config_entry: ConfigEntry) -> None:
          super().__init__(
              hass,
              logger=LOGGER,
              name=DOMAIN,
              update_interval=timedelta(minutes=1),
              config_entry=config_entry,  # ✅ Pass config_entry - it's accepted and recommended
          )
  ```
- **entity.py**: Base entity definitions to reduce duplication
  ```python
  class MyEntity(CoordinatorEntity[MyCoordinator]):
      _attr_has_entity_name = True
  ```

### Runtime Data Storage
- **Use ConfigEntry.runtime_data**: Store non-persistent runtime data
  ```python
  type MyIntegrationConfigEntry = ConfigEntry[MyClient]

  async def async_setup_entry(hass: HomeAssistant, entry: MyIntegrationConfigEntry) -> bool:
      client = MyClient(entry.data[CONF_HOST])
      entry.runtime_data = client
  ```

### Manifest Requirements
- **Required Fields**: `domain`, `name`, `codeowners`, `integration_type`, `documentation`, `requirements` (see the sketch below)
- **Integration Types**: `device`, `hub`, `service`, `system`, `helper`
- **IoT Class**: Always specify the connectivity method (e.g., `cloud_polling`, `local_polling`, `local_push`)
- **Discovery Methods**: Add when applicable: `zeroconf`, `dhcp`, `bluetooth`, `ssdp`, `usb`
- **Dependencies**: Include platform dependencies (e.g., `application_credentials`, `bluetooth_adapters`)
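
A minimal sketch of a `manifest.json` covering the fields above; the domain, name, and requirement values are illustrative:

```json
{
  "domain": "my_integration",
  "name": "My Integration",
  "codeowners": ["@me"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/my_integration",
  "integration_type": "device",
  "iot_class": "local_polling",
  "requirements": ["my-client-lib==1.0.0"]
}
```
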
### Config Flow Patterns
- **Version Control**: Always set `VERSION = 1` and `MINOR_VERSION = 1`
- **Unique ID Management**:
  ```python
  await self.async_set_unique_id(device_unique_id)
  self._abort_if_unique_id_configured()
  ```
- **Error Handling**: Define errors in `strings.json` under `config.error` (see the fragment below)
- **Step Methods**: Use standard naming (`async_step_user`, `async_step_discovery`, etc.)
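
For the error-handling bullet above, a minimal `strings.json` fragment; the error keys are illustrative and must match what the flow sets in `errors`:

```json
{
  "config": {
    "error": {
      "cannot_connect": "Failed to connect",
      "invalid_auth": "Invalid authentication"
    }
  }
}
```
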
### Integration Ownership
- **manifest.json**: Add GitHub usernames to `codeowners`:
  ```json
  {
    "domain": "my_integration",
    "name": "My Integration",
    "codeowners": ["@me"]
  }
  ```

### Async Dependencies (Platinum)
- **Requirement**: All dependencies must use asyncio
- Ensures efficient task handling without thread context switching

### WebSession Injection (Platinum)
- **Pass WebSession**: Support passing web sessions to dependencies
  ```python
  async def async_setup_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
      """Set up integration from config entry."""
      client = MyClient(entry.data[CONF_HOST], async_get_clientsession(hass))
  ```
- For cookies: Use `async_create_clientsession` (aiohttp) or `create_async_httpx_client` (httpx)

### Data Update Coordinator
- **Standard Pattern**: Use for efficient data management
  ```python
  class MyCoordinator(DataUpdateCoordinator):
      def __init__(self, hass: HomeAssistant, client: MyClient, config_entry: ConfigEntry) -> None:
          super().__init__(
              hass,
              logger=LOGGER,
              name=DOMAIN,
              update_interval=timedelta(minutes=5),
              config_entry=config_entry,  # ✅ Pass config_entry - it's accepted and recommended
          )
          self.client = client

      async def _async_update_data(self):
          try:
              return await self.client.fetch_data()
          except ApiError as err:
              raise UpdateFailed(f"API communication error: {err}") from err
  ```
- **Error Types**: Use `UpdateFailed` for API errors, `ConfigEntryAuthFailed` for auth issues
- **Config Entry**: Always pass the `config_entry` parameter to the coordinator - it's accepted and recommended

## Integration Guidelines

### Configuration Flow
- **UI Setup Required**: All integrations must support configuration via the UI
- **Manifest**: Set `"config_flow": true` in `manifest.json`
- **Data Storage**:
  - Connection-critical config: Store in `ConfigEntry.data`
  - Non-critical settings: Store in `ConfigEntry.options`
- **Validation**: Always validate user input before creating entries
- **Config Entry Naming**:
  - ❌ Do NOT allow users to set config entry names in config flows
  - Names are automatically generated or can be customized later in the UI
  - ✅ Exception: Helper integrations MAY allow custom names in config flow
- **Connection Testing**: Test the device/service connection during the config flow:
  ```python
  try:
      await client.get_data()
  except MyException:
      errors["base"] = "cannot_connect"
  ```
- **Duplicate Prevention**: Prevent duplicate configurations:
  ```python
  # Using unique ID
  await self.async_set_unique_id(identifier)
  self._abort_if_unique_id_configured()

  # Using unique data
  self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
  ```

### Reauthentication Support
- **Required Method**: Implement `async_step_reauth` in the config flow
- **Credential Updates**: Allow users to update credentials without re-adding
- **Validation**: Verify the account matches the existing unique ID:
  ```python
  await self.async_set_unique_id(user_id)
  self._abort_if_unique_id_mismatch(reason="wrong_account")
  return self.async_update_reload_and_abort(
      self._get_reauth_entry(),
      data_updates={CONF_API_TOKEN: user_input[CONF_API_TOKEN]}
  )
  ```

### Reconfiguration Flow
- **Purpose**: Allow configuration updates without removing the device
- **Implementation**: Add an `async_step_reconfigure` method (see the sketch below)
- **Validation**: Prevent changing the underlying account with `_abort_if_unique_id_mismatch`
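
A minimal `async_step_reconfigure` sketch, assuming a host-based integration; `validate_connection`, the `CONF_HOST` schema, and the `wrong_device` abort reason are illustrative:

```python
async def async_step_reconfigure(
    self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
    """Handle reconfiguration of an existing entry."""
    if user_input is not None:
        # validate_connection is a hypothetical helper returning device info.
        device = await validate_connection(user_input[CONF_HOST])
        # Re-derive the unique ID and refuse to repoint the entry at another device.
        await self.async_set_unique_id(device.serial)
        self._abort_if_unique_id_mismatch(reason="wrong_device")
        return self.async_update_reload_and_abort(
            self._get_reconfigure_entry(),
            data_updates={CONF_HOST: user_input[CONF_HOST]},
        )
    return self.async_show_form(
        step_id="reconfigure",
        data_schema=vol.Schema({vol.Required(CONF_HOST): str}),
    )
```
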
### Device Discovery
- **Manifest Configuration**: Add the discovery method (zeroconf, dhcp, etc.)
  ```json
  {
    "zeroconf": ["_mydevice._tcp.local."]
  }
  ```
- **Discovery Handler**: Implement the appropriate `async_step_*` method:
  ```python
  async def async_step_zeroconf(self, discovery_info):
      """Handle zeroconf discovery."""
      await self.async_set_unique_id(discovery_info.properties["serialno"])
      self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host})
  ```
- **Network Updates**: Use discovery to update dynamic IP addresses

### Network Discovery Implementation
- **Zeroconf/mDNS**: Use async instances
  ```python
  aiozc = await zeroconf.async_get_async_instance(hass)
  ```
- **SSDP Discovery**: Register callbacks with cleanup
  ```python
  entry.async_on_unload(
      ssdp.async_register_callback(
          hass, _async_discovered_device,
          {"st": "urn:schemas-upnp-org:device:ZonePlayer:1"}
      )
  )
  ```

### Bluetooth Integration
- **Manifest Dependencies**: Add `bluetooth_adapters` to dependencies
- **Connectable**: Set `"connectable": true` for connection-required devices
- **Scanner Usage**: Always use the shared scanner instance
  ```python
  scanner = bluetooth.async_get_scanner(hass)
  entry.async_on_unload(
      bluetooth.async_register_callback(
          hass, _async_discovered_device,
          {"service_uuid": "example_uuid"},
          bluetooth.BluetoothScanningMode.ACTIVE
      )
  )
  ```
- **Connection Handling**: Never reuse `BleakClient` instances, use 10+ second timeouts

### Setup Validation
- **Test Before Setup**: Verify the integration can be set up in `async_setup_entry`
- **Exception Handling** (mapped in the sketch below):
  - `ConfigEntryNotReady`: Device offline or temporary failure
  - `ConfigEntryAuthFailed`: Authentication issues
  - `ConfigEntryError`: Unresolvable setup problems
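
A sketch of that exception mapping inside `async_setup_entry`; the client class and its exceptions are illustrative:

```python
async def async_setup_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
    """Set up the integration, translating client errors into setup exceptions."""
    client = MyClient(entry.data[CONF_HOST])
    try:
        await client.connect()  # hypothetical connectivity check
    except MyAuthError as err:
        # Triggers the reauth flow.
        raise ConfigEntryAuthFailed("Invalid credentials") from err
    except MyConnectionError as err:
        # Temporary failure: Home Assistant retries setup with backoff.
        raise ConfigEntryNotReady(f"Device offline: {err}") from err
    entry.runtime_data = client
    return True
```
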
### Config Entry Unloading
- **Required**: Implement `async_unload_entry` for runtime removal/reload
- **Platform Unloading**: Use `hass.config_entries.async_unload_platforms`
- **Cleanup**: Register callbacks with `entry.async_on_unload`:
  ```python
  async def async_unload_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
      """Unload a config entry."""
      if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
          entry.runtime_data.listener()  # Clean up resources
      return unload_ok
  ```

### Service Actions
- **Registration**: Register all service actions in `async_setup`, NOT in `async_setup_entry`
- **Validation**: Check config entry existence and loaded state:
  ```python
  async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
      async def service_action(call: ServiceCall) -> ServiceResponse:
          if not (entry := hass.config_entries.async_get_entry(call.data[ATTR_CONFIG_ENTRY_ID])):
              raise ServiceValidationError("Entry not found")
          if entry.state is not ConfigEntryState.LOADED:
              raise ServiceValidationError("Entry not loaded")
  ```
- **Exception Handling**: Raise appropriate exceptions:
  ```python
  # For invalid input
  if end_date < start_date:
      raise ServiceValidationError("End date must be after start date")

  # For service errors
  try:
      await client.set_schedule(start_date, end_date)
  except MyConnectionError as err:
      raise HomeAssistantError("Could not connect to the schedule") from err
  ```

### Service Registration Patterns
- **Entity Services**: Register on platform setup
  ```python
  platform.async_register_entity_service(
      "my_entity_service",
      {vol.Required("parameter"): cv.string},
      "handle_service_method"
  )
  ```
- **Service Schema**: Always validate input
  ```python
  SERVICE_SCHEMA = vol.Schema({
      vol.Required("entity_id"): cv.entity_ids,
      vol.Required("parameter"): cv.string,
      vol.Optional("timeout", default=30): cv.positive_int,
  })
  ```
- **Services File**: Create `services.yaml` with descriptions and field definitions

### Polling
- Use the update coordinator pattern when possible
- **Polling intervals are NOT user-configurable**: Never add scan_interval, update_interval, or polling frequency options to config flows or config entries
- **Integration determines intervals**: Set `update_interval` programmatically based on integration logic, not user input
- **Minimum Intervals**:
  - Local network: 5 seconds
  - Cloud services: 60 seconds
- **Parallel Updates**: Specify the number of concurrent updates:
  ```python
  PARALLEL_UPDATES = 1  # Serialize updates to prevent overwhelming device
  # OR
  PARALLEL_UPDATES = 0  # Unlimited (for coordinator-based or read-only)
  ```

## Entity Development

### Unique IDs
- **Required**: Every entity must have a unique ID for registry tracking
- Must be unique per platform (not per integration)
- Don't include the integration domain or platform in the ID
- **Implementation**:
  ```python
  class MySensor(SensorEntity):
      def __init__(self, device_id: str) -> None:
          self._attr_unique_id = f"{device_id}_temperature"
  ```

**Acceptable ID Sources**:
- Device serial numbers
- MAC addresses (formatted using `format_mac` from the device registry; see the sketch below)
- Physical identifiers (printed/EEPROM)
- Config entry ID as last resort: `f"{entry.entry_id}-battery"`

**Never Use**:
- IP addresses, hostnames, URLs
- Device names
- Email addresses, usernames
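
For the MAC-address case above, a short sketch using the device-registry helper; the entity class is illustrative:

```python
from homeassistant.helpers.device_registry import format_mac


class MyNetworkSensor(SensorEntity):
    def __init__(self, mac: str) -> None:
        # format_mac normalizes formats like "AA:BB:CC:DD:EE:FF" or
        # "aabbcc-ddeeff" to lowercase colon-separated form.
        self._attr_unique_id = format_mac(mac)
```
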
### Entity Descriptions
- **Lambda/Anonymous Functions**: Often used in EntityDescription for value transformation
- **Multiline Lambdas**: When lambdas exceed the line length, wrap them in parentheses for readability
- **Bad pattern**:
  ```python
  SensorEntityDescription(
      key="temperature",
      name="Temperature",
      value_fn=lambda data: round(data["temp_value"] * 1.8 + 32, 1) if data.get("temp_value") is not None else None,  # ❌ Too long
  )
  ```
- **Good pattern**:
  ```python
  SensorEntityDescription(
      key="temperature",
      name="Temperature",
      value_fn=lambda data: (  # ✅ Parenthesis on same line as lambda
          round(data["temp_value"] * 1.8 + 32, 1)
          if data.get("temp_value") is not None
          else None
      ),
  )
  ```

### Entity Naming
- **Use has_entity_name**: Set `_attr_has_entity_name = True`
- **For specific fields**:
  ```python
  class MySensor(SensorEntity):
      _attr_has_entity_name = True

      def __init__(self, device: Device, field: str) -> None:
          self._attr_device_info = DeviceInfo(
              identifiers={(DOMAIN, device.id)},
              name=device.name,
          )
          self._attr_name = field  # e.g., "temperature", "humidity"
  ```
- **For the device itself**: Set `_attr_name = None`

### Event Lifecycle Management
- **Subscribe in `async_added_to_hass`**:
  ```python
  async def async_added_to_hass(self) -> None:
      """Subscribe to events."""
      self.async_on_remove(
          self.client.events.subscribe("my_event", self._handle_event)
      )
  ```
- **Unsubscribe in `async_will_remove_from_hass`** if not using `async_on_remove`
- Never subscribe in `__init__` or other methods

### State Handling
- Unknown values: Use `None` (not "unknown" or "unavailable")
- Availability: Implement the `available()` property instead of using an "unavailable" state

### Entity Availability
- **Mark Unavailable**: When data cannot be fetched from the device/service
- **Coordinator Pattern**:
  ```python
  @property
  def available(self) -> bool:
      """Return if entity is available."""
      return super().available and self.identifier in self.coordinator.data
  ```
- **Direct Update Pattern**:
  ```python
  async def async_update(self) -> None:
      """Update entity."""
      try:
          data = await self.client.get_data()
      except MyException:
          self._attr_available = False
      else:
          self._attr_available = True
          self._attr_native_value = data.value
  ```

### Extra State Attributes
- All attribute keys must always be present
- Unknown values: Use `None`
- Provide descriptive attributes (see the sketch below)
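
A minimal sketch of those attribute rules; the attribute names are illustrative:

```python
@property
def extra_state_attributes(self) -> dict[str, Any]:
    """Return extra attributes; every key is always present, unknown values are None."""
    return {
        "last_sync": self._last_sync,        # datetime | None when unknown
        "signal_strength": self._rssi,       # int | None when unknown
    }
```
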
## Device Management

### Device Registry
- **Create Devices**: Group related entities under devices
- **Device Info**: Provide comprehensive metadata:
  ```python
  _attr_device_info = DeviceInfo(
      connections={(CONNECTION_NETWORK_MAC, device.mac)},
      identifiers={(DOMAIN, device.id)},
      name=device.name,
      manufacturer="My Company",
      model="My Sensor",
      sw_version=device.version,
  )
  ```
- For services: Add `entry_type=DeviceEntryType.SERVICE`

### Dynamic Device Addition
- **Auto-detect New Devices**: After initial setup
- **Implementation Pattern**:
  ```python
  def _check_device() -> None:
      current_devices = set(coordinator.data)
      new_devices = current_devices - known_devices
      if new_devices:
          known_devices.update(new_devices)
          async_add_entities([MySensor(coordinator, device_id) for device_id in new_devices])

  entry.async_on_unload(coordinator.async_add_listener(_check_device))
  ```

### Stale Device Removal
- **Auto-remove**: When devices disappear from the hub/account
- **Device Registry Update**:
  ```python
  device_registry.async_update_device(
      device_id=device.id,
      remove_config_entry_id=self.config_entry.entry_id,
  )
  ```
- **Manual Deletion**: Implement `async_remove_config_entry_device` when needed

### Entity Categories
- **Required**: Assign an appropriate category to entities
- **Implementation**: Set `_attr_entity_category`
  ```python
  class MySensor(SensorEntity):
      _attr_entity_category = EntityCategory.DIAGNOSTIC
  ```
- Categories include: `DIAGNOSTIC` for system/technical information

### Device Classes
- **Use When Available**: Set the appropriate device class for the entity type
  ```python
  class MyTemperatureSensor(SensorEntity):
      _attr_device_class = SensorDeviceClass.TEMPERATURE
  ```
- Provides context for: unit conversion, voice control, UI representation

### Disabled by Default
- **Disable Noisy/Less Popular Entities**: Reduce resource usage
  ```python
  class MySignalStrengthSensor(SensorEntity):
      _attr_entity_registry_enabled_default = False
  ```
- Target: frequently changing states, technical diagnostics

### Entity Translations
- **Required with has_entity_name**: Support international users
- **Implementation**:
  ```python
  class MySensor(SensorEntity):
      _attr_has_entity_name = True
      _attr_translation_key = "phase_voltage"
  ```
- Create `strings.json` with translations:
  ```json
  {
    "entity": {
      "sensor": {
        "phase_voltage": {
          "name": "Phase voltage"
        }
      }
    }
  }
  ```

### Exception Translations (Gold)
- **Translatable Errors**: Use translation keys for user-facing exceptions
- **Implementation**:
  ```python
  raise ServiceValidationError(
      translation_domain=DOMAIN,
      translation_key="end_date_before_start_date",
  )
  ```
- Add to `strings.json`:
  ```json
  {
    "exceptions": {
      "end_date_before_start_date": {
        "message": "The end date cannot be before the start date."
      }
    }
  }
  ```

### Icon Translations (Gold)
- **Dynamic Icons**: Support state and range-based icon selection
- **State-based Icons**:
  ```json
  {
    "entity": {
      "sensor": {
        "tree_pollen": {
          "default": "mdi:tree",
          "state": {
            "high": "mdi:tree-outline"
          }
        }
      }
    }
  }
  ```
- **Range-based Icons** (for numeric values):
  ```json
  {
    "entity": {
      "sensor": {
        "battery_level": {
          "default": "mdi:battery-unknown",
          "range": {
            "0": "mdi:battery-outline",
            "90": "mdi:battery-90",
            "100": "mdi:battery"
          }
        }
      }
    }
  }
  ```
## Testing Requirements
|
||||
|
||||
- **Location**: `tests/components/{domain}/`
|
||||
- **Coverage Requirement**: Above 95% test coverage for all modules
|
||||
- **Best Practices**:
|
||||
- Use pytest fixtures from `tests.common`
|
||||
- Mock all external dependencies
|
||||
- Use snapshots for complex data structures
|
||||
- Follow existing test patterns
|
||||
|
||||
### Config Flow Testing
|
||||
- **100% Coverage Required**: All config flow paths must be tested
|
||||
- **Patch Boundaries**: Only patch library or client methods when testing config flows. Do not patch methods defined in `config_flow.py`; exercise the flow logic end-to-end.
|
||||
- **Test Scenarios**:
|
||||
- All flow initiation methods (user, discovery, import)
|
||||
- Successful configuration paths
|
||||
- Error recovery scenarios
|
||||
- Prevention of duplicate entries
|
||||
- Flow completion after errors
|
||||
- Reauthentication/reconfigure flows
|
||||
|
||||
### Testing
|
||||
- **Integration-specific tests** (recommended):
|
||||
```bash
|
||||
pytest ./tests/components/<integration_domain> \
|
||||
--cov=homeassistant.components.<integration_domain> \
|
||||
--cov-report term-missing \
|
||||
--durations-min=1 \
|
||||
--durations=0 \
|
||||
--numprocesses=auto
|
||||
```
|
||||
|
||||
### Testing Best Practices
|
||||
- **Never access `hass.data` directly** - Use fixtures and proper integration setup instead
|
||||
- **Use snapshot testing** - For verifying entity states and attributes
|
||||
- **Test through integration setup** - Don't test entities in isolation
|
||||
- **Mock external APIs** - Use fixtures with realistic JSON data
|
||||
- **Verify registries** - Ensure entities are properly registered with devices
|
||||
|
||||
### Config Flow Testing Template
|
||||
```python
|
||||
async def test_user_flow_success(hass, mock_api):
|
||||
"""Test successful user flow."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
assert result["type"] == FlowResultType.FORM
|
||||
assert result["step_id"] == "user"
|
||||
|
||||
# Test form submission
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], user_input=TEST_USER_INPUT
|
||||
)
|
||||
assert result["type"] == FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == "My Device"
|
||||
assert result["data"] == TEST_USER_INPUT
|
||||
|
||||
async def test_flow_connection_error(hass, mock_api_error):
|
||||
"""Test connection error handling."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], user_input=TEST_USER_INPUT
|
||||
)
|
||||
assert result["type"] == FlowResultType.FORM
|
||||
assert result["errors"] == {"base": "cannot_connect"}
|
||||
```
|
||||
|
||||
### Entity Testing Patterns
|
||||
```python
|
||||
@pytest.fixture
|
||||
def platforms() -> list[Platform]:
|
||||
"""Overridden fixture to specify platforms to test."""
|
||||
return [Platform.SENSOR] # Or another specific platform as needed.
|
||||
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration")
|
||||
async def test_entities(
|
||||
hass: HomeAssistant,
|
||||
snapshot: SnapshotAssertion,
|
||||
entity_registry: er.EntityRegistry,
|
||||
device_registry: dr.DeviceRegistry,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test the sensor entities."""
|
||||
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
|
||||
|
||||
# Ensure entities are correctly assigned to device
|
||||
device_entry = device_registry.async_get_device(
|
||||
identifiers={(DOMAIN, "device_unique_id")}
|
||||
)
|
||||
assert device_entry
|
||||
entity_entries = er.async_entries_for_config_entry(
|
||||
entity_registry, mock_config_entry.entry_id
|
||||
)
|
||||
for entity_entry in entity_entries:
|
||||
assert entity_entry.device_id == device_entry.id
|
||||
```
|
||||
|
||||
### Mock Patterns
```python
# Modern integration fixture setup
@pytest.fixture
def mock_config_entry() -> MockConfigEntry:
    """Return the default mocked config entry."""
    return MockConfigEntry(
        title="My Integration",
        domain=DOMAIN,
        data={CONF_HOST: "127.0.0.1", CONF_API_KEY: "test_key"},
        unique_id="device_unique_id",
    )


@pytest.fixture
def mock_device_api() -> Generator[MagicMock]:
    """Return a mocked device API."""
    with patch(
        "homeassistant.components.my_integration.MyDeviceAPI", autospec=True
    ) as api_mock:
        api = api_mock.return_value
        api.get_data.return_value = MyDeviceData.from_json(
            load_fixture("device_data.json", DOMAIN)
        )
        yield api


@pytest.fixture
def platforms() -> list[Platform]:
    """Fixture to specify platforms to test."""
    return PLATFORMS


@pytest.fixture
async def init_integration(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_device_api: MagicMock,
    platforms: list[Platform],
) -> MockConfigEntry:
    """Set up the integration for testing."""
    mock_config_entry.add_to_hass(hass)

    with patch("homeassistant.components.my_integration.PLATFORMS", platforms):
        await hass.config_entries.async_setup(mock_config_entry.entry_id)
        await hass.async_block_till_done()

    return mock_config_entry
```

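A minimal sketch of how these fixtures compose in a test module; the specific assertion is illustrative, not taken from this repository:

```python
from homeassistant.config_entries import ConfigEntryState


async def test_setup(
    hass: HomeAssistant,
    init_integration: MockConfigEntry,
    mock_device_api: MagicMock,
) -> None:
    """Verify the entry loads and the mocked API was actually queried."""
    assert init_integration.state is ConfigEntryState.LOADED
    mock_device_api.get_data.assert_called()
```
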
## Debugging & Troubleshooting

### Common Issues & Solutions
- **Integration won't load**: Check `manifest.json` syntax and required fields
- **Entities not appearing**: Verify the `unique_id` and `has_entity_name` implementation (see the sketch after this list)
- **Config flow errors**: Check `strings.json` entries and error handling
- **Discovery not working**: Verify manifest discovery configuration and callbacks
- **Tests failing**: Check mock setup and async context

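For the "entities not appearing" case, the usual culprits are a missing stable `unique_id` or a missing `has_entity_name` flag. A minimal sketch of an entity wired up so the registry can track it, assuming the integration's `DOMAIN` constant is in scope; the `MySensor` class, `serial` parameter, and translation key are illustrative placeholders:

```python
from homeassistant.components.sensor import SensorEntity
from homeassistant.helpers.device_registry import DeviceInfo


class MySensor(SensorEntity):
    """Sensor the entity registry can track across restarts."""

    _attr_has_entity_name = True  # Entity name is combined with the device name
    _attr_translation_key = "my_sensor"  # Display name comes from strings.json

    def __init__(self, serial: str) -> None:
        """Initialize with a unique ID derived from an immutable device serial."""
        self._attr_unique_id = f"{serial}_my_sensor"
        self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, serial)})
```
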
### Debug Logging Setup
```python
# Enable debug logging in tests
caplog.set_level(logging.DEBUG, logger="my_integration")

# In integration code - use proper logging
_LOGGER = logging.getLogger(__name__)
_LOGGER.debug("Processing data: %s", data)  # Use lazy logging
```

### Validation Commands
```bash
# Check specific integration
python -m script.hassfest --integration-path homeassistant/components/my_integration

# Validate quality scale
# Check quality_scale.yaml against current rules

# Run integration tests with coverage
pytest ./tests/components/my_integration \
  --cov=homeassistant.components.my_integration \
  --cov-report term-missing
```

@@ -1,19 +0,0 @@
# Integration Diagnostics

Platform exists as `homeassistant/components/<domain>/diagnostics.py`.

- **Required**: Implement diagnostic data collection
- **Implementation**:
  ```python
  TO_REDACT = [CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE]


  async def async_get_config_entry_diagnostics(
      hass: HomeAssistant, entry: MyConfigEntry
  ) -> dict[str, Any]:
      """Return diagnostics for a config entry."""
      return {
          "entry_data": async_redact_data(entry.data, TO_REDACT),
          "data": entry.runtime_data.data,
      }
  ```
- **Security**: Never expose passwords, tokens, or sensitive coordinates
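A sketch of how the redaction can be exercised in a test, assuming the `init_integration` fixture from the testing patterns above; `get_diagnostics_for_config_entry` and `REDACTED` are the helpers core tests typically use, but verify the import paths against the current test suite:

```python
from homeassistant.components.diagnostics import REDACTED

from tests.components.diagnostics import get_diagnostics_for_config_entry


async def test_diagnostics(
    hass: HomeAssistant,
    hass_client: ClientSessionGenerator,
    init_integration: MockConfigEntry,
) -> None:
    """Ensure sensitive entry data never leaves the diagnostics endpoint."""
    diagnostics = await get_diagnostics_for_config_entry(
        hass, hass_client, init_integration
    )
    assert diagnostics["entry_data"][CONF_API_KEY] == REDACTED
```
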
@@ -1,55 +0,0 @@
# Repairs platform

Platform exists as `homeassistant/components/<domain>/repairs.py`.

- **Actionable Issues Required**: All repair issues must be actionable for end users
- **Issue Content Requirements**:
  - Clearly explain what is happening
  - Provide specific steps users need to take to resolve the issue
  - Use friendly, helpful language
  - Include relevant context (device names, error details, etc.)
- **Implementation**:
  ```python
  ir.async_create_issue(
      hass,
      DOMAIN,
      "outdated_version",
      is_fixable=False,
      issue_domain=DOMAIN,
      severity=ir.IssueSeverity.ERROR,
      translation_key="outdated_version",
  )
  ```
- **Translation Strings Requirements**: Must contain user-actionable text in `strings.json`:
  ```json
  {
    "issues": {
      "outdated_version": {
        "title": "Device firmware is outdated",
        "description": "Your device firmware version {current_version} is below the minimum required version {min_version}. To fix this issue: 1) Open the manufacturer's mobile app, 2) Navigate to device settings, 3) Select 'Update Firmware', 4) Wait for the update to complete, then 5) Restart Home Assistant."
      }
    }
  }
  ```
- **String Content Must Include**:
  - What the problem is
  - Why it matters
  - Exact steps to resolve (numbered list when multiple steps)
  - What to expect after following the steps
- **Avoid Vague Instructions**: Don't just say "update firmware" - provide specific steps
- **Severity Guidelines**:
  - `CRITICAL`: Reserved for extreme scenarios only
  - `ERROR`: Requires immediate user attention
  - `WARNING`: Indicates future potential breakage
- **Additional Attributes**:
  ```python
  ir.async_create_issue(
      hass, DOMAIN, "issue_id",
      breaks_in_ha_version="2024.1.0",
      is_fixable=True,
      is_persistent=True,
      severity=ir.IssueSeverity.ERROR,
      translation_key="issue_description",
  )
  ```
- Only create issues for problems users can potentially resolve
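When an issue is created with `is_fixable=True`, the platform is also expected to provide a fix flow. A minimal sketch using the documented repairs hooks (`async_create_fix_flow`, `RepairsFlow`, `ConfirmRepairFlow`); the cleanup step is a placeholder, not this repository's code:

```python
import voluptuous as vol

from homeassistant import data_entry_flow
from homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow
from homeassistant.core import HomeAssistant


class OutdatedVersionRepairFlow(RepairsFlow):
    """Handler for an issue fixing flow."""

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> data_entry_flow.FlowResult:
        """Handle the first step of a fix flow."""
        if user_input is not None:
            # Placeholder: apply the actual fix here before confirming.
            return self.async_create_entry(title="", data={})
        return self.async_show_form(step_id="init", data_schema=vol.Schema({}))


async def async_create_fix_flow(
    hass: HomeAssistant, issue_id: str, data: dict[str, str] | None
) -> RepairsFlow:
    """Create a flow to fix a fixable issue."""
    if issue_id == "outdated_version":
        return OutdatedVersionRepairFlow()
    return ConfirmRepairFlow()
```
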
@@ -11,10 +11,9 @@

This repository contains the core of Home Assistant, a Python 3 based home automation application.

## Code Review Guidelines
## Git Commit Guidelines

**Git commit practices during review:**
- **Do NOT amend, squash, or rebase commits after review has started** - Reviewers need to see what changed since their last review
- **Do NOT amend, squash, or rebase commits that have already been pushed to the PR branch after the PR is opened** - Reviewers need to follow the commit history, as well as see what changed since their last review

## Development Commands

@@ -33,7 +32,10 @@ Prefer concrete types (for example, `HomeAssistant`, `MockConfigEntry`, etc.) ov

Integrations with Platinum or Gold level in the Integration Quality Scale reflect a high standard of code quality and maintainability. When looking for examples of something, these are good places to start. The level is indicated in the manifest.json of the integration.

When reviewing entity actions, do not suggest extra defensive checks for input fields that are already validated by Home Assistant's service/action schemas and entity selection filters. Suggest additional guards only when data bypasses those validators or is transformed into a less-safe form.
When validation guarantees a dict key exists, prefer direct key access (`data["key"]`) instead of `.get("key")` so contract violations are surfaced instead of silently masked.

# Skills

- Home Assistant Integration knowledge: .claude/skills/integrations/SKILL.md
- ha-integration-knowledge: .claude/skills/ha-integration-knowledge/SKILL.md

@@ -0,0 +1,216 @@
{
  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
  "extends": ["config:recommended"],

  "enabledManagers": [
    "pep621",
    "pip_requirements",
    "pre-commit",
    "regex",
    "homeassistant-manifest"
  ],

  "pre-commit": {
    "enabled": true
  },

  "pip_requirements": {
    "managerFilePatterns": [
      "/(^|/)requirements[\\w_-]*\\.txt$/",
      "/(^|/)homeassistant/package_constraints\\.txt$/"
    ]
  },

  "homeassistant-manifest": {
    "managerFilePatterns": [
      "/^homeassistant/components/[^/]+/manifest\\.json$/"
    ]
  },

  "regexManagers": [
    {
      "description": "Update ruff required-version in pyproject.toml",
      "managerFilePatterns": ["/^pyproject\\.toml$/"],
      "matchStrings": ["required-version = \">=(?<currentValue>[\\d.]+)\""],
      "depNameTemplate": "ruff",
      "datasourceTemplate": "pypi"
    }
  ],

  "minimumReleaseAge": "7 days",
  "prConcurrentLimit": 10,
  "prHourlyLimit": 2,
  "schedule": ["before 6am"],

  "semanticCommits": "disabled",
  "commitMessageAction": "Update",
  "commitMessageTopic": "{{depName}}",
  "commitMessageExtra": "to {{newVersion}}",

  "automerge": false,

  "vulnerabilityAlerts": {
    "enabled": false
  },

  "packageRules": [
    {
      "description": "Deny all by default — allowlist below re-enables specific packages",
      "matchPackageNames": ["*"],
      "enabled": false
    },
    {
      "description": "Core runtime dependencies (allowlisted)",
      "matchPackageNames": [
        "aiohttp",
        "aiohttp-fast-zlib",
        "aiohttp_cors",
        "aiohttp-asyncmdnsresolver",
        "yarl",
        "httpx",
        "requests",
        "urllib3",
        "certifi",
        "orjson",
        "PyYAML",
        "Jinja2",
        "cryptography",
        "pyOpenSSL",
        "PyJWT",
        "SQLAlchemy",
        "Pillow",
        "attrs",
        "uv",
        "voluptuous",
        "voluptuous-serialize",
        "voluptuous-openapi",
        "zeroconf"
      ],
      "enabled": true,
      "labels": ["dependency", "core"]
    },
    {
      "description": "Common Python utilities (allowlisted)",
      "matchPackageNames": [
        "astral",
        "atomicwrites-homeassistant",
        "audioop-lts",
        "awesomeversion",
        "bcrypt",
        "ciso8601",
        "cronsim",
        "defusedxml",
        "fnv-hash-fast",
        "getmac",
        "ical",
        "ifaddr",
        "lru-dict",
        "mutagen",
        "propcache",
        "pyserial",
        "python-slugify",
        "PyTurboJPEG",
        "securetar",
        "standard-aifc",
        "standard-telnetlib",
        "ulid-transform",
        "url-normalize",
        "xmltodict"
      ],
      "enabled": true,
      "labels": ["dependency"]
    },
    {
      "description": "Home Assistant ecosystem packages (core-maintained, no cooldown)",
      "matchPackageNames": [
        "hassil",
        "home-assistant-bluetooth",
        "home-assistant-frontend",
        "home-assistant-intents",
        "infrared-protocols"
      ],
      "enabled": true,
      "minimumReleaseAge": null,
      "labels": ["dependency", "core"]
    },
    {
      "description": "Test dependencies (allowlisted)",
      "matchPackageNames": [
        "pytest",
        "pytest-asyncio",
        "pytest-aiohttp",
        "pytest-cov",
        "pytest-freezer",
        "pytest-github-actions-annotate-failures",
        "pytest-socket",
        "pytest-sugar",
        "pytest-timeout",
        "pytest-unordered",
        "pytest-picked",
        "pytest-xdist",
        "pylint",
        "pylint-per-file-ignores",
        "astroid",
        "coverage",
        "freezegun",
        "syrupy",
        "respx",
        "requests-mock",
        "ruff",
        "codespell",
        "yamllint",
        "zizmor"
      ],
      "enabled": true,
      "labels": ["dependency"]
    },
    {
      "description": "For types-* stubs, only allow patch updates. Major/minor bumps track the upstream runtime package version and must be manually coordinated with the corresponding pin.",
      "matchPackageNames": ["/^types-/"],
      "matchUpdateTypes": ["patch"],
      "enabled": true,
      "labels": ["dependency"]
    },
    {
      "description": "Pre-commit hook repos (allowlisted, matched by owner/repo)",
      "matchPackageNames": [
        "astral-sh/ruff-pre-commit",
        "codespell-project/codespell",
        "adrienverge/yamllint",
        "zizmorcore/zizmor-pre-commit"
      ],
      "enabled": true,
      "labels": ["dependency"]
    },
    {
      "description": "Group ruff pre-commit hook with its PyPI twin into one PR",
      "matchPackageNames": ["astral-sh/ruff-pre-commit", "ruff"],
      "groupName": "ruff",
      "groupSlug": "ruff"
    },
    {
      "description": "Group codespell pre-commit hook with its PyPI twin into one PR",
      "matchPackageNames": ["codespell-project/codespell", "codespell"],
      "groupName": "codespell",
      "groupSlug": "codespell"
    },
    {
      "description": "Group yamllint pre-commit hook with its PyPI twin into one PR",
      "matchPackageNames": ["adrienverge/yamllint", "yamllint"],
      "groupName": "yamllint",
      "groupSlug": "yamllint"
    },
    {
      "description": "Group zizmor pre-commit hook with its PyPI twin into one PR",
      "matchPackageNames": ["zizmorcore/zizmor-pre-commit", "zizmor"],
      "groupName": "zizmor",
      "groupSlug": "zizmor"
    },
    {
      "description": "Group pylint with astroid (their versions are linked and must move together)",
      "matchPackageNames": ["pylint", "astroid"],
      "groupName": "pylint",
      "groupSlug": "pylint"
    }
  ]
}

@@ -47,10 +47,6 @@ jobs:
        with:
          python-version-file: ".python-version"

      - name: Get information
        id: info
        uses: home-assistant/actions/helpers/info@master # zizmor: ignore[unpinned-uses]

      - name: Get version
        id: version
        uses: home-assistant/actions/helpers/version@master # zizmor: ignore[unpinned-uses]
@@ -80,7 +76,7 @@ jobs:
        run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

      - name: Upload translations
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: translations
          path: translations.tar.gz
@@ -112,7 +108,7 @@ jobs:

      - name: Download nightly wheels of frontend
        if: needs.init.outputs.channel == 'dev'
        uses: dawidd6/action-download-artifact@8a338493df3d275e4a7a63bcff3b8fe97e51a927 # v19
        uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: home-assistant/frontend
@@ -123,7 +119,7 @@ jobs:

      - name: Download nightly wheels of intents
        if: needs.init.outputs.channel == 'dev'
        uses: dawidd6/action-download-artifact@8a338493df3d275e4a7a63bcff3b8fe97e51a927 # v19
        uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: OHF-Voice/intents-package
@@ -342,19 +338,19 @@ jobs:
        registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
    steps:
      - name: Install Cosign
        uses: sigstore/cosign-installer@ba7bc0a3fef59531c69a25acd34668d6d3fe6f22 # v4.1.0
        uses: sigstore/cosign-installer@cad07c2e89fa2edd6e2d7bab4c1aa38e53f76003 # v4.1.1
        with:
          cosign-release: "v2.5.3"

      - name: Login to DockerHub
        if: matrix.registry == 'docker.io/homeassistant'
        uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
        uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to GitHub Container Registry
        uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
        uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -503,7 +499,7 @@ jobs:
          python -m build

      - name: Upload package to PyPI
        uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
        uses: pypa/gh-action-pypi-publish@cef221092ed1bacb1cc03d23a2d87d1d172e277b # v1.14.0
        with:
          skip-existing: true

@@ -527,14 +523,14 @@ jobs:
          persist-credentials: false

      - name: Login to GitHub Container Registry
        uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
        uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build Docker image
        uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
        uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
        with:
          context: . # So action will not pull the repository again
          file: ./script/hassfest/docker/Dockerfile
@@ -547,7 +543,7 @@ jobs:
      - name: Push Docker image
        if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
        id: push
        uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
        uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
        with:
          context: . # So action will not pull the repository again
          file: ./script/hassfest/docker/Dockerfile

@@ -50,9 +50,11 @@ env:
  # - 10.10.3 is the latest (as of 6 Feb 2023)
  # 10.11 is the latest long-term-support
  # - 10.11.2 is the version currently shipped with Synology (as of 11 Oct 2023)
  # 11.4 is an LTS with support until May 2029
  # - 11.4.9 is used in Alpine 3.23 (used in latest HA base images as of 11 Apr 2026)
  # mysql 8.0.32 does not always behave the same as MariaDB
  # and some queries that work on MariaDB do not work on MySQL
  MARIADB_VERSIONS: "['mariadb:10.3.32','mariadb:10.6.10','mariadb:10.10.3','mariadb:10.11.2','mysql:8.0.32']"
  MARIADB_VERSIONS: "['mariadb:10.3.32','mariadb:10.6.10','mariadb:10.10.3','mariadb:10.11.2','mariadb:11.4.9','mysql:8.0.32']"
  # 12 is the oldest supported version
  # - 12.14 is the latest (as of 9 Feb 2023)
  # 15 is the latest version
@@ -280,7 +282,7 @@ jobs:
          echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
          echo "::add-matcher::.github/workflows/matchers/codespell.json"
      - name: Run prek
        uses: j178/prek-action@53276d8b0d10f8b6672aa85b4588c6921d0370cc # v2.0.1
        uses: j178/prek-action@cbc2f23eb5539cf20d82d1aabd0d0ecbcc56f4e3 # v2.0.2
        env:
          PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config,zizmor
          RUFF_OUTPUT_FORMAT: github
@@ -301,7 +303,7 @@ jobs:
        with:
          persist-credentials: false
      - name: Run zizmor
        uses: j178/prek-action@53276d8b0d10f8b6672aa85b4588c6921d0370cc # v2.0.1
        uses: j178/prek-action@cbc2f23eb5539cf20d82d1aabd0d0ecbcc56f4e3 # v2.0.2
        with:
          extra-args: --all-files zizmor

@@ -364,7 +366,7 @@ jobs:
          echo "key=uv-${UV_CACHE_VERSION}-${uv_version}-${HA_SHORT_VERSION}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore base Python virtual environment
        id: cache-venv
        uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: venv
          key: >-
@@ -372,7 +374,7 @@ jobs:
            needs.info.outputs.python_cache_key }}
      - name: Restore uv wheel cache
        if: steps.cache-venv.outputs.cache-hit != 'true'
        uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: ${{ env.UV_CACHE_DIR }}
          key: >-
@@ -384,7 +386,7 @@ jobs:
            env.HA_SHORT_VERSION }}-
      - name: Check if apt cache exists
        id: cache-apt-check
        uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
          path: |
@@ -430,7 +432,7 @@ jobs:
          fi
      - name: Save apt cache
        if: steps.cache-apt-check.outputs.cache-hit != 'true'
        uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/save@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -456,7 +458,7 @@ jobs:
          python --version
          uv pip freeze >> pip_freeze.txt
      - name: Upload pip_freeze artifact
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: pip-freeze-${{ matrix.python-version }}
          path: pip_freeze.txt
@@ -484,7 +486,7 @@ jobs:
      && github.event.inputs.audit-licenses-only != 'true'
    steps:
      - name: Restore apt cache
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -515,7 +517,7 @@ jobs:
          check-latest: true
      - name: Restore full Python virtual environment
        id: cache-venv
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: venv
          fail-on-cache-miss: true
@@ -552,7 +554,7 @@ jobs:
          check-latest: true
      - name: Restore full Python virtual environment
        id: cache-venv
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: venv
          fail-on-cache-miss: true
@@ -643,7 +645,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: venv
          fail-on-cache-miss: true
@@ -657,7 +659,7 @@ jobs:
          . venv/bin/activate
          python -m script.licenses extract --output-file=licenses-${PYTHON_VERSION}.json
      - name: Upload licenses
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
          path: licenses-${{ matrix.python-version }}.json
@@ -694,7 +696,7 @@ jobs:
          check-latest: true
      - name: Restore full Python virtual environment
        id: cache-venv
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: venv
          fail-on-cache-miss: true
@@ -747,7 +749,7 @@ jobs:
          check-latest: true
      - name: Restore full Python virtual environment
        id: cache-venv
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: venv
          fail-on-cache-miss: true
@@ -804,7 +806,7 @@ jobs:
          echo "key=mypy-${MYPY_CACHE_VERSION}-${mypy_version}-${HA_SHORT_VERSION}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore full Python virtual environment
        id: cache-venv
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: venv
          fail-on-cache-miss: true
@@ -812,7 +814,7 @@ jobs:
            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Restore mypy cache
        uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: .mypy_cache
          key: >-
@@ -854,7 +856,7 @@ jobs:
      - base
    steps:
      - name: Restore apt cache
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -887,7 +889,7 @@ jobs:
          check-latest: true
      - name: Restore full Python virtual environment
        id: cache-venv
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: venv
          fail-on-cache-miss: true
@@ -901,7 +903,7 @@ jobs:
          . venv/bin/activate
          python -m script.split_tests ${TEST_GROUP_COUNT} tests
      - name: Upload pytest_buckets
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: pytest_buckets
          path: pytest_buckets.txt
@@ -930,7 +932,7 @@ jobs:
        group: ${{ fromJson(needs.info.outputs.test_groups) }}
    steps:
      - name: Restore apt cache
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -964,7 +966,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1020,14 +1022,14 @@ jobs:
          2>&1 | tee pytest-${PYTHON_VERSION}-${TEST_GROUP}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-full.conclusion == 'failure'
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
          path: pytest-*.txt
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
          path: coverage.xml
@@ -1040,7 +1042,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
          path: junit.xml
@@ -1062,7 +1064,9 @@ jobs:
          - 3306:3306
        env:
          MYSQL_ROOT_PASSWORD: password
        options: --health-cmd="mysqladmin ping -uroot -ppassword" --health-interval=5s --health-timeout=2s --health-retries=3
        options: >-
          --health-cmd="if command -v mariadb-admin >/dev/null; then mariadb-admin ping -uroot -ppassword; else mysqladmin ping -uroot -ppassword; fi"
          --health-interval=5s --health-timeout=2s --health-retries=3
    needs:
      - info
      - base
@@ -1080,7 +1084,7 @@ jobs:
        mariadb-group: ${{ fromJson(needs.info.outputs.mariadb_groups) }}
    steps:
      - name: Restore apt cache
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -1115,7 +1119,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1177,7 +1181,7 @@ jobs:
          2>&1 | tee pytest-${PYTHON_VERSION}-${mariadb}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.mariadb }}
@@ -1185,7 +1189,7 @@ jobs:
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: coverage-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.mariadb }}
@@ -1199,7 +1203,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: test-results-mariadb-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.mariadb }}
@@ -1238,7 +1242,7 @@ jobs:
        postgresql-group: ${{ fromJson(needs.info.outputs.postgresql_groups) }}
    steps:
      - name: Restore apt cache
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -1275,7 +1279,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1338,7 +1342,7 @@ jobs:
          2>&1 | tee pytest-${PYTHON_VERSION}-${postgresql}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.postgresql }}
@@ -1346,7 +1350,7 @@ jobs:
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: coverage-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.postgresql }}
@@ -1360,7 +1364,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: test-results-postgres-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.postgresql }}
@@ -1392,7 +1396,7 @@ jobs:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'true'
        uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5.5.3
        uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
        with:
          fail_ci_if_error: true
          flags: full-suite
@@ -1421,7 +1425,7 @@ jobs:
        group: ${{ fromJson(needs.info.outputs.test_groups) }}
    steps:
      - name: Restore apt cache
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -1455,7 +1459,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1514,14 +1518,14 @@ jobs:
          2>&1 | tee pytest-${PYTHON_VERSION}-${TEST_GROUP}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
          path: pytest-*.txt
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
          path: coverage.xml
@@ -1534,7 +1538,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
          path: junit.xml
@@ -1563,7 +1567,7 @@ jobs:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'false'
        uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5.5.3
        uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
        with:
          fail_ci_if_error: true
          token: ${{ secrets.CODECOV_TOKEN }} # zizmor: ignore[secrets-outside-env]
@@ -1591,7 +1595,7 @@ jobs:
        with:
          pattern: test-results-*
      - name: Upload test results to Codecov
        uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5.5.3
        uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
        with:
          report_type: test_results
          fail_ci_if_error: true

@@ -28,11 +28,11 @@ jobs:
          persist-credentials: false

      - name: Initialize CodeQL
        uses: github/codeql-action/init@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
        uses: github/codeql-action/init@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2
        with:
          languages: python

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
        uses: github/codeql-action/analyze@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2
        with:
          category: "/language:python"

@@ -21,7 +21,7 @@ jobs:
    steps:
      - name: Check if integration label was added and extract details
        id: extract
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        with:
          script: |
            // Debug: Log the event payload
@@ -118,7 +118,7 @@ jobs:
      - name: Fetch similar issues
        id: fetch_similar
        if: steps.extract.outputs.should_continue == 'true'
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        env:
          INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
          CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}
@@ -285,7 +285,7 @@ jobs:
      - name: Post duplicate detection results
        id: post_results
        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        env:
          AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
          SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}

@@ -21,7 +21,7 @@ jobs:
    steps:
      - name: Check issue language
        id: detect_language
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        env:
          ISSUE_NUMBER: ${{ github.event.issue.number }}
          ISSUE_TITLE: ${{ github.event.issue.title }}
@@ -95,7 +95,7 @@ jobs:

      - name: Process non-English issues
        if: steps.detect_language.outputs.should_continue == 'true'
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        env:
          AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
          ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}

@@ -22,7 +22,7 @@ jobs:
      || github.event.issue.type.name == 'Opportunity'
    steps:
      - name: Add no-stale label
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        with:
          script: |
            await github.rest.issues.addLabels({
@@ -42,7 +42,7 @@ jobs:
    if: github.event.issue.type.name == 'Task'
    steps:
      - name: Check if user is authorized
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        with:
          script: |
            const issueAuthor = context.payload.issue.user.login;

@@ -74,7 +74,7 @@ jobs:
          ) > .env_file

      - name: Upload env_file
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: env_file
          path: ./.env_file
@@ -82,7 +82,7 @@ jobs:
          overwrite: true

      - name: Upload requirements_diff
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: requirements_diff
          path: ./requirements_diff.txt
@@ -94,7 +94,7 @@ jobs:
          python -m script.gen_requirements_all ci

      - name: Upload requirements_all_wheels
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: requirements_all_wheels
          path: ./requirements_all_wheels_*.txt

@@ -142,5 +142,6 @@ pytest_buckets.txt

# AI tooling
.claude/settings.local.json
.claude/worktrees/
.serena/

@@ -1,6 +1,6 @@
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.15.1
    rev: v0.15.10
    hooks:
      - id: ruff-check
        args:
@@ -8,7 +8,7 @@ repos:
      - id: ruff-format
        files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.(py|pyi)$
  - repo: https://github.com/codespell-project/codespell
    rev: v2.4.1
    rev: v2.4.2
    hooks:
      - id: codespell
        args:
@@ -18,7 +18,7 @@ repos:
        exclude_types: [csv, json, html]
        exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
  - repo: https://github.com/zizmorcore/zizmor-pre-commit
    rev: v1.23.1
    rev: v1.24.1
    hooks:
      - id: zizmor
        args:
@@ -36,7 +36,7 @@ repos:
          - --branch=master
          - --branch=rc
  - repo: https://github.com/adrienverge/yamllint.git
    rev: v1.37.1
    rev: v1.38.0
    hooks:
      - id: yamllint
  - repo: https://github.com/rbubley/mirrors-prettier
@@ -87,6 +87,13 @@ repos:
        language: script
        types: [text]
        files: ^(homeassistant/.+/manifest\.json|homeassistant/brands/.+\.json|pyproject\.toml|\.pre-commit-config\.yaml|script/gen_requirements_all\.py)$
      - id: gen_copilot_instructions
        name: gen_copilot_instructions
        entry: script/run-in-env.sh python3 -m script.gen_copilot_instructions
        pass_filenames: false
        language: script
        types: [text]
        files: ^(AGENTS\.md|\.claude/skills/(?!github-pr-reviewer/).+/SKILL\.md|\.github/copilot-instructions\.md|script/gen_copilot_instructions\.py)$
      - id: hassfest
        name: hassfest
        entry: script/run-in-env.sh python3 -m script.hassfest

@@ -46,6 +46,7 @@ homeassistant.components.accuweather.*
homeassistant.components.acer_projector.*
homeassistant.components.acmeda.*
homeassistant.components.actiontec.*
homeassistant.components.actron_air.*
homeassistant.components.adax.*
homeassistant.components.adguard.*
homeassistant.components.aftership.*
@@ -174,9 +175,11 @@ homeassistant.components.dnsip.*
homeassistant.components.doorbird.*
homeassistant.components.dormakaba_dkey.*
homeassistant.components.downloader.*
homeassistant.components.dropbox.*
homeassistant.components.droplet.*
homeassistant.components.dsmr.*
homeassistant.components.duckdns.*
homeassistant.components.duco.*
homeassistant.components.dunehd.*
homeassistant.components.duotecno.*
homeassistant.components.easyenergy.*
@@ -221,6 +224,7 @@ homeassistant.components.fronius.*
homeassistant.components.frontend.*
homeassistant.components.fujitsu_fglair.*
homeassistant.components.fully_kiosk.*
homeassistant.components.fumis.*
homeassistant.components.fyta.*
homeassistant.components.generic_hygrostat.*
homeassistant.components.generic_thermostat.*
@@ -331,6 +335,7 @@ homeassistant.components.letpot.*
homeassistant.components.lg_infrared.*
homeassistant.components.libre_hardware_monitor.*
homeassistant.components.lidarr.*
homeassistant.components.liebherr.*
homeassistant.components.lifx.*
homeassistant.components.light.*
homeassistant.components.linkplay.*
@@ -550,6 +555,7 @@ homeassistant.components.tcp.*
homeassistant.components.technove.*
homeassistant.components.tedee.*
homeassistant.components.telegram_bot.*
homeassistant.components.teleinfo.*
homeassistant.components.teslemetry.*
homeassistant.components.text.*
homeassistant.components.thethingsnetwork.*

@@ -2,10 +2,9 @@

This repository contains the core of Home Assistant, a Python 3 based home automation application.

## Code Review Guidelines
## Git Commit Guidelines

**Git commit practices during review:**
- **Do NOT amend, squash, or rebase commits after review has started** - Reviewers need to see what changed since their last review
- **Do NOT amend, squash, or rebase commits that have already been pushed to the PR branch after the PR is opened** - Reviewers need to follow the commit history, as well as see what changed since their last review

## Development Commands

@@ -23,3 +22,6 @@ Prefer concrete types (for example, `HomeAssistant`, `MockConfigEntry`, etc.) ov
## Good practices

Integrations with Platinum or Gold level in the Integration Quality Scale reflect a high standard of code quality and maintainability. When looking for examples of something, these are good places to start. The level is indicated in the manifest.json of the integration.

When reviewing entity actions, do not suggest extra defensive checks for input fields that are already validated by Home Assistant's service/action schemas and entity selection filters. Suggest additional guards only when data bypasses those validators or is transformed into a less-safe form.
When validation guarantees a dict key exists, prefer direct key access (`data["key"]`) instead of `.get("key")` so contract violations are surfaced instead of silently masked.

@@ -37,6 +37,13 @@ build.json @home-assistant/supervisor
# Other code
/homeassistant/scripts/check_config.py @kellerza

# Agent Configurations
AGENTS.md @home-assistant/core
CLAUDE.md @home-assistant/core
/.agent/ @home-assistant/core
/.claude/ @home-assistant/core
/.gemini/ @home-assistant/core

# Integrations
/homeassistant/components/abode/ @shred86
/tests/components/abode/ @shred86
@@ -355,6 +362,8 @@ build.json @home-assistant/supervisor
/tests/components/deluge/ @tkdrob
/homeassistant/components/demo/ @home-assistant/core
/tests/components/demo/ @home-assistant/core
/homeassistant/components/denon_rs232/ @balloob
/tests/components/denon_rs232/ @balloob
/homeassistant/components/denonavr/ @ol-iver @starkillerOG
/tests/components/denonavr/ @ol-iver @starkillerOG
/homeassistant/components/derivative/ @afaucogney @karwosts
@@ -391,6 +400,8 @@ build.json @home-assistant/supervisor
/tests/components/dnsip/ @gjohansson-ST
/homeassistant/components/door/ @home-assistant/core
/tests/components/door/ @home-assistant/core
/homeassistant/components/doorbell/ @home-assistant/core
/tests/components/doorbell/ @home-assistant/core
/homeassistant/components/doorbird/ @oblogic7 @bdraco @flacjacket
/tests/components/doorbird/ @oblogic7 @bdraco @flacjacket
/homeassistant/components/dormakaba_dkey/ @emontnemery
@@ -401,6 +412,8 @@ build.json @home-assistant/supervisor
/tests/components/dremel_3d_printer/ @tkdrob
/homeassistant/components/drop_connect/ @ChandlerSystems @pfrazer
/tests/components/drop_connect/ @ChandlerSystems @pfrazer
/homeassistant/components/dropbox/ @bdr99
/tests/components/dropbox/ @bdr99
/homeassistant/components/droplet/ @sarahseidman
/tests/components/droplet/ @sarahseidman
/homeassistant/components/dsmr/ @Robbie1221
@@ -409,6 +422,8 @@ build.json @home-assistant/supervisor
/tests/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
/homeassistant/components/duckdns/ @tr4nt0r
/tests/components/duckdns/ @tr4nt0r
/homeassistant/components/duco/ @ronaldvdmeer
/tests/components/duco/ @ronaldvdmeer
/homeassistant/components/duotecno/ @cereal2nd
/tests/components/duotecno/ @cereal2nd
/homeassistant/components/dwd_weather_warnings/ @runningman84 @stephan192
@@ -417,6 +432,8 @@ build.json @home-assistant/supervisor
/tests/components/dynalite/ @ziv1234
/homeassistant/components/eafm/ @Jc2k
/tests/components/eafm/ @Jc2k
/homeassistant/components/earn_e_p1/ @Miggets7
/tests/components/earn_e_p1/ @Miggets7
/homeassistant/components/easyenergy/ @klaasnicolaas
/tests/components/easyenergy/ @klaasnicolaas
/homeassistant/components/ecoforest/ @pjanuario
@@ -478,8 +495,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/environment_canada/ @gwww @michaeldavie
/tests/components/environment_canada/ @gwww @michaeldavie
/homeassistant/components/ephember/ @ttroy50 @roberty99
/homeassistant/components/epic_games_store/ @hacf-fr @Quentame
/tests/components/epic_games_store/ @hacf-fr @Quentame
/homeassistant/components/epic_games_store/ @Quentame
/tests/components/epic_games_store/ @Quentame
/homeassistant/components/epion/ @lhgravendeel
/tests/components/epion/ @lhgravendeel
/homeassistant/components/epson/ @pszafer
@@ -494,6 +511,8 @@ build.json @home-assistant/supervisor
/tests/components/essent/ @jaapp
/homeassistant/components/eufylife_ble/ @bdr99
/tests/components/eufylife_ble/ @bdr99
/homeassistant/components/eurotronic_cometblue/ @rikroe
/tests/components/eurotronic_cometblue/ @rikroe
/homeassistant/components/event/ @home-assistant/core
/tests/components/event/ @home-assistant/core
/homeassistant/components/evohome/ @zxdavb
@@ -553,8 +572,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/fortios/ @kimfrellsen
/homeassistant/components/foscam/ @Foscam-wangzhengyu
/tests/components/foscam/ @Foscam-wangzhengyu
/homeassistant/components/freebox/ @hacf-fr @Quentame
/tests/components/freebox/ @hacf-fr @Quentame
/homeassistant/components/freebox/ @hacf-fr/reviewers @Quentame
/tests/components/freebox/ @hacf-fr/reviewers @Quentame
/homeassistant/components/freedompro/ @stefano055415
/tests/components/freedompro/ @stefano055415
/homeassistant/components/freshr/ @SierraNL
@@ -575,6 +594,8 @@ build.json @home-assistant/supervisor
/tests/components/fujitsu_fglair/ @crevetor
/homeassistant/components/fully_kiosk/ @cgarwood
/tests/components/fully_kiosk/ @cgarwood
/homeassistant/components/fumis/ @frenck
/tests/components/fumis/ @frenck
/homeassistant/components/fyta/ @dontinelli
/tests/components/fyta/ @dontinelli
/homeassistant/components/garage_door/ @home-assistant/core
@@ -885,8 +906,8 @@ build.json @home-assistant/supervisor
/tests/components/jewish_calendar/ @tsvi
/homeassistant/components/justnimbus/ @kvanzuijlen
/tests/components/justnimbus/ @kvanzuijlen
/homeassistant/components/jvc_projector/ @SteveEasley @msavazzi
/tests/components/jvc_projector/ @SteveEasley @msavazzi
/homeassistant/components/jvc_projector/ @SteveEasley
/tests/components/jvc_projector/ @SteveEasley
/homeassistant/components/kaiterra/ @Michsior14
/homeassistant/components/kaleidescape/ @SteveEasley
/tests/components/kaleidescape/ @SteveEasley
@@ -899,6 +920,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/keyboard_remote/ @bendavid @lanrat
/homeassistant/components/keymitt_ble/ @spycle
/tests/components/keymitt_ble/ @spycle
/homeassistant/components/kiosker/ @Claeysson
/tests/components/kiosker/ @Claeysson
/homeassistant/components/kitchen_sink/ @home-assistant/core
/tests/components/kitchen_sink/ @home-assistant/core
/homeassistant/components/kmtronic/ @dgomes
@@ -1044,8 +1067,8 @@ build.json @home-assistant/supervisor
/tests/components/met/ @danielhiversen
/homeassistant/components/met_eireann/ @DylanGore
/tests/components/met_eireann/ @DylanGore
/homeassistant/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
/tests/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
/homeassistant/components/meteo_france/ @hacf-fr/reviewers @oncleben31 @Quentame
/tests/components/meteo_france/ @hacf-fr/reviewers @oncleben31 @Quentame
/homeassistant/components/meteo_lt/ @xE1H
/tests/components/meteo_lt/ @xE1H
/homeassistant/components/meteoalarm/ @rolfberkenbosch
@@ -1137,8 +1160,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/netatmo/ @cgtobi
/tests/components/netatmo/ @cgtobi
/homeassistant/components/netdata/ @fabaff
/homeassistant/components/netgear/ @hacf-fr @Quentame @starkillerOG
/tests/components/netgear/ @hacf-fr @Quentame @starkillerOG
/homeassistant/components/netgear/ @Quentame @starkillerOG
/tests/components/netgear/ @Quentame @starkillerOG
/homeassistant/components/netgear_lte/ @tkdrob
/tests/components/netgear_lte/ @tkdrob
/homeassistant/components/network/ @home-assistant/core
@@ -1232,6 +1255,8 @@ build.json @home-assistant/supervisor
/tests/components/open_meteo/ @frenck
/homeassistant/components/open_router/ @joostlek @ab3lson
/tests/components/open_router/ @joostlek @ab3lson
/homeassistant/components/openai_conversation/ @Shulyaka
/tests/components/openai_conversation/ @Shulyaka
/homeassistant/components/opendisplay/ @g4bri3lDev
/tests/components/opendisplay/ @g4bri3lDev
/homeassistant/components/openerz/ @misialq
@@ -1254,8 +1279,8 @@ build.json @home-assistant/supervisor
/tests/components/openuv/ @bachya
/homeassistant/components/openweathermap/ @fabaff @freekode @nzapponi @wittypluck
/tests/components/openweathermap/ @fabaff @freekode @nzapponi @wittypluck
/homeassistant/components/opnsense/ @mtreinish
/tests/components/opnsense/ @mtreinish
/homeassistant/components/opnsense/ @HarlemSquirrel @Snuffy2
/tests/components/opnsense/ @HarlemSquirrel @Snuffy2
/homeassistant/components/opower/ @tronikos
/tests/components/opower/ @tronikos
/homeassistant/components/oralb/ @bdraco @Lash-L
@@ -1299,6 +1324,8 @@ build.json @home-assistant/supervisor
/tests/components/pi_hole/ @shenxn
/homeassistant/components/picnic/ @corneyl @codesalatdev
/tests/components/picnic/ @corneyl @codesalatdev
/homeassistant/components/picotts/ @rooggiieerr
/tests/components/picotts/ @rooggiieerr
/homeassistant/components/ping/ @jpbede
/tests/components/ping/ @jpbede
/homeassistant/components/plaato/ @JohNan
@@ -1679,8 +1706,8 @@ build.json @home-assistant/supervisor
/tests/components/syncthing/ @zhulik
/homeassistant/components/syncthru/ @nielstron
/tests/components/syncthru/ @nielstron
/homeassistant/components/synology_dsm/ @hacf-fr @Quentame @mib1185
/tests/components/synology_dsm/ @hacf-fr @Quentame @mib1185
/homeassistant/components/synology_dsm/ @Quentame @mib1185
/tests/components/synology_dsm/ @Quentame @mib1185
/homeassistant/components/synology_srm/ @aerialls
/homeassistant/components/system_bridge/ @timmo001
/tests/components/system_bridge/ @timmo001
@@ -1711,6 +1738,8 @@ build.json @home-assistant/supervisor
/tests/components/tedee/ @patrickhilker @zweckj
/homeassistant/components/telegram_bot/ @hanwg
/tests/components/telegram_bot/ @hanwg
/homeassistant/components/teleinfo/ @esciara
/tests/components/teleinfo/ @esciara
/homeassistant/components/tellduslive/ @fredrike
/tests/components/tellduslive/ @fredrike
/homeassistant/components/teltonika/ @karlbeecken
@@ -1813,6 +1842,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/unifi_access/ @imhotep @RaHehl
/tests/components/unifi_access/ @imhotep @RaHehl
/homeassistant/components/unifi_direct/ @tofuSCHNITZEL
/homeassistant/components/unifi_discovery/ @RaHehl
/tests/components/unifi_discovery/ @RaHehl
/homeassistant/components/unifiled/ @florisvdk
/homeassistant/components/unifiprotect/ @RaHehl
/tests/components/unifiprotect/ @RaHehl
@@ -1860,10 +1891,12 @@ build.json @home-assistant/supervisor
/tests/components/version/ @ludeeus
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
/homeassistant/components/vicare/ @CFenner
/tests/components/vicare/ @CFenner
/homeassistant/components/vicare/ @CFenner @lackas
/tests/components/vicare/ @CFenner @lackas
/homeassistant/components/victron_ble/ @rajlaud
/tests/components/victron_ble/ @rajlaud
/homeassistant/components/victron_gx/ @tomer-w
/tests/components/victron_gx/ @tomer-w
/homeassistant/components/victron_remote_monitoring/ @AndyTempel
/tests/components/victron_remote_monitoring/ @AndyTempel
/homeassistant/components/vilfo/ @ManneW
@@ -1962,8 +1995,8 @@ build.json @home-assistant/supervisor
/tests/components/wsdot/ @ucodery
/homeassistant/components/wyoming/ @synesthesiam
/tests/components/wyoming/ @synesthesiam
/homeassistant/components/xbox/ @hunterjm @tr4nt0r
/tests/components/xbox/ @hunterjm @tr4nt0r
/homeassistant/components/xbox/ @tr4nt0r
/tests/components/xbox/ @tr4nt0r
/homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
/tests/components/xiaomi_aqara/ @danielhiversen @syssi
/homeassistant/components/xiaomi_ble/ @Jc2k @Ernst79

Generated (+9, -11)
@@ -1,3 +1,4 @@
# syntax=docker/dockerfile@sha256:2780b5c3bab67f1f76c781860de469442999ed1a0d7992a5efdf2cffc0e3d769
# Automatically generated by hassfest.
#
# To update, run python3 -m script.hassfest -p docker
@@ -19,25 +20,22 @@ ENV \
    UV_SYSTEM_PYTHON=true \
    UV_NO_CACHE=true

WORKDIR /usr/src

# Home Assistant S6-Overlay
COPY rootfs /

# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc@sha256:675c318b23c06fd862a61d262240c9a63436b4050d177ffc68a32710d9e05bae /usr/local/bin/go2rtc /bin/go2rtc

## Setup Home Assistant Core dependencies
COPY --parents requirements.txt homeassistant/package_constraints.txt homeassistant/
RUN \
    # Verify go2rtc can be executed
    go2rtc --version \
    # Install uv
    && pip3 install uv==0.11.1

WORKDIR /usr/src

## Setup Home Assistant Core dependencies
COPY requirements.txt homeassistant/
COPY homeassistant/package_constraints.txt homeassistant/homeassistant/
RUN \
    uv pip install \
    # Install uv at the version pinned in the requirements file
    && pip3 install --no-cache-dir "uv==$(awk -F'==' '/^uv==/{print $2}' homeassistant/requirements.txt)" \
    && uv pip install \
        --no-build \
        -r homeassistant/requirements.txt

@@ -51,7 +49,7 @@ RUN \
    -r homeassistant/requirements_all.txt

## Setup Home Assistant Core
COPY . homeassistant/
COPY --parents LICENSE* README* homeassistant/ pyproject.toml homeassistant/
RUN \
    uv pip install \
        -e ./homeassistant \
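The Dockerfile change above stops hardcoding the uv version and instead extracts the pin from requirements.txt at build time via awk. The following is a minimal Python sketch of the same extraction, for illustration only; the file name "requirements.txt" and the "uv==" prefix come from the diff, while the helper name extract_uv_pin is hypothetical.

from pathlib import Path


def extract_uv_pin(requirements: str = "requirements.txt") -> str:
    """Return the version pinned as 'uv==<version>' in a requirements file."""
    for line in Path(requirements).read_text().splitlines():
        # Equivalent of: awk -F'==' '/^uv==/{print $2}' requirements.txt
        if line.startswith("uv=="):
            return line.split("==", 1)[1].strip()
    raise LookupError("no 'uv==' pin found")


print(extract_uv_pin())  # e.g. "0.11.1"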
@@ -1,3 +1,4 @@
# syntax=docker/dockerfile@sha256:2780b5c3bab67f1f76c781860de469442999ed1a0d7992a5efdf2cffc0e3d769
FROM mcr.microsoft.com/vscode/devcontainers/base:debian

SHELL ["/bin/bash", "-o", "pipefail", "-c"]
@@ -7,23 +7,31 @@ to speed up the process.

from __future__ import annotations

from collections.abc import Container, Iterable, Sequence
from datetime import timedelta
from functools import lru_cache, partial
from typing import Any
from functools import lru_cache
from typing import Any, override

from jwt import DecodeError, PyJWS, PyJWT
from jwt import DecodeError, PyJWK, PyJWS, PyJWT
from jwt.algorithms import AllowedPublicKeys
from jwt.types import Options

from homeassistant.util.json import json_loads

JWT_TOKEN_CACHE_SIZE = 16
MAX_TOKEN_SIZE = 8192

_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss", "sub", "jti")

_VERIFY_OPTIONS: dict[str, Any] = {f"verify_{key}": True for key in _VERIFY_KEYS} | {
    "require": []
}
_NO_VERIFY_OPTIONS = {f"verify_{key}": False for key in _VERIFY_KEYS}
_NO_VERIFY_OPTIONS = Options(
    verify_signature=False,
    verify_exp=False,
    verify_nbf=False,
    verify_iat=False,
    verify_aud=False,
    verify_iss=False,
    verify_sub=False,
    verify_jti=False,
    require=[],
)


class _PyJWSWithLoadCache(PyJWS):
@@ -38,9 +46,6 @@ class _PyJWSWithLoadCache(PyJWS):
        return super()._load(jwt)


_jws = _PyJWSWithLoadCache()


@lru_cache(maxsize=JWT_TOKEN_CACHE_SIZE)
def _decode_payload(json_payload: str) -> dict[str, Any]:
    """Decode the payload from a JWS dictionary."""
@@ -56,21 +61,12 @@ def _decode_payload(json_payload: str) -> dict[str, Any]:
class _PyJWTWithVerify(PyJWT):
    """PyJWT with a fast decode implementation."""

    def decode_payload(
        self, jwt: str, key: str, options: dict[str, Any], algorithms: list[str]
    ) -> dict[str, Any]:
        """Decode a JWT's payload."""
        if len(jwt) > MAX_TOKEN_SIZE:
            # Avoid caching impossible tokens
            raise DecodeError("Token too large")
        return _decode_payload(
            _jws.decode_complete(
                jwt=jwt,
                key=key,
                algorithms=algorithms,
                options=options,
            )["payload"]
        )
    def __init__(self) -> None:
        """Initialize the PyJWT instance."""
        # We require exp and iat claims to be present
        super().__init__(Options(require=["exp", "iat"]))
        # Override the _jws instance with our cached version
        self._jws = _PyJWSWithLoadCache()

    def verify_and_decode(
        self,
@@ -79,37 +75,70 @@ class _PyJWTWithVerify(PyJWT):
        algorithms: list[str],
        issuer: str | None = None,
        leeway: float | timedelta = 0,
        options: dict[str, Any] | None = None,
        options: Options | None = None,
    ) -> dict[str, Any]:
        """Verify a JWT's signature and claims."""
        merged_options = {**_VERIFY_OPTIONS, **(options or {})}
        payload = self.decode_payload(
        return self.decode(
            jwt=jwt,
            key=key,
            options=merged_options,
            algorithms=algorithms,
        )
        # These should never be missing since we verify them
        # but this is an additional safeguard to make sure
        # nothing slips through.
        assert "exp" in payload, "exp claim is required"
        assert "iat" in payload, "iat claim is required"
        self._validate_claims(
            payload=payload,
            options=merged_options,
            issuer=issuer,
            leeway=leeway,
            options=options,
        )
        return payload

    @override
    def decode(
        self,
        jwt: str | bytes,
        key: AllowedPublicKeys | PyJWK | str | bytes = "",
        algorithms: Sequence[str] | None = None,
        options: Options | None = None,
        verify: bool | None = None,
        detached_payload: bytes | None = None,
        audience: str | Iterable[str] | None = None,
        subject: str | None = None,
        issuer: str | Container[str] | None = None,
        leeway: float | timedelta = 0,
        **kwargs: Any,
    ) -> dict[str, Any]:
        """Decode a JWT, verifying the signature and claims."""
        if len(jwt) > MAX_TOKEN_SIZE:
            # Avoid caching impossible tokens
            raise DecodeError("Token too large")
        return super().decode(
            jwt=jwt,
            key=key,
            algorithms=algorithms,
            options=options,
            verify=verify,
            detached_payload=detached_payload,
            audience=audience,
            subject=subject,
            issuer=issuer,
            leeway=leeway,
            **kwargs,
        )

    @override
    def _decode_payload(self, decoded: dict[str, Any]) -> dict[str, Any]:
        return _decode_payload(decoded["payload"])


_jwt = _PyJWTWithVerify()
verify_and_decode = _jwt.verify_and_decode
unverified_hs256_token_decode = lru_cache(maxsize=JWT_TOKEN_CACHE_SIZE)(
    partial(
        _jwt.decode_payload, key="", algorithms=["HS256"], options=_NO_VERIFY_OPTIONS


@lru_cache(maxsize=JWT_TOKEN_CACHE_SIZE)
def unverified_hs256_token_decode(jwt: str) -> dict[str, Any]:
    """Decode a JWT without verifying the signature."""
    return _jwt.decode(
        jwt=jwt,
        key="",
        algorithms=["HS256"],
        options=_NO_VERIFY_OPTIONS,
    )
)


__all__ = [
    "unverified_hs256_token_decode",
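The JWT refactor above keeps two ideas: the payload-JSON parse is memoized with lru_cache, and oversized tokens are rejected before they can ever reach a cache. A standalone sketch of that pattern follows (not the Home Assistant code; the constants mirror the module above, the base64 handling is illustrative, and no signature check is performed):

import base64
import json
from functools import lru_cache

MAX_TOKEN_SIZE = 8192


@lru_cache(maxsize=16)
def _parse_payload(payload_b64: str) -> dict:
    # Pad and decode the base64url payload segment, then parse the JSON once;
    # repeated decodes of the same token hit the cache instead of re-parsing.
    padded = payload_b64 + "=" * (-len(payload_b64) % 4)
    return json.loads(base64.urlsafe_b64decode(padded))


def unverified_decode(token: str) -> dict:
    if len(token) > MAX_TOKEN_SIZE:
        # Rejected before touching the cache, so junk cannot evict real tokens.
        raise ValueError("Token too large")
    header, payload, signature = token.split(".")
    return _parse_payload(payload)

The ordering matters: if the size guard lived behind the cached function, a flood of huge bogus tokens could evict the handful of legitimate tokens the cache exists to serve.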
@@ -1,5 +1,5 @@
{
  "domain": "denon",
  "name": "Denon",
  "integrations": ["denon", "denonavr", "heos"]
  "integrations": ["denon", "denonavr", "denon_rs232", "heos"]
}

@@ -6,6 +6,7 @@
    "unifi",
    "unifi_access",
    "unifi_direct",
    "unifi_discovery",
    "unifiled",
    "unifiprotect"
  ]

@@ -1,5 +1,5 @@
{
  "domain": "victron",
  "name": "Victron",
  "integrations": ["victron_ble", "victron_remote_monitoring"]
  "integrations": ["victron_gx", "victron_ble", "victron_remote_monitoring"]
}
@@ -30,7 +30,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType

from .const import CONF_POLLING, DOMAIN, DOMAIN_DATA, LOGGER
from .const import CONF_POLLING, DOMAIN, LOGGER
from .services import async_setup_services

ATTR_DEVICE_NAME = "device_name"
@@ -67,13 +67,16 @@ class AbodeSystem:
    logout_listener: CALLBACK_TYPE | None = None


type AbodeConfigEntry = ConfigEntry[AbodeSystem]


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Abode component."""
    async_setup_services(hass)
    return True


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: AbodeConfigEntry) -> bool:
    """Set up Abode integration from a config entry."""
    username = entry.data[CONF_USERNAME]
    password = entry.data[CONF_PASSWORD]
@@ -99,50 +102,54 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    except (AbodeException, ConnectTimeout, HTTPError) as ex:
        raise ConfigEntryNotReady(f"Unable to connect to Abode: {ex}") from ex

    hass.data[DOMAIN_DATA] = AbodeSystem(abode, polling)
    entry.runtime_data = AbodeSystem(abode, polling)

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    await setup_hass_events(hass)
    await hass.async_add_executor_job(setup_abode_events, hass)
    await setup_hass_events(hass, entry)
    await hass.async_add_executor_job(setup_abode_events, hass, entry)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
def _shutdown_client(abode: Abode) -> None:
    """Shutdown client."""
    abode.events.stop()
    abode.logout()


async def async_unload_entry(hass: HomeAssistant, entry: AbodeConfigEntry) -> bool:
    """Unload a config entry."""
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

    await hass.async_add_executor_job(hass.data[DOMAIN_DATA].abode.events.stop)
    await hass.async_add_executor_job(hass.data[DOMAIN_DATA].abode.logout)
    await hass.async_add_executor_job(_shutdown_client, entry.runtime_data.abode)

    if logout_listener := hass.data[DOMAIN_DATA].logout_listener:
    if logout_listener := entry.runtime_data.logout_listener:
        logout_listener()
    hass.data.pop(DOMAIN_DATA)

    return unload_ok


async def setup_hass_events(hass: HomeAssistant) -> None:
async def setup_hass_events(hass: HomeAssistant, entry: AbodeConfigEntry) -> None:
    """Home Assistant start and stop callbacks."""

    def logout(event: Event) -> None:
        """Logout of Abode."""
        if not hass.data[DOMAIN_DATA].polling:
            hass.data[DOMAIN_DATA].abode.events.stop()
        if not entry.runtime_data.polling:
            entry.runtime_data.abode.events.stop()

        hass.data[DOMAIN_DATA].abode.logout()
        entry.runtime_data.abode.logout()
        LOGGER.info("Logged out of Abode")

    if not hass.data[DOMAIN_DATA].polling:
        await hass.async_add_executor_job(hass.data[DOMAIN_DATA].abode.events.start)
    if not entry.runtime_data.polling:
        await hass.async_add_executor_job(entry.runtime_data.abode.events.start)

    hass.data[DOMAIN_DATA].logout_listener = hass.bus.async_listen_once(
    entry.runtime_data.logout_listener = hass.bus.async_listen_once(
        EVENT_HOMEASSISTANT_STOP, logout
    )


def setup_abode_events(hass: HomeAssistant) -> None:
def setup_abode_events(hass: HomeAssistant, entry: AbodeConfigEntry) -> None:
    """Event callbacks."""

    def event_callback(event: str, event_json: dict[str, str]) -> None:
@@ -179,6 +186,6 @@ def setup_abode_events(hass: HomeAssistant) -> None:
    ]

    for event in events:
        hass.data[DOMAIN_DATA].abode.events.add_event_callback(
        entry.runtime_data.abode.events.add_event_callback(
            event, partial(event_callback, event)
        )
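The Abode changes above (and the platform diffs that follow) all apply one migration: integration state moves off the global hass.data[DOMAIN_DATA] dictionary onto the config entry's typed runtime_data. A minimal sketch of the pattern, with MySystem and MyConfigEntry as illustrative stand-ins; the ConfigEntry and runtime_data APIs are the ones used in the diff:

from dataclasses import dataclass

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


@dataclass
class MySystem:
    client: object
    polling: bool


type MyConfigEntry = ConfigEntry[MySystem]  # typed alias, as in the diff


async def async_setup_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
    # State is attached to the entry instead of hass.data[DOMAIN_DATA], so it
    # is scoped to this entry and dropped automatically on unload.
    entry.runtime_data = MySystem(client=object(), polling=False)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
    system = entry.runtime_data  # typed access, no cast or hass.data.pop needed
    return True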
@@ -9,21 +9,20 @@ from homeassistant.components.alarm_control_panel import (
    AlarmControlPanelEntityFeature,
    AlarmControlPanelState,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN_DATA
from . import AbodeConfigEntry
from .entity import AbodeDevice


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    entry: AbodeConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Abode alarm control panel device."""
    data = hass.data[DOMAIN_DATA]
    data = entry.runtime_data
    async_add_entities(
        [AbodeAlarm(data, await hass.async_add_executor_job(data.abode.get_alarm))]
    )
@@ -10,22 +10,21 @@ from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
    BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.enum import try_parse_enum

from .const import DOMAIN_DATA
from . import AbodeConfigEntry
from .entity import AbodeDevice


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    entry: AbodeConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Abode binary sensor devices."""
    data = hass.data[DOMAIN_DATA]
    data = entry.runtime_data

    device_types = [
        "connectivity",
@@ -12,14 +12,13 @@ import requests
from requests.models import Response

from homeassistant.components.camera import Camera
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import Event, HomeAssistant
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import Throttle

from . import AbodeSystem
from .const import DOMAIN_DATA, LOGGER
from . import AbodeConfigEntry, AbodeSystem
from .const import LOGGER
from .entity import AbodeDevice

MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=90)
@@ -27,11 +26,11 @@ MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=90)

async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    entry: AbodeConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Abode camera devices."""
    data = hass.data[DOMAIN_DATA]
    data = entry.runtime_data

    async_add_entities(
        AbodeCamera(data, device, timeline.CAPTURE_IMAGE)
@@ -3,17 +3,10 @@
from __future__ import annotations

import logging
from typing import TYPE_CHECKING

from homeassistant.util.hass_dict import HassKey

if TYPE_CHECKING:
    from . import AbodeSystem

LOGGER = logging.getLogger(__package__)

DOMAIN = "abode"
DOMAIN_DATA: HassKey[AbodeSystem] = HassKey(DOMAIN)
ATTRIBUTION = "Data provided by goabode.com"

CONF_POLLING = "polling"
@@ -5,21 +5,20 @@ from typing import Any
from jaraco.abode.devices.cover import Cover

from homeassistant.components.cover import CoverEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN_DATA
from . import AbodeConfigEntry
from .entity import AbodeDevice


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    entry: AbodeConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Abode cover devices."""
    data = hass.data[DOMAIN_DATA]
    data = entry.runtime_data

    async_add_entities(
        AbodeCover(data, device)
@@ -7,7 +7,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity

from . import AbodeSystem
from .const import ATTRIBUTION, DOMAIN, DOMAIN_DATA
from .const import ATTRIBUTION, DOMAIN


class AbodeEntity(Entity):
@@ -29,7 +29,7 @@ class AbodeEntity(Entity):
            self._update_connection_status,
        )

        self.hass.data[DOMAIN_DATA].entity_ids.add(self.entity_id)
        self._data.entity_ids.add(self.entity_id)

    async def async_will_remove_from_hass(self) -> None:
        """Unsubscribe from Abode connection status updates."""
@@ -16,21 +16,20 @@ from homeassistant.components.light import (
|
||||
ColorMode,
|
||||
LightEntity,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN_DATA
|
||||
from . import AbodeConfigEntry
|
||||
from .entity import AbodeDevice
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
entry: AbodeConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Abode light devices."""
|
||||
data = hass.data[DOMAIN_DATA]
|
||||
data = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
AbodeLight(data, device)
|
||||
|
||||
@@ -5,21 +5,20 @@ from typing import Any
from jaraco.abode.devices.lock import Lock

from homeassistant.components.lock import LockEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN_DATA
from . import AbodeConfigEntry
from .entity import AbodeDevice


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    entry: AbodeConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Abode lock devices."""
    data = hass.data[DOMAIN_DATA]
    data = entry.runtime_data

    async_add_entities(
        AbodeLock(data, device)
@@ -14,13 +14,11 @@ from homeassistant.components.sensor import (
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import LIGHT_LUX, PERCENTAGE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AbodeSystem
from .const import DOMAIN_DATA
from . import AbodeConfigEntry, AbodeSystem
from .entity import AbodeDevice

ABODE_TEMPERATURE_UNIT_HA_UNIT = {
@@ -66,11 +64,11 @@ SENSOR_TYPES: tuple[AbodeSensorDescription, ...] = (

async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    entry: AbodeConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Abode sensor devices."""
    data = hass.data[DOMAIN_DATA]
    data = entry.runtime_data

    async_add_entities(
        AbodeSensor(data, device, description)
@@ -2,15 +2,21 @@

from __future__ import annotations

from typing import TYPE_CHECKING

from jaraco.abode.exceptions import Exception as AbodeException
import voluptuous as vol

from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import dispatcher_send

from .const import DOMAIN, DOMAIN_DATA, LOGGER
from .const import DOMAIN, LOGGER

if TYPE_CHECKING:
    from . import AbodeConfigEntry, AbodeSystem

ATTR_SETTING = "setting"
ATTR_VALUE = "value"
@@ -25,13 +31,21 @@ CAPTURE_IMAGE_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})
AUTOMATION_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})


def _get_abode_system(hass: HomeAssistant) -> AbodeSystem:
    """Return the Abode system for the loaded config entry."""
    entries: list[AbodeConfigEntry] = hass.config_entries.async_loaded_entries(DOMAIN)
    if not entries:
        raise ServiceValidationError("Abode integration is not loaded")
    return entries[0].runtime_data


def _change_setting(call: ServiceCall) -> None:
    """Change an Abode system setting."""
    setting = call.data[ATTR_SETTING]
    value = call.data[ATTR_VALUE]

    try:
        call.hass.data[DOMAIN_DATA].abode.set_setting(setting, value)
        _get_abode_system(call.hass).abode.set_setting(setting, value)
    except AbodeException as ex:
        LOGGER.warning(ex)

@@ -42,7 +56,7 @@ def _capture_image(call: ServiceCall) -> None:

    target_entities = [
        entity_id
        for entity_id in call.hass.data[DOMAIN_DATA].entity_ids
        for entity_id in _get_abode_system(call.hass).entity_ids
        if entity_id in entity_ids
    ]

@@ -57,7 +71,7 @@ def _trigger_automation(call: ServiceCall) -> None:

    target_entities = [
        entity_id
        for entity_id in call.hass.data[DOMAIN_DATA].entity_ids
        for entity_id in _get_abode_system(call.hass).entity_ids
        if entity_id in entity_ids
    ]
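A side effect of the runtime_data migration shows up in the services diff above: service handlers can no longer reach a module-global store, so they resolve the loaded entry at call time. A minimal sketch of that lookup pattern, with DOMAIN and the handler body as illustrative placeholders; async_loaded_entries and ServiceValidationError are the APIs used in the diff:

from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import ServiceValidationError

DOMAIN = "my_domain"


def _get_runtime(hass: HomeAssistant):
    # Looked up per service call, never cached: if the entry is reloaded,
    # the next call automatically sees the fresh runtime data.
    entries = hass.config_entries.async_loaded_entries(DOMAIN)
    if not entries:
        raise ServiceValidationError("Integration is not loaded")
    return entries[0].runtime_data


def handle_service(call: ServiceCall) -> None:
    runtime = _get_runtime(call.hass)
    ...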
@@ -7,12 +7,11 @@ from typing import Any, cast
from jaraco.abode.devices.switch import Switch

from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN_DATA
from . import AbodeConfigEntry
from .entity import AbodeAutomation, AbodeDevice

DEVICE_TYPES = ["switch", "valve"]
@@ -20,11 +19,11 @@ DEVICE_TYPES = ["switch", "valve"]

async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    entry: AbodeConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Abode switch devices."""
    data = hass.data[DOMAIN_DATA]
    data = entry.runtime_data

    entities: list[SwitchEntity] = [
        AbodeSwitch(data, device)
@@ -6,10 +6,11 @@ from typing import Final

from homeassistant.const import STATE_OFF, STATE_ON

CONF_READ_TIMEOUT: Final = "timeout"
CONF_WRITE_TIMEOUT: Final = "write_timeout"

DEFAULT_NAME: Final = "Acer Projector"
DEFAULT_TIMEOUT: Final = 1
DEFAULT_READ_TIMEOUT: Final = 1
DEFAULT_WRITE_TIMEOUT: Final = 1

ECO_MODE: Final = "ECO Mode"

@@ -5,5 +5,5 @@
  "documentation": "https://www.home-assistant.io/integrations/acer_projector",
  "iot_class": "local_polling",
  "quality_scale": "legacy",
  "requirements": ["pyserial==3.5"]
  "requirements": ["serialx==1.4.1"]
}
@@ -6,7 +6,7 @@ import logging
import re
from typing import Any

import serial
from serialx import Serial, SerialException
import voluptuous as vol

from homeassistant.components.switch import (
@@ -16,21 +16,22 @@ from homeassistant.components.switch import (
from homeassistant.const import (
    CONF_FILENAME,
    CONF_NAME,
    CONF_TIMEOUT,
    STATE_OFF,
    STATE_ON,
    STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .const import (
    CMD_DICT,
    CONF_READ_TIMEOUT,
    CONF_WRITE_TIMEOUT,
    DEFAULT_NAME,
    DEFAULT_TIMEOUT,
    DEFAULT_READ_TIMEOUT,
    DEFAULT_WRITE_TIMEOUT,
    ECO_MODE,
    ICON,
@@ -45,7 +46,7 @@ PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_FILENAME): cv.isdevice,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
        vol.Optional(CONF_READ_TIMEOUT, default=DEFAULT_READ_TIMEOUT): cv.positive_int,
        vol.Optional(
            CONF_WRITE_TIMEOUT, default=DEFAULT_WRITE_TIMEOUT
        ): cv.positive_int,
@@ -62,10 +63,10 @@ def setup_platform(
    """Connect with serial port and return Acer Projector."""
    serial_port = config[CONF_FILENAME]
    name = config[CONF_NAME]
    timeout = config[CONF_TIMEOUT]
    read_timeout = config[CONF_READ_TIMEOUT]
    write_timeout = config[CONF_WRITE_TIMEOUT]

    add_entities([AcerSwitch(serial_port, name, timeout, write_timeout)], True)
    add_entities([AcerSwitch(serial_port, name, read_timeout, write_timeout)], True)


class AcerSwitch(SwitchEntity):
@@ -77,14 +78,14 @@ class AcerSwitch(SwitchEntity):
        self,
        serial_port: str,
        name: str,
        timeout: int,
        read_timeout: int,
        write_timeout: int,
    ) -> None:
        """Init of the Acer projector."""
        self.serial = serial.Serial(
            port=serial_port, timeout=timeout, write_timeout=write_timeout
        )
        self._serial_port = serial_port
        self._read_timeout = read_timeout
        self._write_timeout = write_timeout

        self._attr_name = name
        self._attributes = {
            LAMP_HOURS: STATE_UNKNOWN,
@@ -94,22 +95,26 @@ class AcerSwitch(SwitchEntity):

    def _write_read(self, msg: str) -> str:
        """Write to the projector and read the return."""
        ret = ""

        # Sometimes the projector won't answer for no reason or the projector
        # was disconnected during runtime.
        # This way the projector can be reconnected and will still work
        try:
            if not self.serial.is_open:
                self.serial.open()
            self.serial.write(msg.encode("utf-8"))
            # Size is an experience value there is no real limit.
            # AFAIK there is no limit and no end character so we will usually
            # need to wait for timeout
            ret = self.serial.read_until(size=20).decode("utf-8")
        except serial.SerialException:
            _LOGGER.error("Problem communicating with %s", self._serial_port)
        self.serial.close()
        return ret
            with Serial.from_url(
                self._serial_port,
                read_timeout=self._read_timeout,
                write_timeout=self._write_timeout,
            ) as serial:
                serial.write(msg.encode("utf-8"))

                # Size is an experience value there is no real limit.
                # AFAIK there is no limit and no end character so we will usually
                # need to wait for timeout
                return serial.read_until(size=20).decode("utf-8")
        except (OSError, SerialException, TimeoutError) as exc:
            raise HomeAssistantError(
                f"Problem communicating with {self._serial_port}"
            ) from exc

    def _write_read_format(self, msg: str) -> str:
        """Write msg, obtain answer and format output."""
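The rewrite above swaps a long-lived pyserial handle for an open-per-command serialx context manager, which removes the manual reconnect bookkeeping. A condensed sketch of that pattern follows; the serialx calls (Serial.from_url, read_until, SerialException) are the ones appearing in the diff, while PORT and query() are illustrative:

from serialx import Serial, SerialException

PORT = "/dev/ttyUSB0"  # illustrative device path


def query(msg: str) -> str:
    try:
        # Opening per command means a projector unplugged and replugged
        # between calls needs no explicit reconnect handling: the next
        # command simply opens the port again.
        with Serial.from_url(PORT, read_timeout=1, write_timeout=1) as conn:
            conn.write(msg.encode("utf-8"))
            return conn.read_until(size=20).decode("utf-8")
    except (OSError, SerialException, TimeoutError) as exc:
        raise RuntimeError(f"Problem communicating with {PORT}") from exc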
@@ -1,11 +1,7 @@
"""The Actron Air integration."""

from actron_neo_api import (
    ActronAirACSystem,
    ActronAirAPI,
    ActronAirAPIError,
    ActronAirAuthError,
)
from actron_neo_api import ActronAirAPI, ActronAirAPIError, ActronAirAuthError
from actron_neo_api.models.system import ActronAirSystemInfo

from homeassistant.const import CONF_API_TOKEN, Platform
from homeassistant.core import HomeAssistant
@@ -25,7 +21,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) ->
    """Set up Actron Air integration from a config entry."""

    api = ActronAirAPI(refresh_token=entry.data[CONF_API_TOKEN])
    systems: list[ActronAirACSystem] = []
    systems: list[ActronAirSystemInfo] = []

    try:
        systems = await api.get_ac_systems()
@@ -36,14 +32,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) ->
            translation_key="auth_error",
        ) from err
    except ActronAirAPIError as err:
        raise ConfigEntryNotReady from err
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="setup_connection_error",
        ) from err

    system_coordinators: dict[str, ActronAirSystemCoordinator] = {}
    for system in systems:
        coordinator = ActronAirSystemCoordinator(hass, entry, api, system)
        _LOGGER.debug("Setting up coordinator for system: %s", system["serial"])
        _LOGGER.debug("Setting up coordinator for system: %s", system.serial)
        await coordinator.async_config_entry_first_refresh()
        system_coordinators[system["serial"]] = coordinator
        system_coordinators[system.serial] = coordinator

    entry.runtime_data = ActronAirRuntimeData(
        api=api,

@@ -15,10 +15,12 @@ from homeassistant.components.climate import (
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN
from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator
from .entity import ActronAirAcEntity, ActronAirZoneEntity, handle_actron_api_errors
from .entity import ActronAirAcEntity, ActronAirZoneEntity, actron_air_command

PARALLEL_UPDATES = 0

@@ -136,23 +138,27 @@ class ActronSystemClimate(ActronAirAcEntity, ActronAirClimateEntity):
        """Return the target temperature."""
        return self._status.user_aircon_settings.temperature_setpoint_cool_c

    @handle_actron_api_errors
    @actron_air_command
    async def async_set_fan_mode(self, fan_mode: str) -> None:
        """Set a new fan mode."""
        api_fan_mode = FAN_MODE_MAPPING_HA_TO_ACTRONAIR.get(fan_mode)
        api_fan_mode = FAN_MODE_MAPPING_HA_TO_ACTRONAIR[fan_mode]
        await self._status.user_aircon_settings.set_fan_mode(api_fan_mode)

    @handle_actron_api_errors
    @actron_air_command
    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set the HVAC mode."""
        ac_mode = HVAC_MODE_MAPPING_HA_TO_ACTRONAIR.get(hvac_mode)
        ac_mode = HVAC_MODE_MAPPING_HA_TO_ACTRONAIR[hvac_mode]
        await self._status.ac_system.set_system_mode(ac_mode)

    @handle_actron_api_errors
    @actron_air_command
    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set the temperature."""
        temp = kwargs.get(ATTR_TEMPERATURE)
        await self._status.user_aircon_settings.set_temperature(temperature=temp)
        if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
            raise ServiceValidationError(
                translation_domain=DOMAIN,
                translation_key="temperature_missing",
            )
        await self._status.user_aircon_settings.set_temperature(temperature=temperature)


class ActronZoneClimate(ActronAirZoneEntity, ActronAirClimateEntity):
@@ -212,13 +218,18 @@ class ActronZoneClimate(ActronAirZoneEntity, ActronAirClimateEntity):
        """Return the target temperature."""
        return self._zone.temperature_setpoint_cool_c

    @handle_actron_api_errors
    @actron_air_command
    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set the HVAC mode."""
        is_enabled = hvac_mode != HVACMode.OFF
        await self._zone.enable(is_enabled)

    @handle_actron_api_errors
    @actron_air_command
    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set the temperature."""
        await self._zone.set_temperature(temperature=kwargs.get(ATTR_TEMPERATURE))
        if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
            raise ServiceValidationError(
                translation_domain=DOMAIN,
                translation_key="temperature_missing",
            )
        await self._zone.set_temperature(temperature=temperature)

@@ -23,7 +23,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
        self._user_code: str = ""
        self._verification_uri: str = ""
        self._expires_minutes: str = "30"
        self.login_task: asyncio.Task | None = None
        self.login_task: asyncio.Task[None] | None = None

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
@@ -38,10 +38,10 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
            _LOGGER.error("OAuth2 flow failed: %s", err)
            return self.async_abort(reason="oauth2_error")

        self._device_code = device_code_response["device_code"]
        self._user_code = device_code_response["user_code"]
        self._verification_uri = device_code_response["verification_uri_complete"]
        self._expires_minutes = str(device_code_response["expires_in"] // 60)
        self._device_code = device_code_response.device_code
        self._user_code = device_code_response.user_code
        self._verification_uri = device_code_response.verification_uri_complete
        self._expires_minutes = str(device_code_response.expires_in // 60)

        async def _wait_for_authorization() -> None:
            """Wait for the user to authorize the device."""
@@ -94,7 +94,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
            _LOGGER.error("Error getting user info: %s", err)
            return self.async_abort(reason="oauth2_error")

        unique_id = str(user_data["id"])
        unique_id = user_data.sub
        await self.async_set_unique_id(unique_id)

        # Check if this is a reauth flow
@@ -107,7 +107,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):

        self._abort_if_unique_id_configured()
        return self.async_create_entry(
            title=user_data["email"],
            title=user_data.email,
            data={CONF_API_TOKEN: self._api.refresh_token_value},
        )
@@ -6,12 +6,12 @@ from dataclasses import dataclass
from datetime import timedelta

from actron_neo_api import (
    ActronAirACSystem,
    ActronAirAPI,
    ActronAirAPIError,
    ActronAirAuthError,
    ActronAirStatus,
)
from actron_neo_api.models.system import ActronAirSystemInfo

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
@@ -38,7 +38,7 @@ class ActronAirRuntimeData:
type ActronAirConfigEntry = ConfigEntry[ActronAirRuntimeData]


class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirStatus]):
    """System coordinator for Actron Air integration."""

    def __init__(
@@ -46,7 +46,7 @@ class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
        hass: HomeAssistant,
        entry: ActronAirConfigEntry,
        api: ActronAirAPI,
        system: ActronAirACSystem,
        system: ActronAirSystemInfo,
    ) -> None:
        """Initialize the coordinator."""
        super().__init__(
@@ -57,7 +57,7 @@ class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
            config_entry=entry,
        )
        self.system = system
        self.serial_number = system["serial"]
        self.serial_number = system.serial
        self.api = api
        self.status = self.api.state_manager.get_status(self.serial_number)
        self.last_seen = dt_util.utcnow()
@@ -78,7 +78,14 @@ class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
                translation_placeholders={"error": repr(err)},
            ) from err

        self.status = self.api.state_manager.get_status(self.serial_number)
        status = self.api.state_manager.get_status(self.serial_number)
        if status is None:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="update_error",
                translation_placeholders={"error": "Status not available"},
            )
        self.status = status
        self.last_seen = dt_util.utcnow()
        return self.status
@@ -0,0 +1,35 @@
"""Diagnostics support for Actron Air."""

from __future__ import annotations

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_API_TOKEN
from homeassistant.core import HomeAssistant

from .coordinator import ActronAirConfigEntry

TO_REDACT = {CONF_API_TOKEN, "master_serial", "serial_number", "serial"}


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant,
    entry: ActronAirConfigEntry,
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    coordinators: dict[int, Any] = {}
    for idx, coordinator in enumerate(entry.runtime_data.system_coordinators.values()):
        coordinators[idx] = {
            "system": async_redact_data(
                coordinator.system.model_dump(mode="json"), TO_REDACT
            ),
            "status": async_redact_data(
                coordinator.data.model_dump(mode="json", exclude={"last_known_state"}),
                TO_REDACT,
            ),
        }
    return {
        "entry_data": async_redact_data(entry.data, TO_REDACT),
        "coordinators": coordinators,
    }
@@ -14,13 +14,17 @@ from .const import DOMAIN
from .coordinator import ActronAirSystemCoordinator


def handle_actron_api_errors[_EntityT: ActronAirEntity, **_P](
def actron_air_command[_EntityT: ActronAirEntity, **_P](
    func: Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]:
    """Decorate Actron Air API calls to handle ActronAirAPIError exceptions."""
    """Decorator for Actron Air API calls.

    Handles ActronAirAPIError exceptions, and requests a coordinator update
    to update the status of the devices as soon as possible.
    """

    @wraps(func)
    async def wrapper(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None:
    async def wrapper(self: _EntityT, /, *args: _P.args, **kwargs: _P.kwargs) -> None:
        """Wrap API calls with exception handling."""
        try:
            await func(self, *args, **kwargs)
@@ -30,6 +34,7 @@ def handle_actron_api_errors[_EntityT: ActronAirEntity, **_P](
                translation_key="api_error",
                translation_placeholders={"error": str(err)},
            ) from err
        self.coordinator.async_set_updated_data(self.coordinator.data)

    return wrapper
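The renamed decorator above does two things for every entity command: it translates library errors into a user-facing exception, and after a successful call it pushes the coordinator's data so listening entities repaint immediately instead of waiting for the next poll. A generic, self-contained sketch of that shape, where ApiError and the lack of a coordinator-typed bound are illustrative simplifications:

from collections.abc import Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate


class ApiError(Exception):
    """Stand-in for a client library's error type."""


def command[_T, **_P](
    func: Callable[Concatenate[_T, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, None]]:
    @wraps(func)
    async def wrapper(self: _T, /, *args: _P.args, **kwargs: _P.kwargs) -> None:
        try:
            await func(self, *args, **kwargs)
        except ApiError as err:
            raise RuntimeError(f"API error: {err}") from err
        # Re-publish current data so entity listeners refresh right away;
        # in practice _T would be bound to a coordinator-backed entity type.
        self.coordinator.async_set_updated_data(self.coordinator.data)

    return wrapper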
@@ -13,5 +13,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "quality_scale": "silver",
  "requirements": ["actron-neo-api==0.4.1"]
  "requirements": ["actron-neo-api==0.5.3"]
}
@@ -41,7 +41,7 @@ rules:

  # Gold
  devices: done
  diagnostics: todo
  diagnostics: done
  discovery-update-info:
    status: exempt
    comment: This integration uses DHCP discovery, however is cloud polling. Therefore there is no information to update.
@@ -54,18 +54,12 @@ rules:
  docs-troubleshooting: done
  docs-use-cases: done
  dynamic-devices: todo
  entity-category:
    status: exempt
    comment: This integration does not use entity categories.
  entity-device-class:
    status: exempt
    comment: This integration does not use entity device classes.
  entity-disabled-by-default:
    status: exempt
    comment: Not required for this integration at this stage.
  entity-translations: todo
  exception-translations: todo
  icon-translations: todo
  entity-category: done
  entity-device-class: todo
  entity-disabled-by-default: todo
  entity-translations: done
  exception-translations: done
  icon-translations: done
  reconfiguration-flow: todo
  repair-issues:
    status: exempt
@@ -75,4 +69,4 @@ rules:
  # Platinum
  async-dependency: done
  inject-websession: todo
  strict-typing: todo
  strict-typing: done
@@ -55,6 +55,12 @@
    "auth_error": {
      "message": "Authentication failed, please reauthenticate"
    },
    "setup_connection_error": {
      "message": "Failed to connect to the Actron Air API"
    },
    "temperature_missing": {
      "message": "Provide a temperature value when adjusting the climate entity."
    },
    "update_error": {
      "message": "An error occurred while retrieving data from the Actron Air API: {error}"
    }
@@ -10,7 +10,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator
from .entity import ActronAirAcEntity, handle_actron_api_errors
from .entity import ActronAirAcEntity, actron_air_command

PARALLEL_UPDATES = 0

@@ -105,12 +105,12 @@ class ActronAirSwitch(ActronAirAcEntity, SwitchEntity):
        """Return true if the switch is on."""
        return self.entity_description.is_on_fn(self.coordinator)

    @handle_actron_api_errors
    @actron_air_command
    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the switch on."""
        await self.entity_description.set_fn(self.coordinator, True)

    @handle_actron_api_errors
    @actron_air_command
    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the switch off."""
        await self.entity_description.set_fn(self.coordinator, False)
@@ -74,7 +74,8 @@ async def _resolve_attachments(
            resolved_attachments.append(
                conversation.Attachment(
                    media_content_id=media_content_id,
                    mime_type=image_data.content_type,
                    mime_type=attachment.get("media_content_type")
                    or image_data.content_type,
                    path=temp_filename,
                )
            )
@@ -89,7 +90,7 @@ async def _resolve_attachments(
            resolved_attachments.append(
                conversation.Attachment(
                    media_content_id=media_content_id,
                    mime_type=media.mime_type,
                    mime_type=attachment.get("media_content_type") or media.mime_type,
                    path=media.path,
                )
            )
@@ -36,7 +36,9 @@ def _make_detected_condition(
) -> type[Condition]:
    """Create a detected condition for a binary sensor device class."""
    return make_entity_state_condition(
        {BINARY_SENSOR_DOMAIN: DomainSpec(device_class=device_class)}, STATE_ON
        {BINARY_SENSOR_DOMAIN: DomainSpec(device_class=device_class)},
        STATE_ON,
        support_duration=True,
    )


@@ -45,7 +47,9 @@ def _make_cleared_condition(
) -> type[Condition]:
    """Create a cleared condition for a binary sensor device class."""
    return make_entity_state_condition(
        {BINARY_SENSOR_DOMAIN: DomainSpec(device_class=device_class)}, STATE_OFF
        {BINARY_SENSOR_DOMAIN: DomainSpec(device_class=device_class)},
        STATE_OFF,
        support_duration=True,
    )
@@ -249,6 +249,11 @@
.condition_binary_common: &condition_binary_common
  fields:
    behavior: *condition_behavior
    for:
      required: true
      default: 00:00:00
      selector:
        duration:

is_gas_detected:
  <<: *condition_binary_common
@@ -1,8 +1,10 @@
{
  "common": {
    "condition_behavior_name": "Condition passes if",
    "condition_for_name": "For at least",
    "condition_threshold_name": "Threshold type",
    "trigger_behavior_name": "Trigger when",
    "trigger_for_name": "For at least",
    "trigger_threshold_name": "Threshold type"
  },
  "conditions": {
@@ -23,6 +25,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::air_quality::common::condition_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::condition_for_name%]"
        }
      },
      "name": "Carbon monoxide cleared"
@@ -32,6 +37,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::air_quality::common::condition_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::condition_for_name%]"
        }
      },
      "name": "Carbon monoxide detected"
@@ -53,6 +61,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::air_quality::common::condition_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::condition_for_name%]"
        }
      },
      "name": "Gas cleared"
@@ -62,6 +73,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::air_quality::common::condition_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::condition_for_name%]"
        }
      },
      "name": "Gas detected"
@@ -167,6 +181,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::air_quality::common::condition_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::condition_for_name%]"
        }
      },
      "name": "Smoke cleared"
@@ -176,6 +193,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::air_quality::common::condition_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::condition_for_name%]"
        }
      },
      "name": "Smoke detected"
@@ -249,6 +269,9 @@
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        },
        "threshold": {
          "name": "[%key:component::air_quality::common::trigger_threshold_name%]"
        }
@@ -269,6 +292,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        }
      },
      "name": "Carbon monoxide cleared"
@@ -279,6 +305,9 @@
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        },
        "threshold": {
          "name": "[%key:component::air_quality::common::trigger_threshold_name%]"
        }
@@ -290,6 +319,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        }
      },
      "name": "Carbon monoxide detected"
@@ -299,6 +331,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        }
      },
      "name": "Gas cleared"
@@ -308,6 +343,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        }
      },
      "name": "Gas detected"
@@ -327,6 +365,9 @@
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        },
        "threshold": {
          "name": "[%key:component::air_quality::common::trigger_threshold_name%]"
        }
@@ -348,6 +389,9 @@
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        },
        "threshold": {
          "name": "[%key:component::air_quality::common::trigger_threshold_name%]"
        }
@@ -369,6 +413,9 @@
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        },
        "threshold": {
          "name": "[%key:component::air_quality::common::trigger_threshold_name%]"
        }
@@ -390,6 +437,9 @@
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        },
        "threshold": {
          "name": "[%key:component::air_quality::common::trigger_threshold_name%]"
        }
@@ -411,6 +461,9 @@
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        },
        "threshold": {
          "name": "[%key:component::air_quality::common::trigger_threshold_name%]"
        }
@@ -432,6 +485,9 @@
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        },
        "threshold": {
          "name": "[%key:component::air_quality::common::trigger_threshold_name%]"
        }
@@ -453,6 +509,9 @@
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        },
        "threshold": {
          "name": "[%key:component::air_quality::common::trigger_threshold_name%]"
        }
@@ -474,6 +533,9 @@
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        },
        "threshold": {
          "name": "[%key:component::air_quality::common::trigger_threshold_name%]"
        }
@@ -485,6 +547,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        }
      },
      "name": "Smoke cleared"
@@ -494,6 +559,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        }
      },
      "name": "Smoke detected"
@@ -513,6 +581,9 @@
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        },
        "threshold": {
          "name": "[%key:component::air_quality::common::trigger_threshold_name%]"
        }
@@ -534,6 +605,9 @@
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        },
        "threshold": {
          "name": "[%key:component::air_quality::common::trigger_threshold_name%]"
        }
@@ -555,6 +629,9 @@
        "behavior": {
          "name": "[%key:component::air_quality::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::air_quality::common::trigger_for_name%]"
        },
        "threshold": {
          "name": "[%key:component::air_quality::common::trigger_threshold_name%]"
        }
@@ -9,6 +9,11 @@
      - first
      - last
      - any
  for: &trigger_for
    required: true
    default: 00:00:00
    selector:
      duration:

# --- Unit lists for multi-unit pollutants ---

@@ -163,6 +168,7 @@
# Binary sensor detected/cleared trigger fields
.trigger_binary_fields: &trigger_binary_fields
  behavior: *trigger_behavior
  for: *trigger_for

# --- Binary sensor targets ---

@@ -294,6 +300,7 @@ co_crossed_threshold:
  target: *target_co_sensor
  fields:
    behavior: *trigger_behavior
    for: *trigger_for
    threshold:
      required: true
      selector:
@@ -320,6 +327,7 @@ co2_crossed_threshold:
  target: *target_co2
  fields:
    behavior: *trigger_behavior
    for: *trigger_for
    threshold:
      required: true
      selector:
@@ -344,6 +352,7 @@ pm1_crossed_threshold:
  target: *target_pm1
  fields:
    behavior: *trigger_behavior
    for: *trigger_for
    threshold:
      required: true
      selector:
@@ -368,6 +377,7 @@ pm25_crossed_threshold:
  target: *target_pm25
  fields:
    behavior: *trigger_behavior
    for: *trigger_for
    threshold:
      required: true
      selector:
@@ -392,6 +402,7 @@ pm4_crossed_threshold:
  target: *target_pm4
  fields:
    behavior: *trigger_behavior
    for: *trigger_for
    threshold:
      required: true
      selector:
@@ -416,6 +427,7 @@ pm10_crossed_threshold:
  target: *target_pm10
  fields:
    behavior: *trigger_behavior
    for: *trigger_for
    threshold:
      required: true
      selector:
@@ -442,6 +454,7 @@ ozone_crossed_threshold:
  target: *target_ozone
  fields:
    behavior: *trigger_behavior
    for: *trigger_for
    threshold:
      required: true
      selector:
@@ -470,6 +483,7 @@ voc_crossed_threshold:
  target: *target_voc
  fields:
    behavior: *trigger_behavior
    for: *trigger_for
    threshold:
      required: true
      selector:
@@ -498,6 +512,7 @@ voc_ratio_crossed_threshold:
  target: *target_voc_ratio
  fields:
    behavior: *trigger_behavior
    for: *trigger_for
    threshold:
      required: true
      selector:
@@ -526,6 +541,7 @@ no_crossed_threshold:
  target: *target_no
  fields:
    behavior: *trigger_behavior
    for: *trigger_for
    threshold:
      required: true
      selector:
@@ -554,6 +570,7 @@ no2_crossed_threshold:
  target: *target_no2
  fields:
    behavior: *trigger_behavior
    for: *trigger_for
    threshold:
      required: true
      selector:
@@ -580,6 +597,7 @@ n2o_crossed_threshold:
  target: *target_n2o
  fields:
    behavior: *trigger_behavior
    for: *trigger_for
    threshold:
      required: true
      selector:
@@ -606,6 +624,7 @@ so2_crossed_threshold:
  target: *target_so2
  fields:
    behavior: *trigger_behavior
    for: *trigger_for
    threshold:
      required: true
      selector:
@@ -33,14 +33,21 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator
from .coordinator import (
    AirOSConfigEntry,
    AirOSDataUpdateCoordinator,
    AirOSFirmwareUpdateCoordinator,
    AirOSRuntimeData,
)

_PLATFORMS: list[Platform] = [
    Platform.BINARY_SENSOR,
    Platform.BUTTON,
    Platform.SENSOR,
    Platform.UPDATE,
]


_LOGGER = logging.getLogger(__name__)


@@ -86,10 +93,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo

    airos_device = airos_class(**conn_data)

    coordinator = AirOSDataUpdateCoordinator(hass, entry, device_data, airos_device)
    await coordinator.async_config_entry_first_refresh()
    data_coordinator = AirOSDataUpdateCoordinator(
        hass, entry, device_data, airos_device
    )
    await data_coordinator.async_config_entry_first_refresh()

    entry.runtime_data = coordinator
    firmware_coordinator: AirOSFirmwareUpdateCoordinator | None = None
    if device_data["fw_major"] >= 8:
        firmware_coordinator = AirOSFirmwareUpdateCoordinator(hass, entry, airos_device)
        await firmware_coordinator.async_config_entry_first_refresh()

    entry.runtime_data = AirOSRuntimeData(
        status=data_coordinator,
        firmware=firmware_coordinator,
    )

    await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)


@@ -87,7 +87,7 @@ async def async_setup_entry(
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the AirOS binary sensors from a config entry."""
    coordinator = config_entry.runtime_data
    coordinator = config_entry.runtime_data.status

    entities = [
        AirOSBinarySensor(coordinator, description)

@@ -31,7 +31,9 @@ async def async_setup_entry(
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the AirOS button from a config entry."""
    async_add_entities([AirOSRebootButton(config_entry.runtime_data, REBOOT_BUTTON)])
    async_add_entities(
        [AirOSRebootButton(config_entry.runtime_data.status, REBOOT_BUTTON)]
    )


class AirOSRebootButton(AirOSEntity, ButtonEntity):

@@ -5,6 +5,7 @@ from datetime import timedelta
DOMAIN = "airos"

SCAN_INTERVAL = timedelta(minutes=1)
UPDATE_SCAN_INTERVAL = timedelta(days=1)

MANUFACTURER = "Ubiquiti"


@@ -2,7 +2,10 @@

from __future__ import annotations

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
import logging
from typing import Any, TypeVar

from airos.airos6 import AirOS6, AirOS6Data
from airos.airos8 import AirOS8, AirOS8Data
@@ -19,20 +22,61 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN, SCAN_INTERVAL
from .const import DOMAIN, SCAN_INTERVAL, UPDATE_SCAN_INTERVAL

_LOGGER = logging.getLogger(__name__)

AirOSDeviceDetect = AirOS8 | AirOS6
AirOSDataDetect = AirOS8Data | AirOS6Data
type AirOSDeviceDetect = AirOS8 | AirOS6
type AirOSDataDetect = AirOS8Data | AirOS6Data
type AirOSUpdateData = dict[str, Any]

type AirOSConfigEntry = ConfigEntry[AirOSDataUpdateCoordinator]
type AirOSConfigEntry = ConfigEntry[AirOSRuntimeData]

T = TypeVar("T", bound=AirOSDataDetect | AirOSUpdateData)


@dataclass
class AirOSRuntimeData:
    """Data for AirOS config entry."""

    status: AirOSDataUpdateCoordinator
    firmware: AirOSFirmwareUpdateCoordinator | None


async def async_fetch_airos_data(
    airos_device: AirOSDeviceDetect,
    update_method: Callable[[], Awaitable[T]],
) -> T:
    """Fetch data from AirOS device."""
    try:
        await airos_device.login()
        return await update_method()
    except AirOSConnectionAuthenticationError as err:
        _LOGGER.exception("Error authenticating with airOS device")
        raise ConfigEntryAuthFailed(
            translation_domain=DOMAIN, translation_key="invalid_auth"
        ) from err
    except (
        AirOSConnectionSetupError,
        AirOSDeviceConnectionError,
        TimeoutError,
    ) as err:
        _LOGGER.error("Error connecting to airOS device: %s", err)
        raise UpdateFailed(
            translation_domain=DOMAIN,
            translation_key="cannot_connect",
        ) from err
    except AirOSDataMissingError as err:
        _LOGGER.error("Expected data not returned by airOS device: %s", err)
        raise UpdateFailed(
            translation_domain=DOMAIN,
            translation_key="error_data_missing",
        ) from err


class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOSDataDetect]):
    """Class to manage fetching AirOS data from single endpoint."""
    """Class to manage fetching AirOS status data from single endpoint."""

    airos_device: AirOSDeviceDetect
    config_entry: AirOSConfigEntry

    def __init__(
@@ -54,28 +98,33 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOSDataDetect]):
        )

    async def _async_update_data(self) -> AirOSDataDetect:
        """Fetch data from AirOS."""
        try:
            await self.airos_device.login()
            return await self.airos_device.status()
        except AirOSConnectionAuthenticationError as err:
            _LOGGER.exception("Error authenticating with airOS device")
            raise ConfigEntryAuthFailed(
                translation_domain=DOMAIN, translation_key="invalid_auth"
            ) from err
        except (
            AirOSConnectionSetupError,
            AirOSDeviceConnectionError,
            TimeoutError,
        ) as err:
            _LOGGER.error("Error connecting to airOS device: %s", err)
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="cannot_connect",
            ) from err
        except AirOSDataMissingError as err:
            _LOGGER.error("Expected data not returned by airOS device: %s", err)
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="error_data_missing",
            ) from err
        """Fetch status data from AirOS."""
        return await async_fetch_airos_data(self.airos_device, self.airos_device.status)


class AirOSFirmwareUpdateCoordinator(DataUpdateCoordinator[AirOSUpdateData]):
    """Class to manage fetching AirOS firmware."""

    config_entry: AirOSConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: AirOSConfigEntry,
        airos_device: AirOSDeviceDetect,
    ) -> None:
        """Initialize the coordinator."""
        self.airos_device = airos_device
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name=DOMAIN,
            update_interval=UPDATE_SCAN_INTERVAL,
        )

    async def _async_update_data(self) -> AirOSUpdateData:
        """Fetch firmware data from AirOS."""
        return await async_fetch_airos_data(
            self.airos_device, self.airos_device.update_check
        )

@@ -29,5 +29,15 @@ async def async_get_config_entry_diagnostics(
    """Return diagnostics for a config entry."""
    return {
        "entry_data": async_redact_data(entry.data, TO_REDACT_HA),
        "data": async_redact_data(entry.runtime_data.data.to_dict(), TO_REDACT_AIROS),
        "data": {
            "status_data": async_redact_data(
                entry.runtime_data.status.data.to_dict(), TO_REDACT_AIROS
            ),
            "firmware_data": async_redact_data(
                entry.runtime_data.firmware.data
                if entry.runtime_data.firmware is not None
                else {},
                TO_REDACT_AIROS,
            ),
        },
    }

@@ -180,7 +180,7 @@ async def async_setup_entry(
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the AirOS sensors from a config entry."""
    coordinator = config_entry.runtime_data
    coordinator = config_entry.runtime_data.status

    entities = [AirOSSensor(coordinator, description) for description in COMMON_SENSORS]


@@ -206,6 +206,12 @@
    },
    "reboot_failed": {
      "message": "The device did not accept the reboot request. Try again, or check your device web interface for errors."
    },
    "update_connection_authentication_error": {
      "message": "Authentication or connection failed during firmware update"
    },
    "update_error": {
      "message": "Connection failed during firmware update"
    }
  }
}

@@ -0,0 +1,101 @@
"""AirOS update component for Home Assistant."""

from __future__ import annotations

import logging
from typing import Any

from airos.exceptions import AirOSConnectionAuthenticationError, AirOSException

from homeassistant.components.update import (
    UpdateDeviceClass,
    UpdateEntity,
    UpdateEntityFeature,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN
from .coordinator import (
    AirOSConfigEntry,
    AirOSDataUpdateCoordinator,
    AirOSFirmwareUpdateCoordinator,
)
from .entity import AirOSEntity

PARALLEL_UPDATES = 0

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: AirOSConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the AirOS update entity from a config entry."""
    runtime_data = config_entry.runtime_data

    if runtime_data.firmware is None:  # Unsupported device
        return
    async_add_entities([AirOSUpdateEntity(runtime_data.status, runtime_data.firmware)])


class AirOSUpdateEntity(AirOSEntity, UpdateEntity):
    """Update entity for AirOS firmware updates."""

    _attr_device_class = UpdateDeviceClass.FIRMWARE
    _attr_supported_features = UpdateEntityFeature.INSTALL

    def __init__(
        self,
        status: AirOSDataUpdateCoordinator,
        firmware: AirOSFirmwareUpdateCoordinator,
    ) -> None:
        """Initialize the AirOS update entity."""
        super().__init__(status)
        self.status = status
        self.firmware = firmware

        self._attr_unique_id = f"{status.data.derived.mac}_firmware_update"

    @property
    def installed_version(self) -> str | None:
        """Return the installed firmware version."""
        return self.status.data.host.fwversion

    @property
    def latest_version(self) -> str | None:
        """Return the latest firmware version."""
        if not self.firmware.data.get("update", False):
            return self.status.data.host.fwversion
        return self.firmware.data.get("version")

    @property
    def release_url(self) -> str | None:
        """Return the release url of the latest firmware."""
        return self.firmware.data.get("changelog")

    async def async_install(
        self,
        version: str | None,
        backup: bool,
        **kwargs: Any,
    ) -> None:
        """Handle the firmware update installation."""
        _LOGGER.debug("Starting firmware update")
        try:
            await self.status.airos_device.login()
            await self.status.airos_device.download()
            await self.status.airos_device.install()
        except AirOSConnectionAuthenticationError as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="update_connection_authentication_error",
            ) from err
        except AirOSException as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="update_error",
            ) from err
@@ -4,6 +4,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.automation import DomainSpec
from homeassistant.helpers.condition import (
    ENTITY_STATE_CONDITION_SCHEMA_ANY_ALL_FOR,
    Condition,
    EntityStateConditionBase,
    make_entity_state_condition,
@@ -25,6 +26,7 @@ class EntityStateRequiredFeaturesCondition(EntityStateConditionBase):
    """State condition."""

    _required_features: int
    _schema = ENTITY_STATE_CONDITION_SCHEMA_ANY_ALL_FOR

    def entity_filter(self, entities: set[str]) -> set[str]:
        """Filter entities of this domain with the required features."""
@@ -82,9 +84,11 @@ CONDITIONS: dict[str, type[Condition]] = {
        AlarmControlPanelState.ARMED_VACATION,
        AlarmControlPanelEntityFeature.ARM_VACATION,
    ),
    "is_disarmed": make_entity_state_condition(DOMAIN, AlarmControlPanelState.DISARMED),
    "is_disarmed": make_entity_state_condition(
        DOMAIN, AlarmControlPanelState.DISARMED, support_duration=True
    ),
    "is_triggered": make_entity_state_condition(
        DOMAIN, AlarmControlPanelState.TRIGGERED
        DOMAIN, AlarmControlPanelState.TRIGGERED, support_duration=True
    ),
}


@@ -1,9 +1,9 @@
.condition_common: &condition_common
  target:
  target: &condition_common_target
    entity:
      domain: alarm_control_panel
  fields: &condition_common_fields
    behavior:
    behavior: &condition_common_behavior
      required: true
      default: any
      selector:
@@ -13,10 +13,20 @@
            - all
            - any

.condition_common_for: &condition_common_for
  target: *condition_common_target
  fields: &condition_common_for_fields
    behavior: *condition_common_behavior
    for:
      required: true
      default: 00:00:00
      selector:
        duration:

is_armed: *condition_common

is_armed_away:
  fields: *condition_common_fields
  fields: *condition_common_for_fields
  target:
    entity:
      domain: alarm_control_panel
@@ -24,7 +34,7 @@ is_armed_away:
      - alarm_control_panel.AlarmControlPanelEntityFeature.ARM_AWAY

is_armed_home:
  fields: *condition_common_fields
  fields: *condition_common_for_fields
  target:
    entity:
      domain: alarm_control_panel
@@ -32,7 +42,7 @@ is_armed_home:
      - alarm_control_panel.AlarmControlPanelEntityFeature.ARM_HOME

is_armed_night:
  fields: *condition_common_fields
  fields: *condition_common_for_fields
  target:
    entity:
      domain: alarm_control_panel
@@ -40,13 +50,13 @@ is_armed_night:
      - alarm_control_panel.AlarmControlPanelEntityFeature.ARM_NIGHT

is_armed_vacation:
  fields: *condition_common_fields
  fields: *condition_common_for_fields
  target:
    entity:
      domain: alarm_control_panel
    supported_features:
      - alarm_control_panel.AlarmControlPanelEntityFeature.ARM_VACATION

is_disarmed: *condition_common
is_disarmed: *condition_common_for

is_triggered: *condition_common
is_triggered: *condition_common_for

@@ -1,7 +1,9 @@
{
  "common": {
    "condition_behavior_name": "Condition passes if",
    "trigger_behavior_name": "Trigger when"
    "condition_for_name": "For at least",
    "trigger_behavior_name": "Trigger when",
    "trigger_for_name": "For at least"
  },
  "conditions": {
    "is_armed": {
@@ -18,6 +20,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::alarm_control_panel::common::condition_for_name%]"
        }
      },
      "name": "Alarm is armed away"
@@ -27,6 +32,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::alarm_control_panel::common::condition_for_name%]"
        }
      },
      "name": "Alarm is armed home"
@@ -36,6 +44,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::alarm_control_panel::common::condition_for_name%]"
        }
      },
      "name": "Alarm is armed night"
@@ -45,6 +56,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::alarm_control_panel::common::condition_for_name%]"
        }
      },
      "name": "Alarm is armed vacation"
@@ -54,6 +68,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::alarm_control_panel::common::condition_for_name%]"
        }
      },
      "name": "Alarm is disarmed"
@@ -63,6 +80,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::alarm_control_panel::common::condition_for_name%]"
        }
      },
      "name": "Alarm is triggered"
@@ -234,6 +254,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::alarm_control_panel::common::trigger_for_name%]"
        }
      },
      "name": "Alarm armed"
@@ -243,6 +266,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::alarm_control_panel::common::trigger_for_name%]"
        }
      },
      "name": "Alarm armed away"
@@ -252,6 +278,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::alarm_control_panel::common::trigger_for_name%]"
        }
      },
      "name": "Alarm armed home"
@@ -261,6 +290,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::alarm_control_panel::common::trigger_for_name%]"
        }
      },
      "name": "Alarm armed night"
@@ -270,6 +302,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::alarm_control_panel::common::trigger_for_name%]"
        }
      },
      "name": "Alarm armed vacation"
@@ -279,6 +314,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::alarm_control_panel::common::trigger_for_name%]"
        }
      },
      "name": "Alarm disarmed"
@@ -288,6 +326,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
        },
        "for": {
          "name": "[%key:component::alarm_control_panel::common::trigger_for_name%]"
        }
      },
      "name": "Alarm triggered"

@@ -13,6 +13,11 @@
            - last
            - any
          translation_key: trigger_behavior
    for:
      required: true
      default: 00:00:00
      selector:
        duration:

armed: *trigger_common


@@ -54,7 +54,16 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
            entry.data[CONF_PASSWORD],
            entry.data[CONF_LOGIN_DATA],
        )
        self.previous_devices: set[str] = set()
        device_registry = dr.async_get(hass)
        self.previous_devices: set[str] = {
            identifier
            for device in device_registry.devices.get_devices_for_config_entry_id(
                entry.entry_id
            )
            if device.entry_type != dr.DeviceEntryType.SERVICE
            for identifier_domain, identifier in device.identifiers
            if identifier_domain == DOMAIN
        }

    async def _async_update_data(self) -> dict[str, AmazonDevice]:
        """Update device data."""

@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["aioamazondevices"],
  "quality_scale": "platinum",
  "requirements": ["aioamazondevices==13.3.2"]
  "requirements": ["aioamazondevices==13.4.3"]
}

@@ -92,6 +92,7 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
            _LOGGER.debug("Updating statistics for the first time")
            usage_sum = 0.0
            last_stats_time = None
            allow_update_last_stored_hour = False
        else:
            if not meter.readings or len(meter.readings) == 0:
                _LOGGER.debug("No recent usage statistics found, skipping update")
@@ -107,6 +108,7 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
                    continue
                start = dt_util.as_local(parsed_read_at) - timedelta(hours=1)
                _LOGGER.debug("Getting statistics at %s", start)
                stats: dict[str, list[Any]] = {}
                for end in (start + timedelta(seconds=1), None):
                    stats = await get_instance(self.hass).async_add_executor_job(
                        statistics_during_period,
@@ -127,15 +129,28 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
                        "Not found, trying to find oldest statistic after %s",
                        start,
                    )
            assert stats

            def _safe_get_sum(records: list[Any]) -> float:
                if records and "sum" in records[0]:
                    return float(records[0]["sum"])
                return 0.0
            if not stats or not stats.get(usage_statistic_id):
                _LOGGER.debug(
                    "Could not find existing statistics during period lookup for %s, "
                    "falling back to last stored statistic",
                    usage_statistic_id,
                )
                allow_update_last_stored_hour = True
                last_records = last_stat[usage_statistic_id]
                usage_sum = float(last_records[0].get("sum") or 0.0)
                last_stats_time = last_records[0]["start"]
            else:
                allow_update_last_stored_hour = False
                records = stats[usage_statistic_id]

            usage_sum = _safe_get_sum(stats.get(usage_statistic_id, []))
            last_stats_time = stats[usage_statistic_id][0]["start"]
                def _safe_get_sum(records: list[Any]) -> float:
                    if records and "sum" in records[0]:
                        return float(records[0]["sum"])
                    return 0.0

                usage_sum = _safe_get_sum(records)
                last_stats_time = records[0]["start"]

        usage_statistics = []

@@ -148,7 +163,13 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
            )
            continue
        start = dt_util.as_local(parsed_read_at) - timedelta(hours=1)
        if last_stats_time is not None and start.timestamp() <= last_stats_time:
        if last_stats_time is not None and (
            start.timestamp() < last_stats_time
            or (
                start.timestamp() == last_stats_time
                and not allow_update_last_stored_hour
            )
        ):
            continue
        usage_state = max(0, read["consumption"] / 1000)
        usage_sum = max(0, read["read"])

@@ -2,34 +2,25 @@

from __future__ import annotations

import anthropic
from anthropic.resources.messages.messages import DEPRECATED_MODELS

from homeassistant.config_entries import ConfigEntry, ConfigSubentry
from homeassistant.config_entries import ConfigSubentry
from homeassistant.const import CONF_API_KEY, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import (
    config_validation as cv,
    device_registry as dr,
    entity_registry as er,
    issue_registry as ir,
)
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.helpers.typing import ConfigType

from .const import (
    CONF_CHAT_MODEL,
    DEFAULT_CONVERSATION_NAME,
    DEPRECATED_MODELS,
    DOMAIN,
    LOGGER,
)
from .const import CONF_CHAT_MODEL, DEFAULT_CONVERSATION_NAME, DOMAIN, LOGGER
from .coordinator import AnthropicConfigEntry, AnthropicCoordinator

PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up Anthropic."""
@@ -39,38 +30,17 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
    """Set up Anthropic from a config entry."""
    client = anthropic.AsyncAnthropic(
        api_key=entry.data[CONF_API_KEY], http_client=get_async_client(hass)
    )
    try:
        await client.models.list(timeout=10.0)
    except anthropic.AuthenticationError as err:
        raise ConfigEntryAuthFailed(
            translation_domain=DOMAIN,
            translation_key="api_authentication_error",
            translation_placeholders={"message": err.message},
        ) from err
    except anthropic.AnthropicError as err:
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="api_error",
            translation_placeholders={
                "message": err.message
                if isinstance(err, anthropic.APIError)
                else str(err)
            },
        ) from err

    entry.runtime_data = client
    coordinator = AnthropicCoordinator(hass, entry)
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator
    LOGGER.debug("Available models: %s", coordinator.data)

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    entry.async_on_unload(entry.add_update_listener(async_update_options))

    for subentry in entry.subentries.values():
        if (model := subentry.data.get(CONF_CHAT_MODEL)) and model.startswith(
            tuple(DEPRECATED_MODELS)
        ):
        if (model := subentry.data.get(CONF_CHAT_MODEL)) and model in DEPRECATED_MODELS:
            ir.async_create_issue(
                hass,
                DOMAIN,
@@ -260,6 +230,19 @@ async def async_migrate_entry(hass: HomeAssistant, entry: AnthropicConfigEntry)
        )
        hass.config_entries.async_update_entry(entry, minor_version=3)

    if entry.version == 2 and entry.minor_version == 3:
        # Remove Temperature parameter
        CONF_TEMPERATURE = "temperature"

        for subentry in entry.subentries.values():
            data = subentry.data.copy()
            if CONF_TEMPERATURE not in data:
                continue
            data.pop(CONF_TEMPERATURE, None)
            hass.config_entries.async_update_subentry(entry, subentry, data=data)

        hass.config_entries.async_update_entry(entry, minor_version=4)

    LOGGER.debug(
        "Migration to version %s:%s successful", entry.version, entry.minor_version
    )

@@ -5,7 +5,6 @@ from __future__ import annotations
from collections.abc import Mapping
import json
import logging
import re
from typing import TYPE_CHECKING, Any, cast

import anthropic
@@ -30,6 +29,7 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import llm
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.helpers.selector import (
    NumberSelector,
@@ -43,15 +43,15 @@ from homeassistant.helpers.selector import (
from homeassistant.helpers.typing import VolDictType

from .const import (
    CODE_EXECUTION_UNSUPPORTED_MODELS,
    CONF_CHAT_MODEL,
    CONF_CODE_EXECUTION,
    CONF_MAX_TOKENS,
    CONF_PROMPT,
    CONF_PROMPT_CACHING,
    CONF_RECOMMENDED,
    CONF_TEMPERATURE,
    CONF_THINKING_BUDGET,
    CONF_THINKING_EFFORT,
    CONF_TOOL_SEARCH,
    CONF_WEB_SEARCH,
    CONF_WEB_SEARCH_CITY,
    CONF_WEB_SEARCH_COUNTRY,
@@ -63,10 +63,11 @@ from .const import (
    DEFAULT_AI_TASK_NAME,
    DEFAULT_CONVERSATION_NAME,
    DOMAIN,
    NON_ADAPTIVE_THINKING_MODELS,
    NON_THINKING_MODELS,
    WEB_SEARCH_UNSUPPORTED_MODELS,
    MIN_THINKING_BUDGET,
    TOOL_SEARCH_UNSUPPORTED_MODELS,
    PromptCaching,
)
from .coordinator import model_alias

if TYPE_CHECKING:
    from . import AnthropicConfigEntry
@@ -101,39 +102,11 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
    await client.models.list(timeout=10.0)


async def get_model_list(client: anthropic.AsyncAnthropic) -> list[SelectOptionDict]:
    """Get list of available models."""
    try:
        models = (await client.models.list()).data
    except anthropic.AnthropicError:
        models = []
    _LOGGER.debug("Available models: %s", models)
    model_options: list[SelectOptionDict] = []
    short_form = re.compile(r"[^\d]-\d$")
    for model_info in models:
        # Resolve alias from versioned model name:
        model_alias = (
            model_info.id[:-9]
            if model_info.id != "claude-3-haiku-20240307"
            and model_info.id[-2:-1] != "-"
            else model_info.id
        )
        if short_form.search(model_alias):
            model_alias += "-0"
        model_options.append(
            SelectOptionDict(
                label=model_info.display_name,
                value=model_alias,
            )
        )
    return model_options


class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Anthropic."""

    VERSION = 2
    MINOR_VERSION = 3
    MINOR_VERSION = 4

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
@@ -225,6 +198,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
    """Flow for managing conversation subentries."""

    options: dict[str, Any]
    model_info: anthropic.types.ModelInfo

    @property
    def _is_new(self) -> bool:
@@ -338,29 +312,49 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
    ) -> SubentryFlowResult:
        """Manage advanced options."""
        errors: dict[str, str] = {}
        description_placeholders: dict[str, str] = {}

        step_schema: VolDictType = {
            vol.Optional(
                CONF_CHAT_MODEL,
                default=DEFAULT[CONF_CHAT_MODEL],
            ): SelectSelector(
                SelectSelectorConfig(
                    options=await self._get_model_list(), custom_value=True
                )
                SelectSelectorConfig(options=self._get_model_list(), custom_value=True)
            ),
            vol.Optional(
                CONF_MAX_TOKENS,
                default=DEFAULT[CONF_MAX_TOKENS],
            ): int,
            vol.Optional(
                CONF_TEMPERATURE,
                default=DEFAULT[CONF_TEMPERATURE],
            ): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
                CONF_PROMPT_CACHING,
                default=DEFAULT[CONF_PROMPT_CACHING],
            ): SelectSelector(
                SelectSelectorConfig(
                    options=[x.value for x in PromptCaching],
                    translation_key=CONF_PROMPT_CACHING,
                    mode=SelectSelectorMode.DROPDOWN,
                )
            ),
        }

        if user_input is not None:
            self.options.update(user_input)

            coordinator = self._get_entry().runtime_data
            self.model_info, status = coordinator.get_model_info(
                self.options[CONF_CHAT_MODEL]
            )
            if not status:
                # Couldn't find the model in the cached list, try to fetch it directly
                client = coordinator.client
                try:
                    self.model_info = await client.models.retrieve(
                        self.options[CONF_CHAT_MODEL], timeout=10.0
                    )
                except anthropic.NotFoundError:
                    errors[CONF_CHAT_MODEL] = "model_not_found"
                except anthropic.AnthropicError as err:
                    errors[CONF_CHAT_MODEL] = "api_error"
                    description_placeholders["message"] = (
                        err.message if isinstance(err, anthropic.APIError) else str(err)
                    )

            if not errors:
                return await self.async_step_model()

@@ -370,6 +364,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
                vol.Schema(step_schema), self.options
            ),
            errors=errors,
            description_placeholders=description_placeholders,
        )

    async def async_step_model(
@@ -378,30 +373,59 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
        """Manage model-specific options."""
        errors: dict[str, str] = {}

        step_schema: VolDictType = {}
        step_schema: VolDictType = {
            vol.Optional(
                CONF_MAX_TOKENS,
                default=DEFAULT[CONF_MAX_TOKENS],
            ): vol.All(
                NumberSelector(
                    NumberSelectorConfig(min=0, max=self.model_info.max_tokens)
                ),
                vol.Coerce(int),
            )
            if self.model_info.max_tokens
            else cv.positive_int,
        }

        model = self.options[CONF_CHAT_MODEL]

        if not model.startswith(tuple(NON_THINKING_MODELS)) and model.startswith(
            tuple(NON_ADAPTIVE_THINKING_MODELS)
        if (
            self.model_info.capabilities
            and self.model_info.capabilities.thinking.supported
            and not self.model_info.capabilities.thinking.types.adaptive.supported
        ):
            step_schema[
                vol.Optional(
                    CONF_THINKING_BUDGET, default=DEFAULT[CONF_THINKING_BUDGET]
                )
            ] = vol.All(
                NumberSelector(
                    NumberSelectorConfig(
                        min=0,
                        max=self.options.get(CONF_MAX_TOKENS, DEFAULT[CONF_MAX_TOKENS]),
                    )
                ),
                vol.Coerce(int),
            ] = (
                vol.All(
                    NumberSelector(
                        NumberSelectorConfig(min=0, max=self.model_info.max_tokens)
                    ),
                    vol.Coerce(int),
                )
                if self.model_info.max_tokens
                else cv.positive_int
            )
        else:
            self.options.pop(CONF_THINKING_BUDGET, None)

        if not model.startswith(tuple(NON_ADAPTIVE_THINKING_MODELS)):
        if (
            self.model_info.capabilities
            and (effort_capability := self.model_info.capabilities.effort).supported
        ):
            effort_options: list[str] = []
            if self.model_info.capabilities.thinking.types.adaptive.supported:
                effort_options.append("none")
            if effort_capability.low.supported:
                effort_options.append("low")
            if effort_capability.medium.supported:
                effort_options.append("medium")
            if effort_capability.high.supported:
                effort_options.append("high")
            if effort_capability.xhigh and effort_capability.xhigh.supported:
                effort_options.append("xhigh")
            if effort_capability.max.supported:
                effort_options.append("max")
            step_schema[
                vol.Optional(
                    CONF_THINKING_EFFORT,
@@ -409,7 +433,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
                )
            ] = SelectSelector(
                SelectSelectorConfig(
                    options=["none", "low", "medium", "high", "max"],
                    options=effort_options,
                    translation_key=CONF_THINKING_EFFORT,
                    mode=SelectSelectorMode.DROPDOWN,
                )
@@ -417,47 +441,58 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
        else:
            self.options.pop(CONF_THINKING_EFFORT, None)

        if not model.startswith(tuple(CODE_EXECUTION_UNSUPPORTED_MODELS)):
            step_schema[
        step_schema.update(
            {
                vol.Optional(
                    CONF_CODE_EXECUTION,
                    default=DEFAULT[CONF_CODE_EXECUTION],
                )
            ] = bool
        else:
            self.options.pop(CONF_CODE_EXECUTION, None)

        if not model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)):
            step_schema.update(
                {
                    vol.Optional(
                        CONF_WEB_SEARCH,
                        default=DEFAULT[CONF_WEB_SEARCH],
                    ): bool,
                    vol.Optional(
                        CONF_WEB_SEARCH_MAX_USES,
                        default=DEFAULT[CONF_WEB_SEARCH_MAX_USES],
                    ): int,
                    vol.Optional(
                        CONF_WEB_SEARCH_USER_LOCATION,
                        default=DEFAULT[CONF_WEB_SEARCH_USER_LOCATION],
                    ): bool,
                }
            )
        else:
            self.options.pop(CONF_WEB_SEARCH, None)
            self.options.pop(CONF_WEB_SEARCH_MAX_USES, None)
            self.options.pop(CONF_WEB_SEARCH_USER_LOCATION, None)
                ): bool,
                vol.Optional(
                    CONF_WEB_SEARCH,
                    default=DEFAULT[CONF_WEB_SEARCH],
                ): bool,
                vol.Optional(
                    CONF_WEB_SEARCH_MAX_USES,
                    default=DEFAULT[CONF_WEB_SEARCH_MAX_USES],
                ): int,
                vol.Optional(
                    CONF_WEB_SEARCH_USER_LOCATION,
                    default=DEFAULT[CONF_WEB_SEARCH_USER_LOCATION],
                ): bool,
            }
        )

        self.options.pop(CONF_WEB_SEARCH_CITY, None)
        self.options.pop(CONF_WEB_SEARCH_REGION, None)
        self.options.pop(CONF_WEB_SEARCH_COUNTRY, None)
        self.options.pop(CONF_WEB_SEARCH_TIMEZONE, None)

        model = self.options[CONF_CHAT_MODEL]

        if not model.startswith(tuple(TOOL_SEARCH_UNSUPPORTED_MODELS)):
            step_schema[
                vol.Optional(
                    CONF_TOOL_SEARCH,
                    default=DEFAULT[CONF_TOOL_SEARCH],
                )
            ] = bool
        else:
            self.options.pop(CONF_TOOL_SEARCH, None)

        if not step_schema:
            user_input = {}
            # Currently our schema is always present, but if one day it becomes empty,
            # then the below line is needed to skip this step
            user_input = {}  # pragma: no cover

        if user_input is not None:
            if (
                CONF_THINKING_BUDGET in user_input
                and user_input[CONF_THINKING_BUDGET] >= MIN_THINKING_BUDGET
                and user_input[CONF_THINKING_BUDGET]
                >= user_input.get(CONF_MAX_TOKENS, DEFAULT[CONF_MAX_TOKENS])
            ):
                errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"

            if user_input.get(CONF_WEB_SEARCH, DEFAULT[CONF_WEB_SEARCH]) and not errors:
                if user_input.get(
                    CONF_WEB_SEARCH_USER_LOCATION,
@@ -489,13 +524,16 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
            last_step=True,
        )

    async def _get_model_list(self) -> list[SelectOptionDict]:
    def _get_model_list(self) -> list[SelectOptionDict]:
        """Get list of available models."""
        client = anthropic.AsyncAnthropic(
            api_key=self._get_entry().data[CONF_API_KEY],
            http_client=get_async_client(self.hass),
        )
        return await get_model_list(client)
        coordinator = self._get_entry().runtime_data
        return [
            SelectOptionDict(
                label=model_info.display_name,
                value=model_alias(model_info.id),
            )
            for model_info in coordinator.data or []
        ]

    async def _get_location_data(self) -> dict[str, str]:
        """Get approximate location data of the user."""

@@ -1,5 +1,6 @@
"""Constants for the Anthropic integration."""

from enum import StrEnum
import logging

DOMAIN = "anthropic"
@@ -13,9 +14,10 @@ CONF_PROMPT = "prompt"
CONF_CHAT_MODEL = "chat_model"
CONF_CODE_EXECUTION = "code_execution"
CONF_MAX_TOKENS = "max_tokens"
CONF_TEMPERATURE = "temperature"
CONF_PROMPT_CACHING = "prompt_caching"
CONF_THINKING_BUDGET = "thinking_budget"
CONF_THINKING_EFFORT = "thinking_effort"
CONF_TOOL_SEARCH = "tool_search"
CONF_WEB_SEARCH = "web_search"
CONF_WEB_SEARCH_USER_LOCATION = "user_location"
CONF_WEB_SEARCH_MAX_USES = "web_search_max_uses"
@@ -24,63 +26,30 @@ CONF_WEB_SEARCH_REGION = "region"
CONF_WEB_SEARCH_COUNTRY = "country"
CONF_WEB_SEARCH_TIMEZONE = "timezone"


class PromptCaching(StrEnum):
    """Prompt caching options."""

    OFF = "off"
    PROMPT = "prompt"
    AUTOMATIC = "automatic"


MIN_THINKING_BUDGET = 1024

DEFAULT = {
    CONF_CHAT_MODEL: "claude-haiku-4-5",
    CONF_CODE_EXECUTION: False,
    CONF_MAX_TOKENS: 3000,
    CONF_TEMPERATURE: 1.0,
    CONF_THINKING_BUDGET: 0,
    CONF_PROMPT_CACHING: PromptCaching.PROMPT.value,
    CONF_THINKING_BUDGET: MIN_THINKING_BUDGET,
    CONF_THINKING_EFFORT: "low",
    CONF_TOOL_SEARCH: False,
    CONF_WEB_SEARCH: False,
    CONF_WEB_SEARCH_USER_LOCATION: False,
    CONF_WEB_SEARCH_MAX_USES: 5,
}

MIN_THINKING_BUDGET = 1024

NON_THINKING_MODELS = [
    "claude-3-haiku",
]

NON_ADAPTIVE_THINKING_MODELS = [
    "claude-opus-4-5",
    "claude-sonnet-4-5",
    "claude-haiku-4-5",
    "claude-opus-4-1",
    "claude-opus-4-0",
    "claude-opus-4-20250514",
    "claude-sonnet-4-0",
    "claude-sonnet-4-20250514",
    "claude-3-haiku",
]

UNSUPPORTED_STRUCTURED_OUTPUT_MODELS = [
    "claude-opus-4-1",
    "claude-opus-4-0",
    "claude-opus-4-20250514",
    "claude-sonnet-4-0",
    "claude-sonnet-4-20250514",
    "claude-3-haiku",
]

WEB_SEARCH_UNSUPPORTED_MODELS = [
    "claude-3-haiku",
]

CODE_EXECUTION_UNSUPPORTED_MODELS = [
    "claude-3-haiku",
]

PROGRAMMATIC_TOOL_CALLING_UNSUPPORTED_MODELS = [
    "claude-haiku-4-5",
    "claude-opus-4-1",
    "claude-opus-4-0",
    "claude-opus-4-20250514",
    "claude-sonnet-4-0",
    "claude-sonnet-4-20250514",
    "claude-3-haiku",
]

DEPRECATED_MODELS = [
    "claude-3",
TOOL_SEARCH_UNSUPPORTED_MODELS = [
    "claude-haiku",
]

@@ -0,0 +1,113 @@
"""Coordinator for the Anthropic integration."""

from __future__ import annotations

import datetime
import re

import anthropic

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN, LOGGER

UPDATE_INTERVAL_CONNECTED = datetime.timedelta(hours=12)
UPDATE_INTERVAL_DISCONNECTED = datetime.timedelta(minutes=1)

type AnthropicConfigEntry = ConfigEntry[AnthropicCoordinator]


_model_short_form = re.compile(r"[^\d]-\d$")


@callback
def model_alias(model_id: str) -> str:
    """Resolve alias from versioned model name."""
    if model_id[-2:-1] != "-" and not model_id.endswith("-preview"):
        model_id = model_id[:-9]
    if _model_short_form.search(model_id):
        return model_id + "-0"
    return model_id


class AnthropicCoordinator(DataUpdateCoordinator[list[anthropic.types.ModelInfo]]):
    """DataUpdateCoordinator which uses different intervals after successful and unsuccessful updates."""

    client: anthropic.AsyncAnthropic

    def __init__(self, hass: HomeAssistant, config_entry: AnthropicConfigEntry) -> None:
        """Initialize the coordinator."""
        super().__init__(
            hass,
            LOGGER,
            config_entry=config_entry,
            name=config_entry.title,
            update_interval=UPDATE_INTERVAL_CONNECTED,
            update_method=self.async_update_data,
            always_update=False,
        )
        self.client = anthropic.AsyncAnthropic(
            api_key=config_entry.data[CONF_API_KEY], http_client=get_async_client(hass)
        )

    @callback
    def async_set_updated_data(self, data: list[anthropic.types.ModelInfo]) -> None:
        """Manually update data, notify listeners and update refresh interval."""
        self.update_interval = UPDATE_INTERVAL_CONNECTED
        super().async_set_updated_data(data)

    async def async_update_data(self) -> list[anthropic.types.ModelInfo]:
        """Fetch data from the API."""
        try:
            self.update_interval = UPDATE_INTERVAL_DISCONNECTED
            result = await self.client.models.list(timeout=10.0)
            self.update_interval = UPDATE_INTERVAL_CONNECTED
        except anthropic.APITimeoutError as err:
            raise TimeoutError(err.message or str(err)) from err
        except anthropic.AuthenticationError as err:
            raise ConfigEntryAuthFailed(
                translation_domain=DOMAIN,
                translation_key="api_authentication_error",
                translation_placeholders={"message": err.message},
            ) from err
        except anthropic.APIError as err:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="api_error",
                translation_placeholders={"message": err.message},
            ) from err
        return result.data

    def mark_connection_error(self) -> None:
        """Mark the connection as having an error and reschedule background check."""
        self.update_interval = UPDATE_INTERVAL_DISCONNECTED
        if self.last_update_success:
            self.last_update_success = False
            self.async_update_listeners()
        if self._listeners and not self.hass.is_stopping:
            self._schedule_refresh()

    @callback
    def get_model_info(self, model_id: str) -> tuple[anthropic.types.ModelInfo, bool]:
        """Get model info for a given model ID."""
        # First try: exact name match
        for model in self.data or []:
            if model.id == model_id:
                return model, True
        # Second try: match by alias
        alias = model_alias(model_id)
        for model in self.data or []:
            if model_alias(model.id) == alias:
                return model, True
        # Model not found, return safe defaults
        return anthropic.types.ModelInfo(
            type="model",
            id=model_id,
            created_at=datetime.datetime(1970, 1, 1, tzinfo=datetime.UTC),
            display_name=alias,
        ), False
@@ -30,6 +30,7 @@ from anthropic.types import (
    MessageDeltaUsage,
    MessageParam,
    MessageStreamEvent,
    ModelInfo,
    OutputConfigParam,
    RawContentBlockDeltaEvent,
    RawContentBlockStartEvent,
@@ -58,6 +59,8 @@ from anthropic.types import (
    ToolChoiceAutoParam,
    ToolChoiceToolParam,
    ToolParam,
    ToolSearchToolBm25_20251119Param,
    ToolSearchToolResultBlock,
    ToolUnionParam,
    ToolUseBlock,
    ToolUseBlockParam,
@@ -74,6 +77,9 @@ from anthropic.types.message_create_params import MessageCreateParamsStreaming
from anthropic.types.text_editor_code_execution_tool_result_block_param import (
    Content as TextEditorCodeExecutionToolResultBlockParamContentParam,
)
from anthropic.types.tool_search_tool_result_block_param import (
    Content as ToolSearchToolResultBlockParamContentParam,
)
import voluptuous as vol
from voluptuous_openapi import convert

@@ -82,19 +88,19 @@ from homeassistant.config_entries import ConfigSubentry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, llm
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.json import json_dumps
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util import slugify
from homeassistant.util.json import JsonObjectType

from . import AnthropicConfigEntry
from .const import (
    CONF_CHAT_MODEL,
    CONF_CODE_EXECUTION,
    CONF_MAX_TOKENS,
    CONF_TEMPERATURE,
    CONF_PROMPT_CACHING,
    CONF_THINKING_BUDGET,
    CONF_THINKING_EFFORT,
    CONF_TOOL_SEARCH,
    CONF_WEB_SEARCH,
    CONF_WEB_SEARCH_CITY,
    CONF_WEB_SEARCH_COUNTRY,
@@ -106,11 +112,9 @@ from .const import (
    DOMAIN,
    LOGGER,
    MIN_THINKING_BUDGET,
    NON_ADAPTIVE_THINKING_MODELS,
    NON_THINKING_MODELS,
    PROGRAMMATIC_TOOL_CALLING_UNSUPPORTED_MODELS,
    UNSUPPORTED_STRUCTURED_OUTPUT_MODELS,
    PromptCaching,
)
from .coordinator import AnthropicConfigEntry, AnthropicCoordinator

# Max number of back and forth with the LLM to generate a response
MAX_TOOL_ITERATIONS = 10
@@ -120,10 +124,14 @@ def _format_tool(
    tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
) -> ToolParam:
    """Format tool specification."""
    unsupported_keys = {"oneOf", "anyOf", "allOf"}
    schema = convert(tool.parameters, custom_serializer=custom_serializer)
    schema = {k: v for k, v in schema.items() if k not in unsupported_keys}

    return ToolParam(
        name=tool.name,
        description=tool.description or "",
        input_schema=convert(tool.parameters, custom_serializer=custom_serializer),
        input_schema=schema,
    )


@@ -202,7 +210,7 @@ class ContentDetails:
    ]


def _convert_content(
def _convert_content(  # noqa: C901
    chat_content: Iterable[conversation.Content],
) -> tuple[list[MessageParam], str | None]:
    """Transform HA chat_log content into Anthropic API format."""
@@ -255,6 +263,15 @@ def _convert_content(
                    content.tool_result,
                ),
            }
        elif content.tool_name == "tool_search":
            tool_result_block = {
                "type": "tool_search_tool_result",
                "tool_use_id": content.tool_call_id,
                "content": cast(
                    ToolSearchToolResultBlockParamContentParam,
                    content.tool_result,
                ),
            }
        else:
            tool_result_block = {
                "type": "tool_result",
@@ -385,6 +402,7 @@ def _convert_content(
                        "code_execution",
                        "bash_code_execution",
                        "text_editor_code_execution",
                        "tool_search_tool_bm25",
                    ],
                    tool_call.tool_name,
                ),
@@ -397,6 +415,7 @@ def _convert_content(
                    "code_execution",
                    "bash_code_execution",
                    "text_editor_code_execution",
                    "tool_search_tool_bm25",
                ]
                else ToolUseBlockParam(
                    type="tool_use",
@@ -558,6 +577,7 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                    CodeExecutionToolResultBlock,
                    BashCodeExecutionToolResultBlock,
                    TextEditorCodeExecutionToolResultBlock,
                    ToolSearchToolResultBlock,
                ),
            ):
                if content_details:
@@ -658,7 +678,7 @@ def _create_token_stats(
    }


class AnthropicBaseLLMEntity(Entity):
class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
    """Anthropic base LLM entity."""

    _attr_has_entity_name = True
@@ -666,18 +686,24 @@ class AnthropicBaseLLMEntity(Entity):

    def __init__(self, entry: AnthropicConfigEntry, subentry: ConfigSubentry) -> None:
        """Initialize the entity."""
        super().__init__(entry.runtime_data)
        self.entry = entry
        self.subentry = subentry
        coordinator = entry.runtime_data
        self.model_info, _ = coordinator.get_model_info(
            subentry.data.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
        )
        self._attr_unique_id = subentry.subentry_id
        self._attr_device_info = dr.DeviceInfo(
            identifiers={(DOMAIN, subentry.subentry_id)},
            name=subentry.title,
            manufacturer="Anthropic",
            model=subentry.data.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL]),
            model=self.model_info.display_name,
            model_id=self.model_info.id,
            entry_type=dr.DeviceEntryType.SERVICE,
        )

    async def _async_handle_chat_log(
    async def _async_handle_chat_log(  # noqa: C901
        self,
        chat_log: conversation.ChatLog,
        structure_name: str | None = None,
@@ -687,21 +713,20 @@ class AnthropicBaseLLMEntity(Entity):
        """Generate an answer for the chat log."""
        options = self.subentry.data

        preloaded_tools = [
            "HassTurnOn",
            "HassTurnOff",
            "GetLiveContext",
            "code_execution",
            "web_search",
        ]

        system = chat_log.content[0]
        if not isinstance(system, conversation.SystemContent):
            raise HomeAssistantError(
                translation_domain=DOMAIN, translation_key="system_message_not_found"
            )

        # System prompt with caching enabled
        system_prompt: list[TextBlockParam] = [
            TextBlockParam(
                type="text",
                text=system.content,
                cache_control={"type": "ephemeral"},
            )
        ]

        messages, container_id = _convert_content(chat_log.content[1:])

        model = options.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
@@ -710,38 +735,65 @@ class AnthropicBaseLLMEntity(Entity):
            model=model,
            messages=messages,
            max_tokens=options.get(CONF_MAX_TOKENS, DEFAULT[CONF_MAX_TOKENS]),
            system=system_prompt,
            system=system.content,
            stream=True,
            container=container_id,
        )

        if not model.startswith(tuple(NON_ADAPTIVE_THINKING_MODELS)):
        if (
            options.get(CONF_PROMPT_CACHING, DEFAULT[CONF_PROMPT_CACHING])
            == PromptCaching.PROMPT
        ):
            model_args["system"] = [
                {
                    "type": "text",
                    "text": system.content,
                    "cache_control": {"type": "ephemeral"},
                }
            ]
        elif (
            options.get(CONF_PROMPT_CACHING, DEFAULT[CONF_PROMPT_CACHING])
            == PromptCaching.AUTOMATIC
        ):
            model_args["cache_control"] = {"type": "ephemeral"}

        if (
            self.model_info.capabilities
            and self.model_info.capabilities.thinking.types.adaptive.supported
        ):
            thinking_effort = options.get(
                CONF_THINKING_EFFORT, DEFAULT[CONF_THINKING_EFFORT]
            )
            if thinking_effort != "none":
                model_args["thinking"] = ThinkingConfigAdaptiveParam(type="adaptive")
                model_args["thinking"] = ThinkingConfigAdaptiveParam(
                    type="adaptive", display="summarized"
                )
                model_args["output_config"] = OutputConfigParam(effort=thinking_effort)
            else:
                model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
                model_args["temperature"] = options.get(
                    CONF_TEMPERATURE, DEFAULT[CONF_TEMPERATURE]
                )
        else:
            thinking_budget = options.get(
                CONF_THINKING_BUDGET, DEFAULT[CONF_THINKING_BUDGET]
            )
            if (
                not model.startswith(tuple(NON_THINKING_MODELS))
                self.model_info.capabilities
                and self.model_info.capabilities.thinking.types.enabled.supported
                and thinking_budget >= MIN_THINKING_BUDGET
            ):
                model_args["thinking"] = ThinkingConfigEnabledParam(
                    type="enabled", budget_tokens=thinking_budget
                    type="enabled", display="summarized", budget_tokens=thinking_budget
                )
            else:
                model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
                model_args["temperature"] = options.get(
                    CONF_TEMPERATURE, DEFAULT[CONF_TEMPERATURE]

            if (
                self.model_info.capabilities
                and self.model_info.capabilities.effort.supported
            ):
                model_args["output_config"] = OutputConfigParam(
                    effort=options.get(
                        CONF_THINKING_EFFORT, DEFAULT[CONF_THINKING_EFFORT]
                    )
                )

        tools: list[ToolUnionParam] = []
@@ -753,9 +805,11 @@ class AnthropicBaseLLMEntity(Entity):

        if options.get(CONF_CODE_EXECUTION):
            # The `web_search_20260209` tool automatically enables `code_execution_20260120` tool
            if model.startswith(
                tuple(PROGRAMMATIC_TOOL_CALLING_UNSUPPORTED_MODELS)
            ) or not options.get(CONF_WEB_SEARCH):
            if (
                not self.model_info.capabilities
                or not self.model_info.capabilities.code_execution.supported
                or not options.get(CONF_WEB_SEARCH)
            ):
                tools.append(
                    CodeExecutionTool20250825Param(
                        name="code_execution",
||||
@@ -764,9 +818,11 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
)
|
||||
|
||||
if options.get(CONF_WEB_SEARCH):
|
||||
if model.startswith(
|
||||
tuple(PROGRAMMATIC_TOOL_CALLING_UNSUPPORTED_MODELS)
|
||||
) or not options.get(CONF_CODE_EXECUTION):
|
||||
if (
|
||||
not self.model_info.capabilities
|
||||
or not self.model_info.capabilities.code_execution.supported
|
||||
or not options.get(CONF_CODE_EXECUTION)
|
||||
):
|
||||
web_search: WebSearchTool20250305Param | WebSearchTool20260209Param = (
|
||||
WebSearchTool20250305Param(
|
||||
name="web_search",
|
||||
@@ -804,12 +860,17 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
]
|
||||
last_message["content"].extend( # type: ignore[union-attr]
|
||||
await async_prepare_files_for_prompt(
|
||||
self.hass, [(a.path, a.mime_type) for a in last_content.attachments]
|
||||
self.hass,
|
||||
self.model_info,
|
||||
[(a.path, a.mime_type) for a in last_content.attachments],
|
||||
)
|
||||
)
|
||||
|
||||
if structure and structure_name:
|
||||
if not model.startswith(tuple(UNSUPPORTED_STRUCTURED_OUTPUT_MODELS)):
|
||||
if (
|
||||
self.model_info.capabilities
|
||||
and self.model_info.capabilities.structured_outputs.supported
|
||||
):
|
||||
# Native structured output for those models who support it.
|
||||
structure_name = None
|
||||
model_args.setdefault("output_config", OutputConfigParam())[
|
||||
@@ -873,11 +934,27 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
),
|
||||
)
|
||||
)
|
||||
preloaded_tools.append(structure_name)
|
||||
|
||||
if tools:
|
||||
if (
|
||||
options.get(CONF_TOOL_SEARCH, DEFAULT[CONF_TOOL_SEARCH])
|
||||
and len(tools) > len(preloaded_tools) + 1
|
||||
):
|
||||
for tool in tools:
|
||||
if not tool["name"].endswith(tuple(preloaded_tools)):
|
||||
tool["defer_loading"] = True
|
||||
tools.append(
|
||||
ToolSearchToolBm25_20251119Param(
|
||||
type="tool_search_tool_bm25_20251119",
|
||||
name="tool_search_tool_bm25",
|
||||
)
|
||||
)
|
||||
|
||||
model_args["tools"] = tools
|
||||
|
||||
client = self.entry.runtime_data
|
||||
coordinator = self.entry.runtime_data
|
||||
client = coordinator.client
|
||||
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(max_iterations):
|
||||
@@ -899,13 +976,25 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
)
|
||||
messages.extend(new_messages)
|
||||
except anthropic.AuthenticationError as err:
|
||||
self.entry.async_start_reauth(self.hass)
|
||||
# Trigger coordinator to confirm the auth failure and trigger the reauth flow.
|
||||
await coordinator.async_request_refresh()
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_authentication_error",
|
||||
translation_placeholders={"message": err.message},
|
||||
) from err
|
||||
except anthropic.APIConnectionError as err:
|
||||
LOGGER.info("Connection error while talking to Anthropic: %s", err)
|
||||
coordinator.mark_connection_error()
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_error",
|
||||
translation_placeholders={"message": err.message},
|
||||
) from err
|
||||
except anthropic.AnthropicError as err:
|
||||
# Non-connection error, mark connection as healthy
|
||||
coordinator.async_set_updated_data(coordinator.data)
|
||||
LOGGER.error("Error while talking to Anthropic: %s", err)
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_error",
|
||||
@@ -917,11 +1006,12 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
) from err
|
||||
|
||||
if not chat_log.unresponded_tool_results:
|
||||
coordinator.async_set_updated_data(coordinator.data)
|
||||
break
|
||||
|
||||
|
||||
async def async_prepare_files_for_prompt(
|
||||
hass: HomeAssistant, files: list[tuple[Path, str | None]]
|
||||
hass: HomeAssistant, model_info: ModelInfo, files: list[tuple[Path, str | None]]
|
||||
) -> Iterable[ImageBlockParam | DocumentBlockParam]:
|
||||
"""Append files to a prompt.
|
||||
|
||||
@@ -942,13 +1032,26 @@ async def async_prepare_files_for_prompt(
|
||||
if mime_type is None:
|
||||
mime_type = guess_file_type(file_path)[0]
|
||||
|
||||
if not mime_type or not mime_type.startswith(("image/", "application/pdf")):
|
||||
if (
|
||||
not mime_type
|
||||
or not mime_type.startswith(("image/", "application/pdf"))
|
||||
or not model_info.capabilities
|
||||
or (
|
||||
mime_type.startswith("image/")
|
||||
and not model_info.capabilities.image_input.supported
|
||||
)
|
||||
or (
|
||||
mime_type.startswith("application/pdf")
|
||||
and not model_info.capabilities.pdf_input.supported
|
||||
)
|
||||
):
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="wrong_file_type",
|
||||
translation_placeholders={
|
||||
"file_path": file_path.as_posix(),
|
||||
"mime_type": mime_type or "unknown",
|
||||
"model": model_info.display_name,
|
||||
},
|
||||
)
|
||||
if mime_type == "image/jpg":
|
||||
|
||||
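The recurring pattern in this diff is worth calling out: hard-coded model-prefix lists (`NON_ADAPTIVE_THINKING_MODELS`, `NON_THINKING_MODELS`, `PROGRAMMATIC_TOOL_CALLING_UNSUPPORTED_MODELS`, `UNSUPPORTED_STRUCTURED_OUTPUT_MODELS`) are replaced by capability flags on the coordinator-provided model info. A minimal, self-contained sketch of that selection logic, using stand-in types rather than the integration's real `ModelInfo` classes:

```python
# Stand-in capability type; not the integration's actual classes.
from dataclasses import dataclass
from typing import Any


@dataclass
class Capabilities:
    adaptive_thinking: bool
    enabled_thinking: bool


def thinking_args(
    caps: Capabilities | None, effort: str, budget_tokens: int
) -> dict[str, Any]:
    """Pick thinking parameters from capability flags, not model-name prefixes."""
    if caps and caps.adaptive_thinking and effort != "none":
        return {
            "thinking": {"type": "adaptive", "display": "summarized"},
            "output_config": {"effort": effort},
        }
    if caps and caps.enabled_thinking and budget_tokens >= 1024:  # MIN_THINKING_BUDGET
        return {
            "thinking": {
                "type": "enabled",
                "display": "summarized",
                "budget_tokens": budget_tokens,
            }
        }
    return {"thinking": {"type": "disabled"}}


# Unknown capabilities fall back to disabled thinking, mirroring the hunks above.
assert thinking_args(None, "high", 4096) == {"thinking": {"type": "disabled"}}
```

The same capability checks replace the prefix tuples for code execution, web search, structured output, and file input further down in the diff.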
@@ -8,6 +8,6 @@
   "documentation": "https://www.home-assistant.io/integrations/anthropic",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "quality_scale": "bronze",
-  "requirements": ["anthropic==0.83.0"]
+  "quality_scale": "silver",
+  "requirements": ["anthropic==0.96.0"]
 }
@@ -35,9 +35,9 @@ rules:
   config-entry-unloading: done
   docs-configuration-parameters: done
   docs-installation-parameters: done
-  entity-unavailable: todo
+  entity-unavailable: done
   integration-owner: done
-  log-when-unavailable: todo
+  log-when-unavailable: done
   parallel-updates:
     status: exempt
     comment: |
@@ -81,7 +81,10 @@ rules:
     status: exempt
     comment: |
       No entities disabled by default.
-  entity-translations: todo
+  entity-translations:
+    status: exempt
+    comment: |
+      Entities explicitly set `_attr_name` to `None`, so entity name translations are not used.
   exception-translations: done
   icon-translations: done
   reconfiguration-flow: done
@@ -5,6 +5,8 @@ from __future__ import annotations

 from collections.abc import Iterator
 from typing import TYPE_CHECKING

+import anthropic
+from anthropic.resources.messages.messages import DEPRECATED_MODELS
 import voluptuous as vol

 from homeassistant import data_entry_flow
@@ -18,8 +20,8 @@ from homeassistant.helpers.selector import (
     SelectSelectorConfig,
 )

-from .config_flow import get_model_list
-from .const import CONF_CHAT_MODEL, DEPRECATED_MODELS, DOMAIN
+from .const import CONF_CHAT_MODEL, DOMAIN
+from .coordinator import model_alias

 if TYPE_CHECKING:
     from . import AnthropicConfigEntry
@@ -58,11 +60,11 @@ class ModelDeprecatedRepairFlow(RepairsFlow):
        if entry.entry_id in self._model_list_cache:
            model_list = self._model_list_cache[entry.entry_id]
        else:
-            client = entry.runtime_data
+            client = entry.runtime_data.client
            model_list = [
                model_option
-                for model_option in await get_model_list(client)
-                if not model_option["value"].startswith(tuple(DEPRECATED_MODELS))
+                for model_option in await self.get_model_list(client)
+                if model_option["value"] not in DEPRECATED_MODELS
            ]
            self._model_list_cache[entry.entry_id] = model_list

@@ -104,9 +106,26 @@ class ModelDeprecatedRepairFlow(RepairsFlow):
                "model": model,
                "subentry_name": subentry.title,
                "subentry_type": self._format_subentry_type(subentry.subentry_type),
+                "retirement_date": DEPRECATED_MODELS[model],
            },
        )

+    async def get_model_list(
+        self, client: anthropic.AsyncAnthropic
+    ) -> list[SelectOptionDict]:
+        """Get list of available models."""
+        try:
+            models = (await client.models.list(timeout=10.0)).data
+        except anthropic.AnthropicError:
+            models = []
+        return [
+            SelectOptionDict(
+                label=model_info.display_name,
+                value=model_alias(model_info.id),
+            )
+            for model_info in models
+        ]
+
    def _iter_deprecated_subentries(self) -> Iterator[tuple[str, str]]:
        """Yield entry/subentry pairs that use deprecated models."""
        for entry in self.hass.config_entries.async_entries(DOMAIN):
@@ -114,7 +133,7 @@ class ModelDeprecatedRepairFlow(RepairsFlow):
                continue
            for subentry in entry.subentries.values():
                model = subentry.data.get(CONF_CHAT_MODEL)
-                if model and model.startswith(tuple(DEPRECATED_MODELS)):
+                if model and model in DEPRECATED_MODELS:
                    yield entry.entry_id, subentry.subentry_id

    async def _async_next_target(
@@ -141,7 +160,7 @@ class ModelDeprecatedRepairFlow(RepairsFlow):
                continue

            model = subentry.data.get(CONF_CHAT_MODEL)
-            if not model or not model.startswith(tuple(DEPRECATED_MODELS)):
+            if not model or model not in DEPRECATED_MODELS:
                continue

            self._current_entry_id = entry_id
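The membership change above is worth spelling out: the local prefix tuple is gone, and `DEPRECATED_MODELS` now comes from the `anthropic` SDK, where, judging by the new `retirement_date` placeholder, it maps exact model IDs to retirement dates. A small illustration with made-up IDs and dates:

```python
# Sketch of the lookup change; the model ID and date below are invented
# for illustration, not taken from the SDK.
DEPRECATED_MODELS: dict[str, str] = {
    "claude-example-1": "2026-01-01",
}

model = "claude-example-1"

# Old style: prefix matching against a tuple of model-name prefixes.
is_deprecated = model.startswith(tuple(DEPRECATED_MODELS))

# New style: exact membership in the mapping, which also yields the date
# surfaced in the repair-issue description.
if model in DEPRECATED_MODELS:
    retirement_date = DEPRECATED_MODELS[model]
    assert retirement_date == "2026-01-01"
```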
@@ -38,6 +38,11 @@
        "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
      },
      "entry_type": "AI task",
+      "error": {
+        "api_error": "[%key:component::anthropic::config_subentries::conversation::error::api_error%]",
+        "model_not_found": "[%key:component::anthropic::config_subentries::conversation::error::model_not_found%]",
+        "thinking_budget_too_large": "[%key:component::anthropic::config_subentries::conversation::error::thinking_budget_too_large%]"
+      },
      "initiate_flow": {
        "reconfigure": "Reconfigure AI task",
        "user": "Add AI task"
@@ -46,12 +51,12 @@
        "advanced": {
          "data": {
            "chat_model": "[%key:common::generic::model%]",
            "max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::max_tokens%]",
+            "prompt_caching": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::prompt_caching%]",
            "temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::temperature%]"
          },
          "data_description": {
            "chat_model": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::chat_model%]",
            "max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::max_tokens%]",
+            "prompt_caching": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::prompt_caching%]",
            "temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::temperature%]"
          },
          "title": "[%key:component::anthropic::config_subentries::conversation::step::advanced::title%]"
@@ -70,16 +75,20 @@
        "model": {
          "data": {
            "code_execution": "[%key:component::anthropic::config_subentries::conversation::step::model::data::code_execution%]",
            "max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::model::data::max_tokens%]",
            "thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data::thinking_budget%]",
+            "thinking_effort": "[%key:component::anthropic::config_subentries::conversation::step::model::data::thinking_effort%]",
+            "tool_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data::tool_search%]",
            "user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data::user_location%]",
            "web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search%]",
            "web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search_max_uses%]"
          },
          "data_description": {
            "code_execution": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::code_execution%]",
            "max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::max_tokens%]",
            "thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::thinking_budget%]",
+            "thinking_effort": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::thinking_effort%]",
+            "tool_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::tool_search%]",
            "user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::user_location%]",
            "web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search%]",
            "web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search_max_uses%]"
@@ -94,6 +103,11 @@
        "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
      },
      "entry_type": "Conversation agent",
+      "error": {
+        "api_error": "Unable to get model info: {message}",
+        "model_not_found": "Model not found",
+        "thinking_budget_too_large": "Thinking budget must be less than the Maximum tokens."
+      },
      "initiate_flow": {
        "reconfigure": "Reconfigure conversation agent",
        "user": "Add conversation agent"
@@ -102,12 +116,12 @@
        "advanced": {
          "data": {
            "chat_model": "[%key:common::generic::model%]",
            "max_tokens": "Maximum tokens to return in response",
+            "prompt_caching": "Caching strategy",
            "temperature": "Temperature"
          },
          "data_description": {
            "chat_model": "The model to serve the responses.",
            "max_tokens": "Limit the number of response tokens.",
+            "prompt_caching": "Optimize your API cost and response times based on your usage.",
            "temperature": "Control the randomness of the response, trading off between creativity and coherence."
          },
          "title": "Advanced settings"
@@ -130,16 +144,20 @@
        "model": {
          "data": {
            "code_execution": "Code execution",
            "max_tokens": "Maximum tokens to return in response",
            "thinking_budget": "Thinking budget",
+            "thinking_effort": "Thinking effort",
+            "tool_search": "Enable tool search tool",
            "user_location": "Include home location",
            "web_search": "Enable web search",
            "web_search_max_uses": "Maximum web searches"
          },
          "data_description": {
            "code_execution": "Allow the model to execute code in a secure sandbox environment, enabling it to analyze data and perform complex calculations.",
            "max_tokens": "Limit the number of response tokens.",
            "thinking_budget": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking.",
+            "thinking_effort": "Control how many tokens Claude uses when responding, trading off between response thoroughness and token efficiency",
+            "tool_search": "Enable dynamic tool discovery instead of preloading all tools into the context",
            "user_location": "Localize search results based on home location",
            "web_search": "The web search tool gives Claude direct access to real-time web content, allowing it to answer questions with up-to-date information beyond its knowledge cutoff",
            "web_search_max_uses": "Limit the number of searches performed per response"
@@ -187,7 +205,7 @@
      "message": "`{file_path}` does not exist."
    },
    "wrong_file_type": {
-      "message": "Only images and PDF are supported by the Anthropic API, `{file_path}` ({mime_type}) is not an image file or PDF."
+      "message": "The {model} model does not support {mime_type} file types (for `{file_path}`)."
    }
  },
  "issues": {
@@ -201,7 +219,7 @@
      "data_description": {
        "chat_model": "Select the new model to use."
      },
-      "description": "You are updating {subentry_name} ({subentry_type}) in {entry_name}. The current model {model} is deprecated. Select a supported model to continue.",
+      "description": "You are updating {subentry_name} ({subentry_type}) in {entry_name}. The current model {model} is deprecated and will reach end-of-life on {retirement_date}. Select a supported model to continue.",
      "title": "Update model"
    }
  }
@@ -210,13 +228,21 @@
    }
  },
  "selector": {
+    "prompt_caching": {
+      "options": {
+        "automatic": "Full",
+        "off": "Disabled",
+        "prompt": "System prompt"
+      }
+    },
    "thinking_effort": {
      "options": {
        "high": "[%key:common::state::high%]",
        "low": "[%key:common::state::low%]",
        "max": "Max",
        "medium": "[%key:common::state::medium%]",
-        "none": "None"
+        "none": "None",
+        "xhigh": "X-High"
      }
    }
  }
@@ -14,6 +14,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from . import SIGNAL_CONNECTED, AppleTvConfigEntry
 from .entity import AppleTVEntity

+PARALLEL_UPDATES = 0
+

 async def async_setup_entry(
     hass: HomeAssistant,
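For readers outside Home Assistant core: `PARALLEL_UPDATES` is a module-level constant that entity platforms use to bound how many entity updates or service calls may run concurrently, and `0` lifts the limit. A rough sketch of the gating idea (illustrative only, not core's implementation):

```python
# Illustrative sketch: the constant effectively sizes a per-platform
# semaphore, with 0 meaning "no semaphore at all".
import asyncio


def make_update_gate(parallel_updates: int) -> asyncio.Semaphore | None:
    return asyncio.Semaphore(parallel_updates) if parallel_updates > 0 else None


gate = make_update_gate(0)
assert gate is None  # PARALLEL_UPDATES = 0 -> updates are not serialized
```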
@@ -7,7 +7,7 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["arcam"],
-  "requirements": ["arcam-fmj==1.8.2"],
+  "requirements": ["arcam-fmj==1.8.3"],
   "ssdp": [
     {
       "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
@@ -91,6 +91,7 @@ SENSORS: tuple[ArcamFmjSensorEntityDescription, ...] = (
        value_fn=lambda state: (
            vp.colorspace.name.lower()
            if (vp := state.get_incoming_video_parameters()) is not None
+            and vp.colorspace is not None
            else None
        ),
    ),
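The added `and vp.colorspace is not None` clause stops `.name.lower()` from being called on a missing colorspace. The same guard pattern in isolation, with stand-in types rather than the arcam-fmj API:

```python
# Stand-in types; only the walrus-plus-guard pattern from the hunk is shown.
from dataclasses import dataclass
from enum import Enum


class ColorSpace(Enum):
    RGB = 1


@dataclass
class VideoParameters:
    colorspace: ColorSpace | None


def colorspace_name(vp_source: VideoParameters | None) -> str | None:
    # The walrus binding happens while evaluating the condition, so `vp`
    # is available in the result expression; the second clause guards the
    # attribute access when colorspace is absent.
    return (
        vp.colorspace.name.lower()
        if (vp := vp_source) is not None and vp.colorspace is not None
        else None
    )


assert colorspace_name(VideoParameters(None)) is None
assert colorspace_name(VideoParameters(ColorSpace.RGB)) == "rgb"
```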
@@ -945,7 +945,10 @@ class PipelineRun:
        try:
            # Transcribe audio stream
            stt_vad: VoiceCommandSegmenter | None = None
-            if self.audio_settings.is_vad_enabled:
+            if (
+                self.audio_settings.is_vad_enabled
+                and self.stt_provider.audio_processing.requires_external_vad
+            ):
                stt_vad = VoiceCommandSegmenter(
                    silence_seconds=self.audio_settings.silence_seconds
                )
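With the extra clause, the external segmenter is only constructed when VAD is enabled and the STT provider actually needs external voice-activity detection, instead of layering a second VAD on providers that segment internally. The decision in isolation, with stand-in classes:

```python
# Stand-in classes; only the boolean gating from the hunk above is shown.
from dataclasses import dataclass


@dataclass
class AudioProcessing:
    requires_external_vad: bool


@dataclass
class SttProvider:
    audio_processing: AudioProcessing


def needs_external_vad(vad_enabled: bool, provider: SttProvider) -> bool:
    """Only build a VoiceCommandSegmenter when both conditions hold."""
    return vad_enabled and provider.audio_processing.requires_external_vad


# A provider that segments internally no longer gets a redundant external VAD.
assert not needs_external_vad(True, SttProvider(AudioProcessing(False)))
assert needs_external_vad(True, SttProvider(AudioProcessing(True)))
```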
@@ -7,13 +7,17 @@ from .const import DOMAIN
 from .entity import AssistSatelliteState

 CONDITIONS: dict[str, type[Condition]] = {
-    "is_idle": make_entity_state_condition(DOMAIN, AssistSatelliteState.IDLE),
-    "is_listening": make_entity_state_condition(DOMAIN, AssistSatelliteState.LISTENING),
+    "is_idle": make_entity_state_condition(
+        DOMAIN, AssistSatelliteState.IDLE, support_duration=True
+    ),
+    "is_listening": make_entity_state_condition(
+        DOMAIN, AssistSatelliteState.LISTENING, support_duration=True
+    ),
     "is_processing": make_entity_state_condition(
-        DOMAIN, AssistSatelliteState.PROCESSING
+        DOMAIN, AssistSatelliteState.PROCESSING, support_duration=True
     ),
     "is_responding": make_entity_state_condition(
-        DOMAIN, AssistSatelliteState.RESPONDING
+        DOMAIN, AssistSatelliteState.RESPONDING, support_duration=True
     ),
 }
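`support_duration=True` wires the new `for:` field (declared in the YAML hunk that follows) into these conditions, so a condition passes only once the satellite has held the state for the configured time. Conceptually, and only as an illustration of the semantics rather than the helper's real code:

```python
# Illustration of duration-gated state conditions, not the actual
# make_entity_state_condition internals.
from datetime import datetime, timedelta, timezone


def state_held_long_enough(
    current_state: str,
    target_state: str,
    last_changed: datetime,
    for_duration: timedelta,
) -> bool:
    if current_state != target_state:
        return False
    return datetime.now(timezone.utc) - last_changed >= for_duration


# In state for 5 s with a 3 s `for:` requirement -> condition passes.
assert state_held_long_enough(
    "listening",
    "listening",
    datetime.now(timezone.utc) - timedelta(seconds=5),
    timedelta(seconds=3),
)
```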
@@ -12,6 +12,11 @@
       options:
         - all
         - any
+  for:
+    required: true
+    default: 00:00:00
+    selector:
+      duration:

 is_idle: *condition_common
 is_listening: *condition_common
@@ -1,7 +1,9 @@
 {
   "common": {
     "condition_behavior_name": "Condition passes if",
-    "trigger_behavior_name": "Trigger when"
+    "condition_for_name": "For at least",
+    "trigger_behavior_name": "Trigger when",
+    "trigger_for_name": "For at least"
   },
   "conditions": {
     "is_idle": {
@@ -9,6 +11,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
+        },
+        "for": {
+          "name": "[%key:component::assist_satellite::common::condition_for_name%]"
        }
      },
      "name": "Satellite is idle"
@@ -18,6 +23,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
+        },
+        "for": {
+          "name": "[%key:component::assist_satellite::common::condition_for_name%]"
        }
      },
      "name": "Satellite is listening"
@@ -27,6 +35,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
+        },
+        "for": {
+          "name": "[%key:component::assist_satellite::common::condition_for_name%]"
        }
      },
      "name": "Satellite is processing"
@@ -36,6 +47,9 @@
      "fields": {
        "behavior": {
          "name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
+        },
+        "for": {
+          "name": "[%key:component::assist_satellite::common::condition_for_name%]"
        }
      },
      "name": "Satellite is responding"
@@ -75,7 +89,7 @@
    },
    "services": {
      "announce": {
-        "description": "Lets a satellite announce a message.",
+        "description": "Lets an Assist satellite announce a message.",
        "fields": {
          "media_id": {
            "description": "The media ID to announce instead of using text-to-speech.",
@@ -94,10 +108,10 @@
            "name": "Preannounce media ID"
          }
        },
-        "name": "Announce"
+        "name": "Announce on satellite"
      },
      "ask_question": {
-        "description": "Asks a question and gets the user's response.",
+        "description": "Lets an Assist satellite ask a question and get the user's response.",
        "fields": {
          "answers": {
            "description": "Possible answers to the question.",
@@ -124,10 +138,10 @@
            "name": "Question media ID"
          }
        },
-        "name": "Ask question"
+        "name": "Ask question on satellite"
      },
      "start_conversation": {
-        "description": "Starts a conversation from a satellite.",
+        "description": "Starts a conversation from an Assist satellite.",
        "fields": {
          "extra_system_prompt": {
            "description": "Provide background information to the AI about the request.",
@@ -150,43 +164,55 @@
          "name": "Message"
        }
      },
-        "name": "Start conversation"
+        "name": "Start conversation on satellite"
    }
  },
  "title": "Assist satellite",
  "triggers": {
    "idle": {
-      "description": "Triggers after one or more voice assistant satellites become idle after having processed a command.",
+      "description": "Triggers after one or more Assist satellites become idle after having processed a command.",
      "fields": {
        "behavior": {
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
+        },
+        "for": {
+          "name": "[%key:component::assist_satellite::common::trigger_for_name%]"
        }
      },
      "name": "Satellite became idle"
    },
    "listening": {
-      "description": "Triggers after one or more voice assistant satellites start listening for a command from someone.",
+      "description": "Triggers after one or more Assist satellites start listening for a command from someone.",
      "fields": {
        "behavior": {
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
+        },
+        "for": {
+          "name": "[%key:component::assist_satellite::common::trigger_for_name%]"
        }
      },
      "name": "Satellite started listening"
    },
    "processing": {
-      "description": "Triggers after one or more voice assistant satellites start processing a command after having heard it.",
+      "description": "Triggers after one or more Assist satellites start processing a command after having heard it.",
      "fields": {
        "behavior": {
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
+        },
+        "for": {
+          "name": "[%key:component::assist_satellite::common::trigger_for_name%]"
        }
      },
      "name": "Satellite started processing"
    },
    "responding": {
-      "description": "Triggers after one or more voice assistant satellites start responding to a command after having processed it, or start announcing something.",
+      "description": "Triggers after one or more Assist satellites start responding to a command after having processed it, or start announcing something.",
      "fields": {
        "behavior": {
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
+        },
+        "for": {
+          "name": "[%key:component::assist_satellite::common::trigger_for_name%]"
        }
      },
      "name": "Satellite started responding"
@@ -13,6 +13,11 @@
         - last
         - any
       translation_key: trigger_behavior
+  for:
+    required: true
+    default: 00:00:00
+    selector:
+      duration:

 idle: *trigger_common
 listening: *trigger_common
@@ -7,9 +7,9 @@ import logging
 from typing import TYPE_CHECKING, Any

 from aurorapy.client import AuroraError, AuroraSerialClient
-import serial.tools.list_ports
 import voluptuous as vol

+from homeassistant.components import usb
 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
 from homeassistant.const import ATTR_SERIAL_NUMBER, CONF_ADDRESS, CONF_PORT
 from homeassistant.core import HomeAssistant
@@ -57,9 +57,11 @@ def validate_and_connect(
     return ret


-def scan_comports() -> tuple[list[str] | None, str | None]:
+async def async_scan_comports(
+    hass: HomeAssistant,
+) -> tuple[list[str] | None, str | None]:
     """Find and store available com ports for the GUI dropdown."""
-    com_ports = serial.tools.list_ports.comports(include_links=True)
+    com_ports = await usb.async_scan_serial_ports(hass)
     com_ports_list = []
     for port in com_ports:
         com_ports_list.append(port.device)
@@ -87,7 +89,7 @@ class AuroraABBConfigFlow(ConfigFlow, domain=DOMAIN):

        errors = {}
        if self._com_ports_list is None:
-            result = await self.hass.async_add_executor_job(scan_comports)
+            result = await async_scan_comports(self.hass)
            self._com_ports_list, self._default_com_port = result
            if self._default_com_port is None:
                return self.async_abort(reason="no_serial_ports")
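The rework replaces a blocking `serial.tools.list_ports` scan, which had to be pushed to the executor thread pool, with the `usb` integration's native coroutine (hence the new `usb` dependency in the manifest below). The generic asyncio pattern behind the change, reduced to a runnable sketch:

```python
# Generic sketch of the call-site difference: a blocking function must be
# run in an executor, while a native coroutine is awaited directly. The
# two scan functions here are stand-ins, not the real implementations.
import asyncio
import time


def blocking_scan() -> list[str]:
    time.sleep(0.1)  # stands in for serial.tools.list_ports.comports()
    return ["/dev/ttyUSB0"]


async def native_scan() -> list[str]:
    await asyncio.sleep(0.1)  # stands in for usb.async_scan_serial_ports(hass)
    return ["/dev/ttyUSB0"]


async def main() -> None:
    loop = asyncio.get_running_loop()
    ports_old = await loop.run_in_executor(None, blocking_scan)  # old style
    ports_new = await native_scan()  # new style
    assert ports_old == ports_new


asyncio.run(main())
```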
@@ -3,6 +3,7 @@
   "name": "Aurora ABB PowerOne Solar PV",
   "codeowners": ["@davet2001"],
   "config_flow": true,
+  "dependencies": ["usb"],
   "documentation": "https://www.home-assistant.io/integrations/aurora_abb_powerone",
   "integration_type": "device",
   "iot_class": "local_polling",
@@ -157,7 +157,6 @@ from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.config_entry_oauth2_flow import OAuth2AuthorizeCallbackView
 from homeassistant.helpers.typing import ConfigType
-from homeassistant.loader import bind_hass
 from homeassistant.util import dt as dt_util
 from homeassistant.util.hass_dict import HassKey

@@ -173,7 +172,6 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
 DELETE_CURRENT_TOKEN_DELAY = 2


-@bind_hass
 def create_auth_code(
     hass: HomeAssistant, client_id: str, credential: Credentials
 ) -> str:
@@ -83,7 +83,6 @@ from homeassistant.helpers.trace import (
     trace_path,
 )
 from homeassistant.helpers.typing import ConfigType
-from homeassistant.loader import bind_hass
 from homeassistant.util.dt import parse_datetime
 from homeassistant.util.hass_dict import HassKey

@@ -143,6 +142,7 @@ _EXPERIMENTAL_CONDITION_PLATFORMS = {
     "occupancy",
     "person",
     "power",
+    "remote",
     "schedule",
     "select",
     "siren",
@@ -150,6 +150,8 @@ _EXPERIMENTAL_CONDITION_PLATFORMS = {
     "temperature",
     "text",
+    "timer",
+    "todo",
     "update",
     "vacuum",
     "valve",
     "water_heater",
@@ -167,6 +169,7 @@ _EXPERIMENTAL_TRIGGER_PLATFORMS = {
     "cover",
     "device_tracker",
     "door",
+    "doorbell",
     "event",
     "fan",
     "garage_door",
@@ -235,7 +238,6 @@ class IfAction(Protocol):
    """AND all conditions."""


-@bind_hass
 def is_on(hass: HomeAssistant, entity_id: str) -> bool:
     """Return true if specified automation entity_id is on.
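The platform names added above extend allowlists; presumably (this is an assumption, since the consuming code is not part of these hunks) a domain only gets the experimental condition and trigger support when it is a member of the corresponding set:

```python
# Assumed usage of the allowlists above: O(1) set membership decides
# whether a domain opts into the experimental condition support.
_EXPERIMENTAL_CONDITION_PLATFORMS = {"remote", "schedule", "timer", "todo"}


def supports_experimental_conditions(domain: str) -> bool:
    return domain in _EXPERIMENTAL_CONDITION_PLATFORMS


assert supports_experimental_conditions("timer")
assert not supports_experimental_conditions("nonexistent")
```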
@@ -8,7 +8,7 @@ from autoskope_client.models import CannotConnect, InvalidAuth

 from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
+from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
 from homeassistant.helpers.aiohttp_client import async_create_clientsession

 from .const import DEFAULT_HOST
@@ -31,8 +31,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AutoskopeConfigEntry) ->
     try:
         await api.connect()
     except InvalidAuth as err:
-        # Raise ConfigEntryError until reauth flow is implemented (then ConfigEntryAuthFailed)
-        raise ConfigEntryError(
+        raise ConfigEntryAuthFailed(
             "Authentication failed, please check credentials"
         ) from err
     except CannotConnect as err:
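Switching to `ConfigEntryAuthFailed` matters because Home Assistant reacts differently to the two exceptions: `ConfigEntryAuthFailed` marks the entry as needing reauthentication and starts the reauth flow added below, while `ConfigEntryNotReady` schedules a setup retry. The contract in a self-contained sketch, with stand-in classes in place of `homeassistant.exceptions` and the `autoskope_client` errors:

```python
# Stand-in exception classes; only the translation of client errors into
# the config-entry lifecycle signals is illustrated here.
class InvalidAuth(Exception): ...


class CannotConnect(Exception): ...


class ConfigEntryAuthFailed(Exception):
    """Signals Home Assistant to flag the entry and start a reauth flow."""


class ConfigEntryNotReady(Exception):
    """Signals Home Assistant to retry setup later."""


async def connect_or_signal(connect) -> None:
    """Translate client errors into the config-entry lifecycle exceptions."""
    try:
        await connect()
    except InvalidAuth as err:
        raise ConfigEntryAuthFailed("Authentication failed") from err
    except CannotConnect as err:
        raise ConfigEntryNotReady("Cannot connect") from err
```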
@@ -2,6 +2,7 @@

 from __future__ import annotations

+from collections.abc import Mapping
 from typing import Any

 from autoskope_client.api import AutoskopeApi
@@ -39,12 +40,39 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
     }
 )

+STEP_REAUTH_DATA_SCHEMA = vol.Schema(
+    {
+        vol.Required(CONF_PASSWORD): TextSelector(
+            TextSelectorConfig(type=TextSelectorType.PASSWORD)
+        ),
+    }
+)
+

 class AutoskopeConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Autoskope."""

     VERSION = 1

+    async def _async_validate_credentials(
+        self, host: str, username: str, password: str, errors: dict[str, str]
+    ) -> bool:
+        """Validate credentials against the Autoskope API."""
+        try:
+            async with AutoskopeApi(
+                host=host,
+                username=username,
+                password=password,
+            ):
+                pass
+        except CannotConnect:
+            errors["base"] = "cannot_connect"
+        except InvalidAuth:
+            errors["base"] = "invalid_auth"
+        else:
+            return True
+        return False
+
     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
@@ -63,18 +91,9 @@ class AutoskopeConfigFlow(ConfigFlow, domain=DOMAIN):
            await self.async_set_unique_id(f"{username}@{host}")
            self._abort_if_unique_id_configured()

-            try:
-                async with AutoskopeApi(
-                    host=host,
-                    username=username,
-                    password=user_input[CONF_PASSWORD],
-                ):
-                    pass
-            except CannotConnect:
-                errors["base"] = "cannot_connect"
-            except InvalidAuth:
-                errors["base"] = "invalid_auth"
-            else:
+            if await self._async_validate_credentials(
+                host, username, user_input[CONF_PASSWORD], errors
+            ):
                return self.async_create_entry(
                    title=f"Autoskope ({username})",
                    data={
@@ -87,3 +106,35 @@ class AutoskopeConfigFlow(ConfigFlow, domain=DOMAIN):
        return self.async_show_form(
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )
+
+    async def async_step_reauth(
+        self, entry_data: Mapping[str, Any]
+    ) -> ConfigFlowResult:
+        """Handle initiation of re-authentication."""
+        return await self.async_step_reauth_confirm()
+
+    async def async_step_reauth_confirm(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle re-authentication with new credentials."""
+        errors: dict[str, str] = {}
+
+        if user_input is not None:
+            reauth_entry = self._get_reauth_entry()
+
+            if await self._async_validate_credentials(
+                reauth_entry.data[CONF_HOST],
+                reauth_entry.data[CONF_USERNAME],
+                user_input[CONF_PASSWORD],
+                errors,
+            ):
+                return self.async_update_reload_and_abort(
+                    reauth_entry,
+                    data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]},
+                )
+
+        return self.async_show_form(
+            step_id="reauth_confirm",
+            data_schema=STEP_REAUTH_DATA_SCHEMA,
+            errors=errors,
+        )
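Extracting `_async_validate_credentials` lets the user step and the new reauth step share one try/except ladder, with form errors reported through a caller-supplied dict. The shape of that helper reduced to plain, runnable Python with stand-in exceptions:

```python
# Stand-in exceptions instead of the autoskope_client ones; the point is
# the shared validator returning a bool and filling the errors dict.
class CannotConnect(Exception): ...


class InvalidAuth(Exception): ...


def validate(password: str, errors: dict[str, str]) -> bool:
    """Return True on success; on failure record an error key for the form."""
    try:
        if password == "offline":
            raise CannotConnect
        if password != "secret":
            raise InvalidAuth
    except CannotConnect:
        errors["base"] = "cannot_connect"
    except InvalidAuth:
        errors["base"] = "invalid_auth"
    else:
        return True
    return False


errors: dict[str, str] = {}
assert not validate("wrong", errors) and errors["base"] == "invalid_auth"
assert validate("secret", {})
```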
@@ -39,10 +39,7 @@ rules:
   integration-owner: done
   log-when-unavailable: todo
   parallel-updates: done
-  reauthentication-flow:
-    status: todo
-    comment: |
-      Reauthentication flow removed for initial PR, will be added in follow-up.
+  reauthentication-flow: done
   test-coverage: done
   # Gold
   devices: done
@@ -1,7 +1,8 @@
 {
   "config": {
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
+      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
+      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
     },
     "error": {
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@@ -10,6 +11,15 @@
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "step": {
+      "reauth_confirm": {
+        "data": {
+          "password": "[%key:common::config_flow::data::password%]"
+        },
+        "data_description": {
+          "password": "The new password for your Autoskope account."
+        },
+        "description": "Please re-enter your password for your Autoskope account."
+      },
       "user": {
         "data": {
           "password": "[%key:common::config_flow::data::password%]",
@@ -1,8 +1,12 @@
 """Support for Amazon Web Services (AWS)."""

 from __future__ import annotations

+import asyncio
 from collections import OrderedDict
+from dataclasses import dataclass
 import logging
+from typing import Any

 from aiobotocore.session import AioSession
 import voluptuous as vol
@@ -30,14 +34,22 @@ from .const import (
     CONF_REGION,
     CONF_SECRET_ACCESS_KEY,
     CONF_VALIDATE,
-    DATA_CONFIG,
-    DATA_HASS_CONFIG,
-    DATA_SESSIONS,
+    DATA_AWS,
     DOMAIN,
 )

 _LOGGER = logging.getLogger(__name__)


+@dataclass
+class AWSData:
+    """Runtime data for the AWS integration."""
+
+    hass_config: ConfigType
+    config: dict[str, Any]
+    sessions: OrderedDict[str, AioSession]
+
+
 AWS_CREDENTIAL_SCHEMA = vol.Schema(
     {
         vol.Required(CONF_NAME): cv.string,
@@ -88,14 +100,13 @@ CONFIG_SCHEMA = vol.Schema(

 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up AWS component."""
-    hass.data[DATA_HASS_CONFIG] = config
-
     if (conf := config.get(DOMAIN)) is None:
         # create a default conf using default profile
         conf = CONFIG_SCHEMA({ATTR_CREDENTIALS: DEFAULT_CREDENTIAL})

-    hass.data[DATA_CONFIG] = conf
-    hass.data[DATA_SESSIONS] = OrderedDict()
+    hass.data[DATA_AWS] = AWSData(
+        hass_config=config, config=conf, sessions=OrderedDict()
+    )

     hass.async_create_task(
         hass.config_entries.flow.async_init(
@@ -111,8 +122,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

     Validate and save sessions per aws credential.
     """
-    config = hass.data[DATA_HASS_CONFIG]
-    conf = hass.data[DATA_CONFIG]
+    data = hass.data[DATA_AWS]
+    conf = data.config

     if entry.source == config_entries.SOURCE_IMPORT:
         if conf is None:
@@ -143,14 +154,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
            )
            validation = False
        else:
-            hass.data[DATA_SESSIONS][name] = result
+            data.sessions[name] = result

        # set up notify platform, no entry support for notify component yet,
        # have to use discovery to load platform.
        for notify_config in conf[CONF_NOTIFY]:
            hass.async_create_task(
                discovery.async_load_platform(
-                    hass, Platform.NOTIFY, DOMAIN, notify_config, config
+                    hass, Platform.NOTIFY, DOMAIN, notify_config, data.hass_config
                )
            )
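Folding three loose `hass.data` keys into one dataclass gives a single typed handle on the integration's runtime state. A sketch of the pattern; the `HassKey` line is an assumption about how `DATA_AWS` is declared in `const.py`, not code from this diff:

```python
# Sketch of the typed runtime-data container; field names mirror the diff,
# while AioSession is replaced by Any to keep the example self-contained.
from collections import OrderedDict
from dataclasses import dataclass, field
from typing import Any


@dataclass
class AWSData:
    """Runtime data for the AWS integration."""

    hass_config: dict[str, Any]
    config: dict[str, Any]
    sessions: OrderedDict[str, Any] = field(default_factory=OrderedDict)


# Assumed declaration in const.py (illustrative):
#   DATA_AWS: HassKey[AWSData] = HassKey("aws")
# so hass.data[DATA_AWS] type-checks as AWSData instead of three untyped keys.

data = AWSData(hass_config={}, config={"notify": []})
data.sessions["default"] = object()  # stands in for an AioSession
assert data.config["notify"] == []
```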
Some files were not shown because too many files have changed in this diff.