Compare commits

...

274 Commits

Author SHA1 Message Date
Erik
c0ef7212b2 Update kitchen_sink 2025-01-07 13:08:53 +01:00
Erik
92dad98b14 Update kitchen_sink 2025-01-07 13:05:18 +01:00
Erik
f6214a22c1 Rename supported_subentry_flows to supported_subentry_types 2025-01-07 13:04:55 +01:00
Erik
8541e6e9cd Minor adjustment 2025-01-07 11:31:46 +01:00
Erik
1fbf929819 Store subentry type in subentry 2025-01-07 11:29:48 +01:00
Erik
07734239b4 Update entity platform 2025-01-07 11:22:17 +01:00
Erik
82b13d6b75 Update entity registry 2025-01-07 11:20:50 +01:00
Erik
3ef9f7360f Add subentry reconfigure support to kitchen_sink 2025-01-07 09:56:23 +01:00
Erik
fa6cc8edfe Add reconfigure support to config subentries 2025-01-07 09:56:23 +01:00
Erik
a12a42710f Add subentry support to kitchen sink 2025-01-07 09:56:23 +01:00
Erik
a4653bb8dc Clean up registries when removing subentry 2025-01-07 09:56:23 +01:00
Erik
d2bf58e1ba Add config subentry support to entity platform 2025-01-07 09:56:23 +01:00
Erik
53fd84a5a4 Allow a device to be connected to no or a single subentry of a config entry 2025-01-07 09:56:23 +01:00
Erik
6b4c27e700 Address review comments 2025-01-07 09:56:23 +01:00
Erik
04754ac83a Update syrupy serializer 2025-01-07 09:56:23 +01:00
Erik Montnemery
42e465a4f3 Apply suggestions from code review 2025-01-07 09:56:23 +01:00
Erik
6fca5022b1 Add config subentry support to device registry 2025-01-07 09:56:23 +01:00
Erik
68f8c3e9ed Add config subentry support to entity registry 2025-01-07 09:56:23 +01:00
Erik
90b2504d5a Reapply "Add support for subentries to config entries" (#133470)
This reverts commit ecb3bf79f3.
2025-01-07 09:56:13 +01:00
Eli Schleifer
875727ed27 add proxy view for unifiprotect to grab snapshot at specific time (#133546)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-01-06 13:49:58 -10:00
Raphael Hehl
f1c62000e1 UnifiProtect Refactor light control methods to use new API (#134625) 2025-01-06 13:48:22 -10:00
Norbert Rittel
e38f21c4ef Fix spelling of "ID", slightly reword action descriptions (#134778) 2025-01-07 00:25:42 +01:00
Franck Nijhof
00c052bb22 Revert "Remove deprecated supported features warning in ..." (multiple) (#134933) 2025-01-07 00:08:02 +01:00
Simone Chemelli
111ef13a3f Add device tracker test for Vodafone Station (#134334) 2025-01-06 23:17:50 +01:00
J. Nick Koston
89c73f56b1 Migrate to using aiohttp-asyncmdnsresolver for aiohttp resolver (#134830) 2025-01-06 12:06:28 -10:00
Paulus Schoutsen
d13c14eedb Add support for extra_system_prompt to OpenAI (#134931) 2025-01-06 23:01:13 +01:00
G Johansson
9532e98166 Remove deprecated config entry import from bluesound (#134926) 2025-01-06 22:58:29 +01:00
G Johansson
6884d790ca Remove deprecated hdr switch from reolink (#134924) 2025-01-06 22:46:59 +01:00
G Johansson
6ab45f8c9e Bump holidays to 0.64 (#134922) 2025-01-06 22:45:04 +01:00
Artur Pragacz
7009a96711 Revert "Remove deprecated supported features warning in LightEntity" (#134927) 2025-01-06 22:39:24 +01:00
Josef Zweck
a47fa08a9b Add device trackers to enabled_by_default fixture (#134446) 2025-01-06 22:03:32 +01:00
Norbert Rittel
4eb23f3039 Remove excessive newline code, fix "ID", enhance descriptions (#134920) 2025-01-06 20:54:26 +01:00
Klaas Schoute
1c314b5c02 Bump powerfox to v1.2.0 (#134908) 2025-01-06 20:52:54 +01:00
Tomer Shemesh
edee58f114 Bump pylutron-caseta to 0.23.0 (#134906) 2025-01-06 20:44:06 +01:00
Manu
ef652e57d1 Add bring_api to loggers in Bring integration (#134897)
Add bring-api to loggers
2025-01-06 20:37:01 +01:00
Paulus Schoutsen
b956aa68da Handle discovering user configured Wyoming flow (#134916) 2025-01-06 20:26:49 +01:00
Michael
75ce89dc41 Bump py-synologydsm-api to 2.6.0 (#134914)
bump py-synologydsm-api to 2.6.0
2025-01-06 20:08:58 +01:00
Manu
a9540e893f Fix wrong power limit decimal place in IronOS (#134902) 2025-01-06 19:55:47 +01:00
Bram Kragten
dd5625436b Update frontend to 20250106.0 (#134905) 2025-01-06 19:11:01 +01:00
Paulus Schoutsen
7a484ee0ae Add extra prompt to assist pipeline and conversation (#124743)
* Add extra prompt to assist pipeline and conversation

* extra_prompt -> extra_system_prompt

* Fix rebase

* Fix tests
2025-01-06 12:58:42 -05:00
starkillerOG
e5c5d1bcfd Fix Reolink playback of recordings (#134652) 2025-01-06 18:54:32 +01:00
Glenn Reilly
56a9cd010e fix typo "looses" to "loses" in MQTT configuration message (#134894) 2025-01-06 17:59:31 +01:00
Steven B.
b7b5577f0c Bump python-kasa to 0.9.1 (#134893)
Bump tplink python-kasa dependency to 0.9.1
2025-01-06 16:58:33 +01:00
Norbert Rittel
0787257cc0 Use uppercase for "ID" and sentence-case for "name" / "icon" (#134890) 2025-01-06 16:30:40 +01:00
Thijs W.
54263f1325 Bump pymodbus version to 3.8.3 (#134809) 2025-01-06 14:56:17 +00:00
Luke Lashley
14d2f2c589 Add extra failure exceptions during roborock setup (#134889)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-01-06 15:46:21 +01:00
starkillerOG
c533f63a87 Add Decorquip virtual motion blinds integration (#134402) 2025-01-06 15:36:38 +01:00
Ludovic BOUÉ
cd30f75be9 Matter Battery replacement icon (#134460) 2025-01-06 15:35:42 +01:00
jb101010-2
527775a5f1 Bump pysuezV2 to 2.0.1 (#134769) 2025-01-06 15:27:23 +01:00
Klaas Schoute
99d7f462a0 Add heat meter to Powerfox integration (#134799) 2025-01-06 15:23:47 +01:00
J. Diego Rodríguez Royo
67e2379d2b Iterate over a copy of the list of programs at Home Connect select setup entry (#134684) 2025-01-06 15:21:02 +01:00
Norbert Rittel
fb0047ead0 Use correct uppercase for "ID" and sentence-case otherwise (#134815) 2025-01-06 15:15:31 +01:00
Norbert Rittel
9764d704bd Fix a few typos or grammar issues in asus_wrt (#134813) 2025-01-06 15:15:08 +01:00
Norbert Rittel
3690d7c2b4 Fix spelling of "set up", change "id" to uppercase (#134888) 2025-01-06 14:12:52 +00:00
Norbert Rittel
204b5989e0 Replace "id" with "ID" for consistency across HA (#134798) 2025-01-06 15:10:29 +01:00
G Johansson
3892f6d8f3 Remove deprecated binary sensor battery charging from technove (#134844) 2025-01-06 15:03:52 +01:00
J. Diego Rodríguez Royo
140ff50eaf Fix how function arguments are passed on actions at Home Connect (#134845) 2025-01-06 15:03:25 +01:00
Avi Miller
5ef06b1f33 Bump aiolifx-themes to update colors (#134846) 2025-01-06 15:02:57 +01:00
Manu
9638bee8de Bump pynecil to v4.0.1 (#134852) 2025-01-06 14:55:50 +01:00
Norbert Rittel
cd88a8cebd Fix missing sentence-casing etc. in several strings (#134775) 2025-01-06 14:53:28 +01:00
G Johansson
d896b4e66a Raise ImportError in python_script (#134792) 2025-01-06 14:52:40 +01:00
Robin Wohlers-Reichel
e4eb414be8 Bump solax to 3.2.3 (#134876) 2025-01-06 14:47:52 +01:00
Joakim Sørensen
fce5be928e Log upload BackupAgentError (#134865)
* Log out BackupAgentError

* Update homeassistant/components/backup/manager.py

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>

* Update homeassistant/components/backup/manager.py

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>

* Format

---------

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-01-06 14:19:34 +01:00
Joakim Sørensen
c4455c709b Log cloud backup upload response status (#134871)
Log the status of the upload response
2025-01-06 13:10:38 +01:00
Allen Porter
2c7a1446b8 Update Roborock config flow message when an account is already configured (#134854) 2025-01-06 11:24:06 +01:00
G Johansson
20cf21d88e Add horizontal swing to Sensibo (#132117)
* Add horizontal swing to Sensibo

* Fixes

* Only load select if already there

* Remove feature check

* Fixes

* Mods

* Last bits

* Mod

* Fixes

* Mods

* Fix test

---------

Co-authored-by: J. Nick Koston <nick@koston.org>
2025-01-06 11:20:11 +01:00
G Johansson
eafbf1d1fd Add get device capabilities action call for Sensibo (#134596)
* Add get device capabilities action call for Sensibo

* Tests

* Mod

* Fix services

---------

Co-authored-by: J. Nick Koston <nick@koston.org>
2025-01-06 11:09:08 +01:00
Michael
acd95975e4 Make ChunkAsyncStreamIterator an aiohttp helper (#134843)
make ChunkAsyncStreamIterator a generic aiohttp helper
2025-01-06 04:37:07 +01:00
G Johansson
bc22e34fc3 Add python_script to strict typing (#134822) 2025-01-05 22:22:54 -05:00
G Johansson
bf0cf1c30f Set single_config_entry in System monitor manifest (#134838) 2025-01-05 22:17:08 -05:00
G Johansson
e95bfe438b Pass config entry directly to coordinator in System monitor (#134837) 2025-01-05 22:16:58 -05:00
Norbert Rittel
0a457979ec Fix spelling of "ID", slightly reword action description (#134817)
This commit fixes the spelling of "ID" (uppercase for abbreviations) and slightly changes the action description to use third-person singular.

The latter ensures proper (machine) translations, keeping a descriptive style.
2025-01-05 18:36:17 -08:00
Allen Porter
2f295efb3f Update roborock to ensure every room has a name, falling back to a placeholder (#134733)
* Update roborock to ensure every room has a name, falling back to a placeholder

* Change Map to Room
2025-01-05 18:28:17 -08:00
J. Nick Koston
74613ae0c4 Bump habluetooth to 3.7.0 (#134833) 2025-01-05 12:44:37 -10:00
Raphael Hehl
4d4cfabfba Bump uiprotect to version 7.4.1 (#134829) 2025-01-05 11:25:44 -10:00
Norbert Rittel
7ae81bae4c Fix spelling of "ID" in Roku integration (#134779)
* Fix spelling of "ID" and "Ethernet" in Roku integration

Small commit replacing "id" with "ID" and "ethernet" with "Ethernet".

* Revert entity change

Web editor does not support the necessary tests.
2025-01-05 20:21:06 +01:00
Norbert Rittel
7ec10bfd6f Use uppercase "ID" in Home Connect strings (#134783) 2025-01-05 20:19:45 +01:00
cdnninja
d662a4465c Remove unneeded vesync device base class (#134499)
* Remove unneeded entity to make code cleaner

* Update light.py

* Update fan.py

* Typing.

* Update homeassistant/components/vesync/common.py

Co-authored-by: Allen Porter <allen.porter@gmail.com>

* Wrap

---------

Co-authored-by: Allen Porter <allen.porter@gmail.com>
2025-01-05 09:18:52 -08:00
Markus Lanthaler
66b4b24612 Add latest Nighthawk WiFi 7 routers to V2 models (#134765)
Click `WiFi Routers` | `Nighthawk WiFi 7 Router` on https://www.netgear.com/support/ to see the list of devices
2025-01-05 16:59:34 +01:00
TheJulianJES
a2077405e2 Bump ZHA to 0.0.45 (#134726) 2025-01-05 16:49:58 +01:00
Lucas Gasenzer
f0a1a6c2ad Add ATTR_MODEL to DeviceInfo for Aranet (#134307)
* add ATTR_MODEL to DeviceInfo

* add tests for device context

* Upstream change type.name --> type.model

* fix test to represent model names
2025-01-05 16:16:12 +02:00
Norbert Rittel
32b7b5aa66 Small fixes in the strings file of the Vera integration (#134780)
Just for consistency …
2025-01-05 15:10:32 +01:00
Norbert Rittel
871a7d0dc1 Use uppercase "Chime" for product name, fix "MAC address" (#134748) 2025-01-05 14:31:02 +01:00
Michael
da807001ab Register base device entry during coordinator setup in AVM Fritz!Tools integration (#134764)
* register base device entry during coordinator setup

* make mypy happy
2025-01-05 08:16:33 -05:00
Norbert Rittel
a104799893 Fix spelling of "MAC (address)" and "Slide" name in slide_local (#134747) 2025-01-05 13:06:48 +01:00
Duco Sebel
45d1624d70 Bump python-homewizard-energy to 7.0.1 (#134753) 2025-01-05 12:37:06 +01:00
Lucas Gasenzer
1059cf3f07 Bump aranet4 to 2.5.0 (#134752)
update aranet4 to 2.5.0 before pull request
2025-01-05 13:10:08 +02:00
Norbert Rittel
dd34a10934 Fix swapped letter order in "°F" and "°C" temperature units (#134750)
Fixes the wrong order "F°" and "C°" for the temperature units.
2025-01-05 10:43:32 +01:00
Klaas Schoute
d4f3dd2335 Bump powerfox to v1.1.0 (#134730) 2025-01-05 10:10:55 +01:00
Sid
0ecb1ea8cf Bump openwebifpy to 4.3.1 (#134746) 2025-01-05 10:04:59 +01:00
Sid
3d5a42749d Bump ruff to 0.8.6 (#134745) 2025-01-05 09:47:42 +01:00
Rylie Pavlik
a2c2d37eb1 Add support for "Lumin Smart Light" LD-0003 (#133328) 2025-01-04 13:53:16 -10:00
Andrew Sayre
f68c16586d Deprecate HEOS sign_in and sign_out actions (#134616) 2025-01-05 00:13:46 +01:00
Norbert Rittel
11d80065ef Fix spelling of "MAC (address)" and "Gateway" name (#134724) 2025-01-05 00:05:15 +01:00
Norbert Rittel
7012648bf8 Fix typos / grammar in nasweb integration (#134721) 2025-01-04 23:23:26 +01:00
Norbert Rittel
d96b2499e2 Fix typos / grammar in description of create_task action (#134705) 2025-01-04 23:21:23 +01:00
Norbert Rittel
a41bdfe0cc Fix wrong description of group.set action (#134697) 2025-01-04 23:20:30 +01:00
dontinelli
0d3872a4c7 Change from host to ip in zeroconf discovery for slide_local (#134709) 2025-01-04 21:28:47 +01:00
Norbert Rittel
65d8d071dd Remove excessive newline codes from squeezebox strings (#134682) 2025-01-04 18:42:28 +01:00
Allen Porter
bb97a16756 Add prompts to MCP server (#134619)
* Add prompts to MCP server

* Improve test coverage for get prompt error cases
2025-01-04 12:35:05 -05:00
Andrew Sayre
c9a607aa45 Clean-up HEOS entity event setup (#134683)
* Use async_on_remove

* Remove redundant signal clearing
2025-01-04 12:32:19 -05:00
Allen Porter
c7993eff99 Bump gcal_sync to 7.0.0 (#134687) 2025-01-04 12:30:57 -05:00
Cyrill Raccaud
8a880d6134 Cookidoo exotic domains (#134676) 2025-01-04 16:33:42 +01:00
Brynley McDonald
cc0fb80481 Fix Flick Electric authentication (#134611) 2025-01-04 16:21:21 +01:00
epenet
276806d3e1 Fix hive color tunable light (#134628) 2025-01-04 16:19:38 +01:00
Franck Nijhof
0589df7d95 Update demetriek to 1.1.1 (#134663) 2025-01-04 16:19:16 +01:00
Joost Lekkerkerker
aab676a313 Add Overseerr service to get requests (#134229)
* Add service to get requests

* Add service to get requests

* Add service to get requests

* fix

* Add tests
2025-01-04 15:53:15 +01:00
Joost Lekkerkerker
7f473b8260 Prefer a local webhook for Overseerr (#134667) 2025-01-04 15:39:47 +01:00
Shay Levy
fea4a00424 Remove LG WebOS TV legacy uuid migration (#134671) 2025-01-04 15:31:36 +01:00
Cyrill Raccaud
7d146ddae0 Bump cookidoo-api library to 0.11.1 of for Cookidoo (#134661) 2025-01-04 15:02:00 +01:00
Franck Nijhof
8f06e0903f Update peblar to 0.3.3 (#134658) 2025-01-04 14:34:45 +01:00
Maikel Punie
677ba3a6a6 Add velbus cover platform testcases (#134654) 2025-01-04 14:07:25 +01:00
Franck Nijhof
a322deaab8 Update twentemilieu to 2.2.1 (#134651) 2025-01-04 14:05:24 +01:00
Franck Nijhof
584439cade Update guppy to 3.1.5 (#134646) 2025-01-04 13:24:33 +01:00
Joost Lekkerkerker
baa13debcc Remove call to remove slide (#134647) 2025-01-04 12:56:58 +01:00
Cyrill Raccaud
1d42890748 Set logging in manifest for Cookidoo (#134645) 2025-01-04 12:23:22 +01:00
Norbert Rittel
622d23cadd Fix description of device_id field of reconnect_client actions (#134275) 2025-01-04 12:21:25 +01:00
G Johansson
ebeb2ecb09 Replace aioclient_mock in Sensibo tests (#134543) 2025-01-04 12:14:58 +01:00
Norbert Rittel
b3cb2928fc Fix typo 'devide_id', use uppercase for abbreviations ID and LED (#134634) 2025-01-04 12:01:39 +01:00
J. Nick Koston
b639466453 Bump bleak-esphome to 2.0.0 (#134580) 2025-01-04 11:30:41 +01:00
Teemu R.
69241e4ca6 Mention case-sensitivity in tplink credentials prompt (#134606) 2025-01-04 11:12:46 +01:00
Allen Porter
80371a865e Bump ical to 8.3.0 (#134617)
* Bump ical to 8.3.0

* Update snapshots
2025-01-04 09:49:56 +01:00
Maikel Punie
c9dbb205dd Add velbus diagnostics tests (#134621) 2025-01-04 09:10:34 +01:00
Raphael Hehl
197ff932af Bump uiprotect to version 7.2.0 (#134587) 2025-01-04 00:27:06 +01:00
G Johansson
287b7eec13 Clean up docstrings in Sensibo (#134591) 2025-01-04 00:24:51 +01:00
Maikel Punie
e6da6d9612 Add velbus light and sensor platform testcases (#134485)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-01-03 20:42:01 +01:00
peteS-UK
d4f38099ae Small fix to allow playing of expandable favorites on Squeezebox (#134572) 2025-01-03 20:28:05 +01:00
Manu
9f2cb7bf56 Add image platform to Habitica integration (#129009) 2025-01-03 20:23:43 +01:00
Maikel Punie
8a84abd50f Velbus diagnostics code cleanup (#134553) 2025-01-03 20:15:58 +01:00
Nerdix
b15e08ca9c Add sleep switch for all Foscam cameras if more than 1 camera are configured (#126064) 2025-01-03 20:15:09 +01:00
Norbert Rittel
3fb980901e Improve habitica action descriptions (#134563) 2025-01-03 20:07:30 +01:00
Joost Lekkerkerker
bd3a3fd26c Require at least bronze for new integrations (#134537)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2025-01-03 18:14:27 +00:00
Andrew Sayre
dfcb977a1d Add HEOS Reauth Flow (#134465) 2025-01-03 18:11:10 +00:00
Abílio Costa
94ad6ae814 Bump whirlpool-sixth-sense to 0.18.11 (#134562) 2025-01-03 17:45:27 +01:00
G Johansson
97aa93f92b Add supported features property in Sensibo (#134479) 2025-01-03 17:30:18 +01:00
G Johansson
ee025198e8 Update quality scale for Sensibo (#134551) 2025-01-03 17:28:02 +01:00
puddly
90265e2afd Move SiLabs firmware probing helper from ZHA into homeassistant_hardware (#131586)
* Move firmware probing helper out of ZHA and into hardware

* Add a unit test
2025-01-03 10:57:39 -05:00
Bram Kragten
a53554dad3 Update frontend to 20250103.0 (#134561) 2025-01-03 16:36:40 +01:00
Joost Lekkerkerker
2b6ad84cf5 Set Ituran to silver (#134538) 2025-01-03 16:31:31 +01:00
Erik Montnemery
92655fd640 Log cloud backup agent file list (#134556) 2025-01-03 16:30:14 +01:00
Maciej Bieniek
e43f72c452 Add support for xvoltage sensor for Shelly Plus UNI (#134261)
* Add support for xvoltage sensor

* Cleaning
2025-01-03 15:27:47 +01:00
Manu
9320ccfa4f Remove deprecated sensors in Habitica integration (#134320)
* Remove deprecated sensors

* remove todos/dailies also from enum
2025-01-03 14:48:26 +01:00
Erik Montnemery
336af8b551 Avoid early COMPLETED event when restoring backup (#134546) 2025-01-03 14:44:24 +01:00
starkillerOG
8a2f8dc736 Add Reolink proxy for playback (#133916) 2025-01-03 14:24:39 +01:00
Erik Montnemery
dc048bfcf5 Simplify error handling when creating backup (#134528) 2025-01-03 14:16:05 +01:00
Norbert Rittel
fb474827b5 Fix description of google_assistant.request_sync action (#134535) 2025-01-03 14:08:54 +01:00
Markus Adrario
eec5fb2133 Add Homee integration to Core (#133738)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-01-03 13:44:06 +01:00
Erik Montnemery
8ad7c522f4 Add backup as after_dependency of frontend (#134534) 2025-01-03 13:35:56 +01:00
Maikel Punie
c7f6630718 Velbus add init testcases (#134533) 2025-01-03 13:29:01 +01:00
Marc Mueller
afa95293dc Enable strict typing for pandora (#134536) 2025-01-03 13:23:39 +01:00
G Johansson
36582f9ac2 Refactor all Sensibo tests (#134478)
* Add me json

* Mods

* Mods

* More

* Mods

* Mods

* clean

* last bits

* Fix

* unique id

* return_value

* remove blocking

* Fix rebase
2025-01-03 12:44:47 +01:00
Ståle Storø Hauknes
19852ecc24 Add state_class to Airthings integration (#134503)
Add state class
2025-01-03 11:55:24 +01:00
Manu
5726d090b0 Add get_tasks action to Habitica integration (#127687)
Add get_tasks action
2025-01-03 11:53:30 +01:00
Indu Prakash
add401ffcf Add coordinator to vesync (#134087) 2025-01-03 11:33:16 +01:00
Robert Svensson
fd12ae2ccd Handle deCONZ color temp 0 is never used when calculating kelvin CT (#134521) 2025-01-03 10:51:20 +01:00
Franck Nijhof
e15eda3aa2 Only load Peblar customization update entity when present (#134526) 2025-01-03 10:51:05 +01:00
Dan Raper
cc0adcf47f Add switch platform to Ohme (#134347)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-01-03 10:39:41 +01:00
Franck Nijhof
06580ce10f Update peblar to v0.3.2 (#134524) 2025-01-03 10:37:39 +01:00
Erik Montnemery
b78e39da2d Fix activating backup retention config on startup (#134523) 2025-01-03 10:29:29 +01:00
G Johansson
46824a2a53 Add quality scale to Sensibo (#134296) 2025-01-03 10:23:25 +01:00
Joost Lekkerkerker
ee01289ee8 Bump python-overseerr to 0.5.0 (#134522) 2025-01-03 10:22:59 +01:00
Erik Montnemery
0bd22eabc7 Improve recorder schema migration error test (#134518) 2025-01-03 10:05:07 +01:00
Erik Montnemery
c901352bef Add error prints for recorder fatal errors (#134517) 2025-01-03 10:01:35 +01:00
Joost Lekkerkerker
23ed62c1bc Push Overseerr updates via webhook (#134187) 2025-01-03 08:26:01 +01:00
Paulus Schoutsen
0ef254bc9a Fix backup dir not existing (#134506) 2025-01-03 00:21:19 -05:00
rrooggiieerr
629d108078 Use the latest version of the pyserial-asyncio-fast library (#134501) 2025-01-03 03:15:46 +01:00
Marc Mueller
6f3544fa47 Add types package for pexpect (#134461) 2025-01-03 02:53:08 +01:00
Franck Nijhof
cb389d29ea Fix input_datetime.set_datetime not accepting 0 timestamp value (#134489) 2025-01-02 23:45:00 +01:00
Josef Zweck
ac26ca2da5 Bump aioacaia to 0.1.13 (#134496) 2025-01-03 00:28:29 +02:00
G Johansson
d5bcb73d33 Bump psutil to 6.1.1 (#134494) 2025-01-02 22:45:24 +01:00
Marc Mueller
e6a18357db Update pillow to 11.1.0 (#134469) 2025-01-02 22:36:14 +01:00
G Johansson
13ec0659ff Remove deprecated uptime sensor from qnap_qsw (#134493) 2025-01-02 22:29:50 +01:00
G Johansson
a7fb20ab58 Remove deprecated attributes from ecovacs (#134492) 2025-01-02 22:19:51 +01:00
G Johansson
657da47458 Remove worldclock config entry import (#134491) 2025-01-02 21:45:20 +01:00
Franck Nijhof
a4708876a9 Update peblar to 0.3.1 (#134486) 2025-01-02 21:41:54 +01:00
G Johansson
4239c5b557 Improve error strings in Sensibo (#134487) 2025-01-02 21:19:20 +01:00
G Johansson
836354bb99 Use username as config entry title in Sensibo (#134488) 2025-01-02 21:18:19 +01:00
Norbert Rittel
a7af042e57 Fix a few small typos in peblar (#134481) 2025-01-02 21:17:29 +01:00
Franck Nijhof
09476ade82 Remove sneaked in IronOS submodule (#134477) 2025-01-02 20:22:17 +01:00
Andrea Arcangeli
25937d7868 open_meteo: correct UTC timezone handling in hourly forecast (#129664)
Co-authored-by: G Johansson <goran.johansson@shiftit.se>
2025-01-02 19:37:36 +01:00
Duco Sebel
4e74d14beb Include host in Peblar EV-Charger discovery setup description (#133954)
Co-authored-by: Franck Nijhof <git@frenck.dev>
2025-01-02 19:34:51 +01:00
SparkyDan555
309b7eb436 Change Reolink person binary sensor icon (#134472) 2025-01-02 19:18:40 +01:00
Erik Montnemery
cf238cd8f7 Don't start recorder if a database from the future is used (#134467) 2025-01-02 18:56:23 +01:00
Robert Resch
ee46edffa3 Bump deebot-client to 10.1.0 (#134470) 2025-01-02 18:54:27 +01:00
Erik Montnemery
876b3423ba Improve hassio backup create and restore parameter checks (#134434) 2025-01-02 17:52:50 +01:00
Norbert Rittel
2752a35e23 Remove excessive newline codes from strings.json (#134468) 2025-01-02 17:43:49 +01:00
Craig Andrews
9e8df72c0d Improve is docker env checks (#132404)
Co-authored-by: Franck Nijhof <frenck@frenck.nl>
Co-authored-by: Sander Hoentjen <sander@hoentjen.eu>
Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>
Co-authored-by: Robert Resch <robert@resch.dev>
2025-01-02 17:21:49 +01:00
Bram Kragten
5439613bff Update frontend to 20250102.0 (#134462) 2025-01-02 17:17:57 +01:00
Ілля Піскурьов
3b5455bc49 Add support for specifying hvac_onoff_register value on modbus (#128366)
Co-authored-by: Abílio Costa <abmantis@users.noreply.github.com>
2025-01-02 15:18:05 +00:00
Noah Husby
104151d322 Remove deprecated YAML import from MPD (#134459) 2025-01-02 16:08:33 +01:00
Martin Hjelmare
a329828bdf Handle backup errors more consistently (#133522)
* Add backup manager and read writer errors

* Clean up not needed default argument

* Clean up todo comment

* Trap agent bugs during upload

* Always release stream

* Clean up leftover

* Update test for backup with automatic settings

* Fix use of vol.Any

* Refactor test helper

* Only update successful timestamp if completed event is sent

* Always delete surplus copies

* Fix after rebase

* Fix after rebase

* Revert "Fix use of vol.Any"

This reverts commit 28fd7a544899bb6ed05f771e9e608bc5b41d2b5e.

* Inherit BackupReaderWriterError in IncorrectPasswordError

---------

Co-authored-by: Erik Montnemery <erik@montnemery.com>
2025-01-02 15:45:46 +01:00
Marc Mueller
aa9e721e8b Update pexpect to 4.9.0 (#134450) 2025-01-02 15:36:44 +01:00
Josef Zweck
1b49f88be9 Bump aioacaia to 0.1.12 (#134454) 2025-01-02 15:33:22 +01:00
Marc Mueller
c345f2d548 Improve pandora media_player typing (#134447) 2025-01-02 13:55:59 +01:00
Manu
1d731875ae Remove deprecated yaml import from pyLoad integration (#134200) 2025-01-02 13:29:55 +01:00
Erik Montnemery
0c3489c1b3 Adjust language in backup integration (#134440)
* Adjust language in backup integration

* Update tests
2025-01-02 13:29:46 +01:00
Marc Mueller
c5865c6d18 Add types package for pyserial (#134444) 2025-01-02 13:21:20 +01:00
Norbert Rittel
e1a0fb2f1a Improve action descriptions with some more detail from the docs (#134120)
Co-authored-by: G Johansson <goran.johansson@shiftit.se>
2025-01-02 12:52:51 +01:00
Erik Montnemery
d725cdae13 Initialize AppleTVConfigFlow.identifiers (#134443) 2025-01-02 12:49:03 +01:00
Erik Montnemery
e1bd82ea32 Export IncorrectPasswordError from backup integration (#134436) 2025-01-02 12:40:10 +01:00
Thomas55555
4bcc551b61 Add sw_version to apsystems (#134441) 2025-01-02 12:28:48 +01:00
Marc Mueller
08019e76d8 Update types packages (#134433) 2025-01-02 12:00:29 +01:00
Sven Naumann
0b32342bf0 Add mode selector to Twinkly (#134041) 2025-01-02 10:54:29 +00:00
Krzysztof Dąbrowski
add4e1a708 Add state attributes translations to GIOS (#134390) 2025-01-02 11:38:12 +01:00
Stefan Agner
fb3105bdc0 Improve Supervisor backup error handling (#134346)
* Raise Home Assistant error in case backup restore fails

This change raises a Home Assistant error in case the backup restore
fails. The Supervisor is checking some common issues before starting
the actual restore in background. This early checks raise an exception
(represented by a HTTP 400 error). This change catches such errors and
raises a Home Assistant error with the message from the Supervisor
exception.

* Add test coverage
2025-01-02 11:37:25 +01:00
Norbert Rittel
3845acd0ce Improve names and descriptions in neato.custom_cleaning action (#134399) 2025-01-02 11:04:23 +01:00
Manu
b45c68554c Remove habitipy references in Habitica integration (#134419) 2025-01-02 10:47:40 +01:00
G Johansson
8a45aa4c42 Add translations to all Sensibo errors (#134422)
Co-authored-by: Franck Nijhof <git@frenck.dev>
2025-01-02 10:46:55 +01:00
G Johansson
51ccba12af Add action translations to Sensibo (#134420) 2025-01-02 10:45:20 +01:00
John Barreiros
c8699dc066 Add current_humidity state attribute to Google Nest climate entity (#134426) 2025-01-02 10:44:15 +01:00
ashionky
87454babfa Add debug log and Optimize code (#134328)
* debug log

* add sw_version hw_version

* log
2025-01-02 09:10:01 +01:00
Andrew Sayre
c9ff575628 Add HEOS options flow for optional authentication (#134105)
* Add heos options flow

* Add options flow tests

* Test error condition during options sign out

* Use credentials when setting up

* Update warning instructions

* Simplify exception logic

* Cover unknown command error condition

* Add test for options

* Correct const import location

* Review feedback

* Update per feedback

* Parameterize tests and remaining feedback

* Correct log level in init

* nitpick feedback
2025-01-02 09:07:34 +01:00
G Johansson
877d16273b Fix SQL sensor name (#134414) 2025-01-02 08:51:49 +01:00
Marc Mueller
dc5bfba902 Update mypy-dev to 1.15.0a1 (#134416) 2025-01-02 08:45:05 +01:00
TheJulianJES
5e7a405f34 Bump ZHA to 0.0.44 (#134427) 2025-01-02 08:43:38 +01:00
Matthew FitzGerald-Chamberlain
5228f3d85c Improve support for Aprilaire S86WMUPR (#133974) 2025-01-02 08:39:57 +01:00
G Johansson
2efc75fdf5 Add base entity to Mill (#134415) 2025-01-02 07:31:54 +01:00
Michael Hansen
a435fd12f0 Bump intents to 2025.1.1 (#134424) 2025-01-01 21:03:17 -05:00
Allen Porter
a5d0c3528c Add the Model Context Protocol Server integration (#134122)
* Add the Model Context Protocol Server integration

* Remove unusued code in init

* Fix comment wording

* Use util.ulid for unique ids

* Set config entry title to the LLM API name

* Extract an SSE parser and update comments

* Update comments and defend against already closed sessions

* Shorten description

* Update homeassistant/components/mcp_server/__init__.py

Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>

* Change integration type to service

---------

Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>
2025-01-01 19:38:33 -05:00
Daniel Hjelseth Høyer
5e981d00a4 Add mill number platform (#134044)
* Mill number, max heating power

Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>

* Mill number, max heating power

Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>

* Mill number, max heating power

Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>

* Mill number, max heating power

Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>

* Mill number, max heating power

Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>

* type

Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>

---------

Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2025-01-01 23:25:42 +01:00
G Johansson
97dc72a6e2 Move available property to base entity in Sensibo (#134410)
* Move available property to base entity in Sensibo

* Fix test
2025-01-01 23:02:06 +01:00
Maikel Punie
088b097a03 Velbus select platform testcases (#134394) 2025-01-01 17:39:39 +01:00
Jan Bouwhuis
85c94e6403 Calculate number of discovery topics correctly (#134393) 2025-01-01 16:55:41 +01:00
Maikel Punie
a2ef1604af Add Velbus climate platform tests (#134387) 2025-01-01 16:01:02 +01:00
Sven Naumann
55dc4b0d2c Implement base entity class for Twinkly (#134382)
* implement base entity class for twinkly

* Update homeassistant/components/twinkly/entity.py

Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>

* super init

---------

Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-01-01 13:49:13 +01:00
Adam Štrauch
18e8a3b185 Add new ID LAP-V201S-AEUR for Vital200S AirPurifier in Vesync integration (#133999) 2025-01-01 13:10:40 +01:00
cdnninja
3a68a0a67f Vesync unload error when not all platforms used (#134166) 2025-01-01 13:03:39 +01:00
Josef Zweck
7ab2d2e07a Cleanup lamarzocco tests (#134383) 2025-01-01 13:00:14 +01:00
Keith
809629c0e2 Add integration for igloohome devices (#130657)
Co-authored-by: Josef Zweck <24647999+zweckj@users.noreply.github.com>
Co-authored-by: Josef Zweck <josef@zweck.dev>
2025-01-01 12:55:04 +01:00
G Johansson
2be578a33f Add diagnostics to Trafikverket Weatherstation (#134314) 2025-01-01 12:32:35 +01:00
Maikel Punie
5cff79ce50 Add velbus switch platform testcases (#134207) 2025-01-01 12:11:27 +01:00
Brett Adams
513c8487c5 Check vehicle metadata (#134381) 2025-01-01 12:09:15 +01:00
Kenny Root
031de8da51 Bump zabbix-utils to 2.0.2 (#134373) 2025-01-01 11:42:16 +01:00
G Johansson
2e1463b9e9 Add placeholder url to Sensibo api description (#134342) 2024-12-31 20:09:49 -06:00
Norbert Rittel
9a58440296 Use "restore from" in field descriptions of restore_partial action (#134285) 2024-12-31 23:29:15 +01:00
Joost Lekkerkerker
26e0fcdb08 Improve Mealie set mealplan service (#130606)
* Improve Mealie set mealplan service

* Fix

* Fix
2024-12-31 17:06:42 -05:00
Bram Kragten
e835e41d59 Update frontend to 20241231.0 (#134363) 2024-12-31 17:04:28 -05:00
Niels Mündler
c53c0a13be Bump pysynthru version to 0.8.0 (#134294) 2024-12-31 23:03:35 +01:00
Jan Bouwhuis
8098122dfe Ensure an entity platform is added in mqtt tests (#134331) 2024-12-31 23:01:55 +01:00
starkillerOG
1d6ecbd1d5 Change Reolink test switch entity ID (#134339) 2024-12-31 22:57:43 +01:00
Dan Raper
c8276ec325 Bump ohmepy to 1.2.3 (#134348) 2024-12-31 22:54:20 +01:00
Josef Zweck
ddfad614ab Bump pylamarzocco to 1.4.6 (#134367) 2024-12-31 22:49:29 +01:00
Norbert Rittel
8eb21749b5 Remove leftover newline codes and periods from strings.json (#134354) 2024-12-31 22:39:07 +01:00
Josef Zweck
a6ba25d3d4 Use text selectors for lamarzocco config flow (#134368) 2024-12-31 22:38:31 +01:00
starkillerOG
1e70a0060b Add Reolink baby crying binary sensor (#134290)
* Add baby crying detection

* Bump reolink-aio to 0.11.6
2024-12-31 22:27:01 +01:00
Noah Husby
6c47f03d17 Bump aiorussound to 4.4.0 (#134366) 2024-12-31 22:21:14 +01:00
Markus Jacobsen
2054988790 Add Bang & Olufsen button Event entities (#127550)
* Add button events

* Remove unused common keys
Rename Preset to Favourite

* Add event testing

* Add check for Beoconnect Core

* Rename device controls

* Add test for Beoconnect core event entity creation

* Fix config entry type

* Add a type checking check before assertion

* Add icon translations

* Remove useless defined icons

* Remove base event class

* Update homeassistant/components/bang_olufsen/event.py

Co-authored-by: Josef Zweck <josef@zweck.dev>

---------

Co-authored-by: Josef Zweck <josef@zweck.dev>
2024-12-31 21:55:24 +01:00
tronikos
f1ad3040b8 Allow automations to pass any conversation_id for Google Generative AI (#134251) 2024-12-31 15:52:29 -05:00
Michael Hansen
53ca31c112 Bump hassil to 2.1.0 (#134359) 2024-12-31 15:52:15 -05:00
Michael Hansen
23459a0355 Revert speech seconds to 0.3 (#134360) 2024-12-31 20:04:41 +01:00
Simone Chemelli
a8bfe285bf Bump aioshelly to 12.2.0 (#134352) 2024-12-31 17:16:12 +01:00
Noah Husby
0888d1a169 Bump aiorussound to 4.3.0 (#134242)
* Bump aiorussound to 4.3.0

* Force CI
2024-12-31 16:14:24 +01:00
Dave T
8b20272272 Refactor and simplify config flow in generic camera (#134330)
Refactor and simplify config flow
2024-12-31 07:05:50 -08:00
Bram Kragten
06b33e5589 Set backup manager state to completed when restore is finished (#134283) 2024-12-31 15:01:06 +01:00
Brynley McDonald
9348569f90 Update Flick Electric API (#133475) 2024-12-31 14:28:24 +01:00
starkillerOG
4a9d545ffe Bump reolink-aio to 0.11.6 (#134286) 2024-12-31 10:31:40 +01:00
Simone Chemelli
277ee03145 Full test coverage for Vodafone Station sensor platform (#133285)
Co-authored-by: Joostlek <joostlek@outlook.com>
2024-12-31 09:55:54 +01:00
Norbert Rittel
6c9c17f129 Update description of the script toggle action (#134093) 2024-12-31 07:07:52 +00:00
Dave T
bf59241dab Add stream preview to options flow in generic camera (#133927)
* Add stream preview to options flow

* Increase test coverage

* Code review: use correct flow handler type in cast

* Restore test coverage to 100%

* Remove error and test that can't be triggered yet
2024-12-30 15:46:42 -08:00
Indu Prakash
57b7635b70 Bump pyvesync to 2.1.15 (#134156)
Bumped pyvesync to 2.1.15
2024-12-30 23:33:41 +01:00
G Johansson
4b96266647 Set parallel updates in Trafikverket Train (#134302) 2024-12-30 23:18:35 +01:00
G Johansson
6266a4153d Explicitly set config entry in Trafikverket Train coordinator (#134304) 2024-12-30 23:18:22 +01:00
G Johansson
a9949a0aab Use typed config entry everywhere in Trafikverket Train (#134303) 2024-12-30 23:17:21 +01:00
G Johansson
428a74fa48 Explicitly set config entry in Trafikverket Ferry coordinator (#134305) 2024-12-30 23:17:04 +01:00
G Johansson
9f1023b195 Explicitly set config entry in Trafikverket Weatherstation coordinator (#134310) 2024-12-30 23:16:41 +01:00
G Johansson
256fc54aa1 Set parallel updates in Trafiverket Weatherstation (#134309) 2024-12-30 23:16:16 +01:00
G Johansson
94c1b9a434 Use typed config entry everywhere in Trafikverket Weatherstation (#134308) 2024-12-30 23:15:54 +01:00
G Johansson
275c15e2ae Set parallel updates in Trafikverket Ferry (#134301) 2024-12-30 22:50:47 +01:00
G Johansson
9cdbcd93cd Use typed config entry everywhere in Trafikverket Ferry (#134300) 2024-12-30 22:48:33 +01:00
G Johansson
f2e856b8a2 Use typed config entry in Trafikverket Camera (#134299) 2024-12-30 22:48:22 +01:00
G Johansson
820f04e1e1 Add parallel updates to camera platform in Trafikverket Camera (#134298) 2024-12-30 22:48:12 +01:00
Noah Husby
b7541f098c Add discovery to Russound RIO (#134245) 2024-12-30 22:46:08 +01:00
Norbert Rittel
a345e80368 Replace unnecessary abbreviations in set_room_temperature action (#134278) 2024-12-30 21:28:38 +01:00
Norbert Rittel
7a3d9a9345 Replace "service" with "action" (#134279) 2024-12-30 21:26:53 +01:00
681 changed files with 35052 additions and 6236 deletions

View File

@@ -166,7 +166,7 @@ jobs:
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-ng-dev"
skip-binary: aiohttp;multidict;propcache;yarl;SQLAlchemy
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
@@ -234,7 +234,7 @@ jobs:
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
@@ -248,7 +248,7 @@ jobs:
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
@@ -262,7 +262,7 @@ jobs:
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.8.3
rev: v0.8.6
hooks:
- id: ruff
args:

View File

@@ -311,6 +311,7 @@ homeassistant.components.manual.*
homeassistant.components.mastodon.*
homeassistant.components.matrix.*
homeassistant.components.matter.*
homeassistant.components.mcp_server.*
homeassistant.components.mealie.*
homeassistant.components.media_extractor.*
homeassistant.components.media_player.*
@@ -364,6 +365,7 @@ homeassistant.components.otbr.*
homeassistant.components.overkiz.*
homeassistant.components.overseerr.*
homeassistant.components.p1_monitor.*
homeassistant.components.pandora.*
homeassistant.components.panel_custom.*
homeassistant.components.peblar.*
homeassistant.components.peco.*
@@ -381,6 +383,7 @@ homeassistant.components.pure_energie.*
homeassistant.components.purpleair.*
homeassistant.components.pushbullet.*
homeassistant.components.pvoutput.*
homeassistant.components.python_script.*
homeassistant.components.qnap_qsw.*
homeassistant.components.rabbitair.*
homeassistant.components.radarr.*

View File

@@ -637,6 +637,8 @@ build.json @home-assistant/supervisor
/tests/components/homeassistant_sky_connect/ @home-assistant/core
/homeassistant/components/homeassistant_yellow/ @home-assistant/core
/tests/components/homeassistant_yellow/ @home-assistant/core
/homeassistant/components/homee/ @Taraman17
/tests/components/homee/ @Taraman17
/homeassistant/components/homekit/ @bdraco
/tests/components/homekit/ @bdraco
/homeassistant/components/homekit_controller/ @Jc2k @bdraco
@@ -686,6 +688,8 @@ build.json @home-assistant/supervisor
/tests/components/icloud/ @Quentame @nzapponi
/homeassistant/components/idasen_desk/ @abmantis
/tests/components/idasen_desk/ @abmantis
/homeassistant/components/igloohome/ @keithle888
/tests/components/igloohome/ @keithle888
/homeassistant/components/ign_sismologia/ @exxamalte
/tests/components/ign_sismologia/ @exxamalte
/homeassistant/components/image/ @home-assistant/core
@@ -887,6 +891,8 @@ build.json @home-assistant/supervisor
/tests/components/matrix/ @PaarthShah
/homeassistant/components/matter/ @home-assistant/matter
/tests/components/matter/ @home-assistant/matter
/homeassistant/components/mcp_server/ @allenporter
/tests/components/mcp_server/ @allenporter
/homeassistant/components/mealie/ @joostlek @andrew-codechimp
/tests/components/mealie/ @joostlek @andrew-codechimp
/homeassistant/components/meater/ @Sotolotl @emontnemery

View File

@@ -89,7 +89,7 @@ from .helpers import (
)
from .helpers.dispatcher import async_dispatcher_send_internal
from .helpers.storage import get_internal_store_manager
from .helpers.system_info import async_get_system_info, is_official_image
from .helpers.system_info import async_get_system_info
from .helpers.typing import ConfigType
from .setup import (
# _setup_started is marked as protected to make it clear
@@ -106,6 +106,7 @@ from .util.async_ import create_eager_task
from .util.hass_dict import HassKey
from .util.logging import async_activate_log_queue_handler
from .util.package import async_get_user_site, is_docker_env, is_virtual_env
from .util.system_info import is_official_image
with contextlib.suppress(ImportError):
# Ensure anyio backend is imported to avoid it being imported in the event loop

View File

@@ -34,17 +34,17 @@
"services": {
"capture_image": {
"name": "Capture image",
"description": "Request a new image capture from a camera device.",
"description": "Requests a new image capture from a camera device.",
"fields": {
"entity_id": {
"name": "Entity",
"description": "Entity id of the camera to request an image."
"description": "Entity ID of the camera to request an image from."
}
}
},
"change_setting": {
"name": "Change setting",
"description": "Change an Abode system setting.",
"description": "Changes an Abode system setting.",
"fields": {
"setting": {
"name": "Setting",
@@ -58,11 +58,11 @@
},
"trigger_automation": {
"name": "Trigger automation",
"description": "Trigger an Abode automation.",
"description": "Triggers an Abode automation.",
"fields": {
"entity_id": {
"name": "Entity",
"description": "Entity id of the automation to trigger."
"description": "Entity ID of the automation to trigger."
}
}
}

View File

@@ -26,5 +26,5 @@
"iot_class": "local_push",
"loggers": ["aioacaia"],
"quality_scale": "platinum",
"requirements": ["aioacaia==0.1.11"]
"requirements": ["aioacaia==0.1.13"]
}

View File

@@ -39,45 +39,54 @@ SENSORS: dict[str, SensorEntityDescription] = {
key="temp",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
),
"humidity": SensorEntityDescription(
key="humidity",
device_class=SensorDeviceClass.HUMIDITY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
"pressure": SensorEntityDescription(
key="pressure",
device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
native_unit_of_measurement=UnitOfPressure.MBAR,
state_class=SensorStateClass.MEASUREMENT,
),
"battery": SensorEntityDescription(
key="battery",
device_class=SensorDeviceClass.BATTERY,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
"co2": SensorEntityDescription(
key="co2",
device_class=SensorDeviceClass.CO2,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
state_class=SensorStateClass.MEASUREMENT,
),
"voc": SensorEntityDescription(
key="voc",
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
state_class=SensorStateClass.MEASUREMENT,
),
"light": SensorEntityDescription(
key="light",
native_unit_of_measurement=PERCENTAGE,
translation_key="light",
state_class=SensorStateClass.MEASUREMENT,
),
"virusRisk": SensorEntityDescription(
key="virusRisk",
translation_key="virus_risk",
state_class=SensorStateClass.MEASUREMENT,
),
"mold": SensorEntityDescription(
key="mold",
translation_key="mold",
state_class=SensorStateClass.MEASUREMENT,
),
"rssi": SensorEntityDescription(
key="rssi",
@@ -85,16 +94,19 @@ SENSORS: dict[str, SensorEntityDescription] = {
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
"pm1": SensorEntityDescription(
key="pm1",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
device_class=SensorDeviceClass.PM1,
state_class=SensorStateClass.MEASUREMENT,
),
"pm25": SensorEntityDescription(
key="pm25",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
device_class=SensorDeviceClass.PM25,
state_class=SensorStateClass.MEASUREMENT,
),
}

View File

@@ -21,7 +21,7 @@
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"invalid_unique_id": "Impossible to determine a valid unique id for the device"
"invalid_unique_id": "Impossible to determine a valid unique ID for the device"
}
},
"options": {
@@ -38,17 +38,17 @@
}
},
"apps": {
"title": "Configure Android Apps",
"description": "Configure application id {app_id}",
"title": "Configure Android apps",
"description": "Configure application ID {app_id}",
"data": {
"app_name": "Application Name",
"app_name": "Application name",
"app_id": "Application ID",
"app_delete": "Check to delete this application"
}
},
"rules": {
"title": "Configure Android state detection rules",
"description": "Configure detection rule for application id {rule_id}",
"description": "Configure detection rule for application ID {rule_id}",
"data": {
"rule_id": "[%key:component::androidtv::options::step::apps::data::app_id%]",
"rule_values": "List of state detection rules (see documentation)",

View File

@@ -44,12 +44,12 @@
}
},
"apps": {
"title": "Configure Android Apps",
"description": "Configure application id {app_id}",
"title": "Configure Android apps",
"description": "Configure application ID {app_id}",
"data": {
"app_name": "Application Name",
"app_name": "Application name",
"app_id": "Application ID",
"app_icon": "Application Icon",
"app_icon": "Application icon",
"app_delete": "Check to delete this application"
}
}

View File

@@ -98,7 +98,6 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1
scan_filter: str | None = None
all_identifiers: set[str]
atv: BaseConfig | None = None
atv_identifiers: list[str] | None = None
_host: str # host in zeroconf discovery info, should not be accessed by other flows
@@ -118,6 +117,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
def __init__(self) -> None:
"""Initialize a new AppleTVConfigFlow."""
self.credentials: dict[int, str | None] = {} # Protocol -> credentials
self.all_identifiers: set[str] = set()
@property
def device_identifier(self) -> str | None:

View File

@@ -120,6 +120,8 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
"""Wait for the client to be ready."""
if not self.data or Attribute.MAC_ADDRESS not in self.data:
await self.client.read_mac_address()
data = await self.client.wait_for_response(
FunctionalDomain.IDENTIFICATION, 2, WAIT_TIMEOUT
)
@@ -130,12 +132,9 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
return False
if not self.data or Attribute.NAME not in self.data:
await self.client.wait_for_response(
FunctionalDomain.IDENTIFICATION, 4, WAIT_TIMEOUT
)
if not self.data or Attribute.THERMOSTAT_MODES not in self.data:
await self.client.read_thermostat_iaq_available()
await self.client.wait_for_response(
FunctionalDomain.CONTROL, 7, WAIT_TIMEOUT
)
@@ -144,10 +143,16 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
not self.data
or Attribute.INDOOR_TEMPERATURE_CONTROLLING_SENSOR_STATUS not in self.data
):
await self.client.read_sensors()
await self.client.wait_for_response(
FunctionalDomain.SENSORS, 2, WAIT_TIMEOUT
)
await self.client.read_thermostat_status()
await self.client.read_iaq_status()
await ready_callback(True)
return True

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["pyaprilaire"],
"requirements": ["pyaprilaire==0.7.4"]
"requirements": ["pyaprilaire==0.7.7"]
}

View File

@@ -29,6 +29,8 @@ class ApSystemsSensorData:
class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
"""Coordinator used for all sensors."""
device_version: str
def __init__(self, hass: HomeAssistant, api: APsystemsEZ1M) -> None:
"""Initialize my coordinator."""
super().__init__(
@@ -46,6 +48,7 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
raise UpdateFailed from None
self.api.max_power = device_info.maxPower
self.api.min_power = device_info.minPower
self.device_version = device_info.devVer
async def _async_update_data(self) -> ApSystemsSensorData:
try:

View File

@@ -21,7 +21,8 @@ class ApSystemsEntity(Entity):
"""Initialize the APsystems entity."""
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, data.device_id)},
serial_number=data.device_id,
manufacturer="APsystems",
model="EZ1-M",
serial_number=data.device_id,
sw_version=data.coordinator.device_version.split(" ")[1],
)

View File

@@ -19,5 +19,5 @@
"documentation": "https://www.home-assistant.io/integrations/aranet",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["aranet4==2.4.0"]
"requirements": ["aranet4==2.5.0"]
}

View File

@@ -22,6 +22,7 @@ from homeassistant.components.sensor import (
)
from homeassistant.const import (
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_NAME,
ATTR_SW_VERSION,
CONCENTRATION_PARTS_PER_MILLION,
@@ -142,6 +143,7 @@ def _sensor_device_info_to_hass(
if adv.readings and adv.readings.name:
hass_device_info[ATTR_NAME] = adv.readings.name
hass_device_info[ATTR_MANUFACTURER] = ARANET_MANUFACTURER_NAME
hass_device_info[ATTR_MODEL] = adv.readings.type.model
if adv.manufacturer_data:
hass_device_info[ATTR_SW_VERSION] = str(adv.manufacturer_data.version)
return hass_device_info

View File

@@ -90,7 +90,7 @@ class ArubaDeviceScanner(DeviceScanner):
"""Retrieve data from Aruba Access Point and return parsed result."""
connect = f"ssh {self.username}@{self.host} -o HostKeyAlgorithms=ssh-rsa"
ssh = pexpect.spawn(connect)
ssh: pexpect.spawn[str] = pexpect.spawn(connect, encoding="utf-8")
query = ssh.expect(
[
"password:",
@@ -125,12 +125,12 @@ class ArubaDeviceScanner(DeviceScanner):
ssh.expect("#")
ssh.sendline("show clients")
ssh.expect("#")
devices_result = ssh.before.split(b"\r\n")
devices_result = (ssh.before or "").splitlines()
ssh.sendline("exit")
devices: dict[str, dict[str, str]] = {}
for device in devices_result:
if match := _DEVICES_REGEX.search(device.decode("utf-8")):
if match := _DEVICES_REGEX.search(device):
devices[match.group("ip")] = {
"ip": match.group("ip"),
"mac": match.group("mac").upper(),

View File

@@ -6,5 +6,5 @@
"iot_class": "local_polling",
"loggers": ["pexpect", "ptyprocess"],
"quality_scale": "legacy",
"requirements": ["pexpect==4.6.0"]
"requirements": ["pexpect==4.9.0"]
}

View File

@@ -108,6 +108,7 @@ async def async_pipeline_from_audio_stream(
device_id: str | None = None,
start_stage: PipelineStage = PipelineStage.STT,
end_stage: PipelineStage = PipelineStage.TTS,
conversation_extra_system_prompt: str | None = None,
) -> None:
"""Create an audio pipeline from an audio stream.
@@ -119,6 +120,7 @@ async def async_pipeline_from_audio_stream(
stt_metadata=stt_metadata,
stt_stream=stt_stream,
wake_word_phrase=wake_word_phrase,
conversation_extra_system_prompt=conversation_extra_system_prompt,
run=PipelineRun(
hass,
context=context,

View File

@@ -1010,7 +1010,11 @@ class PipelineRun:
self.intent_agent = agent_info.id
async def recognize_intent(
self, intent_input: str, conversation_id: str | None, device_id: str | None
self,
intent_input: str,
conversation_id: str | None,
device_id: str | None,
conversation_extra_system_prompt: str | None,
) -> str:
"""Run intent recognition portion of pipeline. Returns text to speak."""
if self.intent_agent is None:
@@ -1045,6 +1049,7 @@ class PipelineRun:
device_id=device_id,
language=input_language,
agent_id=self.intent_agent,
extra_system_prompt=conversation_extra_system_prompt,
)
processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT
@@ -1392,8 +1397,13 @@ class PipelineInput:
"""Input for text-to-speech. Required when start_stage = tts."""
conversation_id: str | None = None
"""Identifier for the conversation."""
conversation_extra_system_prompt: str | None = None
"""Extra prompt information for the conversation agent."""
device_id: str | None = None
"""Identifier of the device that is processing the input/output of the pipeline."""
async def execute(self) -> None:
"""Run pipeline."""
@@ -1483,6 +1493,7 @@ class PipelineInput:
intent_input,
self.conversation_id,
self.device_id,
self.conversation_extra_system_prompt,
)
if tts_input.strip():
current_stage = PipelineStage.TTS

View File

@@ -75,7 +75,7 @@ class AudioBuffer:
class VoiceCommandSegmenter:
"""Segments an audio stream into voice commands."""
speech_seconds: float = 0.1
speech_seconds: float = 0.3
"""Seconds of speech before voice command has started."""
command_seconds: float = 1.0

View File

@@ -31,8 +31,8 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"invalid_unique_id": "Impossible to determine a valid unique id for the device",
"no_unique_id": "A device without a valid unique id is already configured. Configuration of multiple instance is not possible"
"invalid_unique_id": "Impossible to determine a valid unique ID for the device",
"no_unique_id": "A device without a valid unique ID is already configured. Configuration of multiple instances is not possible"
}
},
"options": {
@@ -42,7 +42,7 @@
"consider_home": "Seconds to wait before considering a device away",
"track_unknown": "Track unknown / unnamed devices",
"interface": "The interface that you want statistics from (e.g. eth0, eth1 etc)",
"dnsmasq": "The location in the router of the dnsmasq.leases files",
"dnsmasq": "The location of the dnsmasq.leases file in the router",
"require_ip": "Devices must have IP (for access point mode)"
}
}

View File

@@ -21,8 +21,10 @@ from .manager import (
BackupManager,
BackupPlatformProtocol,
BackupReaderWriter,
BackupReaderWriterError,
CoreBackupReaderWriter,
CreateBackupEvent,
IncorrectPasswordError,
ManagerBackup,
NewBackup,
WrittenBackup,
@@ -39,8 +41,10 @@ __all__ = [
"BackupAgentPlatformProtocol",
"BackupPlatformProtocol",
"BackupReaderWriter",
"BackupReaderWriterError",
"CreateBackupEvent",
"Folder",
"IncorrectPasswordError",
"LocalBackupAgent",
"NewBackup",
"WrittenBackup",

View File

@@ -17,7 +17,7 @@ from homeassistant.helpers.typing import UNDEFINED, UndefinedType
from homeassistant.util import dt as dt_util
from .const import LOGGER
from .models import Folder
from .models import BackupManagerError, Folder
if TYPE_CHECKING:
from .manager import BackupManager, ManagerBackup
@@ -124,6 +124,7 @@ class BackupConfig:
def load(self, stored_config: StoredBackupConfig) -> None:
"""Load config."""
self.data = BackupConfigData.from_dict(stored_config)
self.data.retention.apply(self._manager)
self.data.schedule.apply(self._manager)
async def update(
@@ -160,8 +161,13 @@ class RetentionConfig:
def apply(self, manager: BackupManager) -> None:
"""Apply backup retention configuration."""
if self.days is not None:
LOGGER.debug(
"Scheduling next automatic delete of backups older than %s in 1 day",
self.days,
)
self._schedule_next(manager)
else:
LOGGER.debug("Unscheduling next automatic delete")
self._unschedule_next(manager)
def to_dict(self) -> StoredRetentionConfig:
@@ -318,9 +324,9 @@ class BackupSchedule:
password=config_data.create_backup.password,
with_automatic_settings=True,
)
except BackupManagerError as err:
LOGGER.error("Error creating backup: %s", err)
except Exception: # noqa: BLE001
# another more specific exception will be added
# and handled in the future
LOGGER.exception("Unexpected error creating automatic backup")
manager.remove_next_backup_event = async_track_point_in_time(

View File

@@ -46,15 +46,11 @@ from .const import (
EXCLUDE_FROM_BACKUP,
LOGGER,
)
from .models import AgentBackup, Folder
from .models import AgentBackup, BackupManagerError, Folder
from .store import BackupStore
from .util import make_backup_dir, read_backup, validate_password
class IncorrectPasswordError(HomeAssistantError):
"""Raised when the password is incorrect."""
@dataclass(frozen=True, kw_only=True, slots=True)
class NewBackup:
"""New backup class."""
@@ -245,6 +241,14 @@ class BackupReaderWriter(abc.ABC):
"""Restore a backup."""
class BackupReaderWriterError(HomeAssistantError):
"""Backup reader/writer error."""
class IncorrectPasswordError(BackupReaderWriterError):
"""Raised when the password is incorrect."""
class BackupManager:
"""Define the format that backup managers can have."""
@@ -373,7 +377,9 @@ class BackupManager:
)
for result in pre_backup_results:
if isinstance(result, Exception):
raise result
raise BackupManagerError(
f"Error during pre-backup: {result}"
) from result
async def async_post_backup_actions(self) -> None:
"""Perform post backup actions."""
@@ -386,7 +392,9 @@ class BackupManager:
)
for result in post_backup_results:
if isinstance(result, Exception):
raise result
raise BackupManagerError(
f"Error during post-backup: {result}"
) from result
async def load_platforms(self) -> None:
"""Load backup platforms."""
@@ -422,11 +430,22 @@ class BackupManager:
return_exceptions=True,
)
for idx, result in enumerate(sync_backup_results):
if isinstance(result, Exception):
if isinstance(result, BackupReaderWriterError):
# writer errors will affect all agents
# no point in continuing
raise BackupManagerError(str(result)) from result
if isinstance(result, BackupAgentError):
LOGGER.error("Error uploading to %s: %s", agent_ids[idx], result)
agent_errors[agent_ids[idx]] = result
LOGGER.exception(
"Error during backup upload - %s", result, exc_info=result
)
continue
if isinstance(result, Exception):
# trap bugs from agents
agent_errors[agent_ids[idx]] = result
LOGGER.error("Unexpected error: %s", result, exc_info=result)
continue
if isinstance(result, BaseException):
raise result
return agent_errors
async def async_get_backups(
@@ -449,7 +468,7 @@ class BackupManager:
agent_errors[agent_ids[idx]] = result
continue
if isinstance(result, BaseException):
raise result
raise result # unexpected error
for agent_backup in result:
if (backup_id := agent_backup.backup_id) not in backups:
if known_backup := self.known_backups.get(backup_id):
@@ -499,7 +518,7 @@ class BackupManager:
agent_errors[agent_ids[idx]] = result
continue
if isinstance(result, BaseException):
raise result
raise result # unexpected error
if not result:
continue
if backup is None:
@@ -563,7 +582,7 @@ class BackupManager:
agent_errors[agent_ids[idx]] = result
continue
if isinstance(result, BaseException):
raise result
raise result # unexpected error
if not agent_errors:
self.known_backups.remove(backup_id)
@@ -578,7 +597,7 @@ class BackupManager:
) -> None:
"""Receive and store a backup file from upload."""
if self.state is not BackupManagerState.IDLE:
raise HomeAssistantError(f"Backup manager busy: {self.state}")
raise BackupManagerError(f"Backup manager busy: {self.state}")
self.async_on_backup_event(
ReceiveBackupEvent(stage=None, state=ReceiveBackupState.IN_PROGRESS)
)
@@ -652,6 +671,7 @@ class BackupManager:
include_homeassistant=include_homeassistant,
name=name,
password=password,
raise_task_error=True,
with_automatic_settings=with_automatic_settings,
)
assert self._backup_finish_task
@@ -669,11 +689,12 @@ class BackupManager:
include_homeassistant: bool,
name: str | None,
password: str | None,
raise_task_error: bool = False,
with_automatic_settings: bool = False,
) -> NewBackup:
"""Initiate generating a backup."""
if self.state is not BackupManagerState.IDLE:
raise HomeAssistantError(f"Backup manager busy: {self.state}")
raise BackupManagerError(f"Backup manager busy: {self.state}")
if with_automatic_settings:
self.config.data.last_attempted_automatic_backup = dt_util.now()
@@ -692,6 +713,7 @@ class BackupManager:
include_homeassistant=include_homeassistant,
name=name,
password=password,
raise_task_error=raise_task_error,
with_automatic_settings=with_automatic_settings,
)
except Exception:
@@ -714,57 +736,81 @@ class BackupManager:
include_homeassistant: bool,
name: str | None,
password: str | None,
raise_task_error: bool,
with_automatic_settings: bool,
) -> NewBackup:
"""Initiate generating a backup."""
if not agent_ids:
raise HomeAssistantError("At least one agent must be selected")
if any(agent_id not in self.backup_agents for agent_id in agent_ids):
raise HomeAssistantError("Invalid agent selected")
raise BackupManagerError("At least one agent must be selected")
if invalid_agents := [
agent_id for agent_id in agent_ids if agent_id not in self.backup_agents
]:
raise BackupManagerError(f"Invalid agents selected: {invalid_agents}")
if include_all_addons and include_addons:
raise HomeAssistantError(
raise BackupManagerError(
"Cannot include all addons and specify specific addons"
)
backup_name = (
name
or f"{"Automatic" if with_automatic_settings else "Custom"} {HAVERSION}"
or f"{"Automatic" if with_automatic_settings else "Custom"} backup {HAVERSION}"
)
new_backup, self._backup_task = await self._reader_writer.async_create_backup(
agent_ids=agent_ids,
backup_name=backup_name,
extra_metadata={
"instance_id": await instance_id.async_get(self.hass),
"with_automatic_settings": with_automatic_settings,
},
include_addons=include_addons,
include_all_addons=include_all_addons,
include_database=include_database,
include_folders=include_folders,
include_homeassistant=include_homeassistant,
on_progress=self.async_on_backup_event,
password=password,
)
self._backup_finish_task = self.hass.async_create_task(
try:
(
new_backup,
self._backup_task,
) = await self._reader_writer.async_create_backup(
agent_ids=agent_ids,
backup_name=backup_name,
extra_metadata={
"instance_id": await instance_id.async_get(self.hass),
"with_automatic_settings": with_automatic_settings,
},
include_addons=include_addons,
include_all_addons=include_all_addons,
include_database=include_database,
include_folders=include_folders,
include_homeassistant=include_homeassistant,
on_progress=self.async_on_backup_event,
password=password,
)
except BackupReaderWriterError as err:
raise BackupManagerError(str(err)) from err
backup_finish_task = self._backup_finish_task = self.hass.async_create_task(
self._async_finish_backup(agent_ids, with_automatic_settings),
name="backup_manager_finish_backup",
)
if not raise_task_error:
def log_finish_task_error(task: asyncio.Task[None]) -> None:
if task.done() and not task.cancelled() and (err := task.exception()):
if isinstance(err, BackupManagerError):
LOGGER.error("Error creating backup: %s", err)
else:
LOGGER.error("Unexpected error: %s", err, exc_info=err)
backup_finish_task.add_done_callback(log_finish_task_error)
return new_backup
async def _async_finish_backup(
self, agent_ids: list[str], with_automatic_settings: bool
) -> None:
"""Finish a backup."""
if TYPE_CHECKING:
assert self._backup_task is not None
backup_success = False
try:
written_backup = await self._backup_task
except Exception as err: # noqa: BLE001
LOGGER.debug("Generating backup failed", exc_info=err)
self.async_on_backup_event(
CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
)
except Exception as err:
if with_automatic_settings:
self._update_issue_backup_failed()
if isinstance(err, BackupReaderWriterError):
raise BackupManagerError(str(err)) from err
raise # unexpected error
else:
LOGGER.debug(
"Generated new backup with backup_id %s, uploading to agents %s",
@@ -777,28 +823,40 @@ class BackupManager:
state=CreateBackupState.IN_PROGRESS,
)
)
agent_errors = await self._async_upload_backup(
backup=written_backup.backup,
agent_ids=agent_ids,
open_stream=written_backup.open_stream,
)
await written_backup.release_stream()
if with_automatic_settings:
# create backup was successful, update last_completed_automatic_backup
self.config.data.last_completed_automatic_backup = dt_util.now()
self.store.save()
self._update_issue_after_agent_upload(agent_errors)
self.known_backups.add(written_backup.backup, agent_errors)
try:
agent_errors = await self._async_upload_backup(
backup=written_backup.backup,
agent_ids=agent_ids,
open_stream=written_backup.open_stream,
)
finally:
await written_backup.release_stream()
self.known_backups.add(written_backup.backup, agent_errors)
if not agent_errors:
if with_automatic_settings:
# create backup was successful, update last_completed_automatic_backup
self.config.data.last_completed_automatic_backup = dt_util.now()
self.store.save()
backup_success = True
if with_automatic_settings:
self._update_issue_after_agent_upload(agent_errors)
# delete old backups more numerous than copies
# try this regardless of agent errors above
await delete_backups_exceeding_configured_count(self)
self.async_on_backup_event(
CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED)
)
finally:
self._backup_task = None
self._backup_finish_task = None
self.async_on_backup_event(
CreateBackupEvent(
stage=None,
state=CreateBackupState.COMPLETED
if backup_success
else CreateBackupState.FAILED,
)
)
self.async_on_backup_event(IdleEvent())
async def async_restore_backup(
@@ -814,7 +872,7 @@ class BackupManager:
) -> None:
"""Initiate restoring a backup."""
if self.state is not BackupManagerState.IDLE:
raise HomeAssistantError(f"Backup manager busy: {self.state}")
raise BackupManagerError(f"Backup manager busy: {self.state}")
self.async_on_backup_event(
RestoreBackupEvent(stage=None, state=RestoreBackupState.IN_PROGRESS)
@@ -829,6 +887,9 @@ class BackupManager:
restore_folders=restore_folders,
restore_homeassistant=restore_homeassistant,
)
self.async_on_backup_event(
RestoreBackupEvent(stage=None, state=RestoreBackupState.COMPLETED)
)
except Exception:
self.async_on_backup_event(
RestoreBackupEvent(stage=None, state=RestoreBackupState.FAILED)
@@ -851,7 +912,7 @@ class BackupManager:
"""Initiate restoring a backup."""
agent = self.backup_agents[agent_id]
if not await agent.async_get_backup(backup_id):
raise HomeAssistantError(
raise BackupManagerError(
f"Backup {backup_id} not found in agent {agent_id}"
)
@@ -1024,11 +1085,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
backup_id = _generate_backup_id(date_str, backup_name)
if include_addons or include_all_addons or include_folders:
raise HomeAssistantError(
raise BackupReaderWriterError(
"Addons and folders are not supported by core backup"
)
if not include_homeassistant:
raise HomeAssistantError("Home Assistant must be included in backup")
raise BackupReaderWriterError("Home Assistant must be included in backup")
backup_task = self._hass.async_create_task(
self._async_create_backup(
@@ -1099,6 +1160,13 @@ class CoreBackupReaderWriter(BackupReaderWriter):
password,
local_agent_tar_file_path,
)
except (BackupManagerError, OSError, tarfile.TarError, ValueError) as err:
# BackupManagerError from async_pre_backup_actions
# OSError from file operations
# TarError from tarfile
# ValueError from json_bytes
raise BackupReaderWriterError(str(err)) from err
else:
backup = AgentBackup(
addons=[],
backup_id=backup_id,
@@ -1116,12 +1184,15 @@ class CoreBackupReaderWriter(BackupReaderWriter):
async_add_executor_job = self._hass.async_add_executor_job
async def send_backup() -> AsyncIterator[bytes]:
f = await async_add_executor_job(tar_file_path.open, "rb")
try:
while chunk := await async_add_executor_job(f.read, 2**20):
yield chunk
finally:
await async_add_executor_job(f.close)
f = await async_add_executor_job(tar_file_path.open, "rb")
try:
while chunk := await async_add_executor_job(f.read, 2**20):
yield chunk
finally:
await async_add_executor_job(f.close)
except OSError as err:
raise BackupReaderWriterError(str(err)) from err
async def open_backup() -> AsyncIterator[bytes]:
return send_backup()
@@ -1129,14 +1200,20 @@ class CoreBackupReaderWriter(BackupReaderWriter):
async def remove_backup() -> None:
if local_agent_tar_file_path:
return
await async_add_executor_job(tar_file_path.unlink, True)
try:
await async_add_executor_job(tar_file_path.unlink, True)
except OSError as err:
raise BackupReaderWriterError(str(err)) from err
return WrittenBackup(
backup=backup, open_stream=open_backup, release_stream=remove_backup
)
finally:
# Inform integrations the backup is done
await manager.async_post_backup_actions()
try:
await manager.async_post_backup_actions()
except BackupManagerError as err:
raise BackupReaderWriterError(str(err)) from err
def _mkdir_and_generate_backup_contents(
self,
@@ -1206,6 +1283,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
if self._local_agent_id in agent_ids:
local_agent = manager.local_backup_agents[self._local_agent_id]
tar_file_path = local_agent.get_backup_path(backup.backup_id)
await async_add_executor_job(make_backup_dir, tar_file_path.parent)
await async_add_executor_job(shutil.move, temp_file, tar_file_path)
else:
tar_file_path = temp_file
@@ -1249,11 +1327,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
"""
if restore_addons or restore_folders:
raise HomeAssistantError(
raise BackupReaderWriterError(
"Addons and folders are not supported in core restore"
)
if not restore_homeassistant and not restore_database:
raise HomeAssistantError(
raise BackupReaderWriterError(
"Home Assistant or database must be included in restore"
)
@@ -1298,7 +1376,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
)
await self._hass.async_add_executor_job(_write_restore_file)
await self._hass.services.async_call("homeassistant", "restart", {})
await self._hass.services.async_call("homeassistant", "restart", blocking=True)
def _generate_backup_id(date: str, name: str) -> str:

View File

@@ -6,6 +6,8 @@ from dataclasses import asdict, dataclass
from enum import StrEnum
from typing import Any, Self
from homeassistant.exceptions import HomeAssistantError
@dataclass(frozen=True, kw_only=True)
class AddonInfo:
@@ -67,3 +69,7 @@ class AgentBackup:
protected=data["protected"],
size=data["size"],
)
class BackupManagerError(HomeAssistantError):
"""Backup manager error."""

View File

@@ -5,8 +5,8 @@
"description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
},
"automatic_backup_failed_upload_agents": {
"title": "Automatic backup could not be uploaded to agents",
"description": "The automatic backup could not be uploaded to agents {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
"title": "Automatic backup could not be uploaded to the configured locations",
"description": "The automatic backup could not be uploaded to the configured locations {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
}
},
"services": {

View File

@@ -34,7 +34,7 @@ class BangOlufsenData:
type BangOlufsenConfigEntry = ConfigEntry[BangOlufsenData]
PLATFORMS = [Platform.MEDIA_PLAYER]
PLATFORMS = [Platform.EVENT, Platform.MEDIA_PLAYER]
async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry) -> bool:

View File

@@ -79,6 +79,7 @@ class WebsocketNotification(StrEnum):
"""Enum for WebSocket notification types."""
ACTIVE_LISTENING_MODE = "active_listening_mode"
BUTTON = "button"
PLAYBACK_ERROR = "playback_error"
PLAYBACK_METADATA = "playback_metadata"
PLAYBACK_PROGRESS = "playback_progress"
@@ -203,14 +204,60 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
),
]
)
# Map for storing compatibility of devices.
MODEL_SUPPORT_DEVICE_BUTTONS: Final[str] = "device_buttons"
MODEL_SUPPORT_MAP = {
MODEL_SUPPORT_DEVICE_BUTTONS: (
BangOlufsenModel.BEOLAB_8,
BangOlufsenModel.BEOLAB_28,
BangOlufsenModel.BEOSOUND_2,
BangOlufsenModel.BEOSOUND_A5,
BangOlufsenModel.BEOSOUND_A9,
BangOlufsenModel.BEOSOUND_BALANCE,
BangOlufsenModel.BEOSOUND_EMERGE,
BangOlufsenModel.BEOSOUND_LEVEL,
BangOlufsenModel.BEOSOUND_THEATRE,
)
}
# Device events
BANG_OLUFSEN_WEBSOCKET_EVENT: Final[str] = f"{DOMAIN}_websocket_event"
# Dict used to translate native Bang & Olufsen event names to string.json compatible ones
EVENT_TRANSLATION_MAP: dict[str, str] = {
"shortPress (Release)": "short_press_release",
"longPress (Timeout)": "long_press_timeout",
"longPress (Release)": "long_press_release",
"veryLongPress (Timeout)": "very_long_press_timeout",
"veryLongPress (Release)": "very_long_press_release",
}
CONNECTION_STATUS: Final[str] = "CONNECTION_STATUS"
DEVICE_BUTTONS: Final[list[str]] = [
"Bluetooth",
"Microphone",
"Next",
"PlayPause",
"Preset1",
"Preset2",
"Preset3",
"Preset4",
"Previous",
"Volume",
]
DEVICE_BUTTON_EVENTS: Final[list[str]] = [
"short_press_release",
"long_press_timeout",
"long_press_release",
"very_long_press_timeout",
"very_long_press_release",
]
# Beolink Converter NL/ML sources need to be transformed to upper case
BEOLINK_JOIN_SOURCES_TO_UPPER = (
"aux_a",

View File

@@ -0,0 +1,76 @@
"""Event entities for the Bang & Olufsen integration."""
from __future__ import annotations
from homeassistant.components.event import EventDeviceClass, EventEntity
from homeassistant.const import CONF_MODEL
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import BangOlufsenConfigEntry
from .const import (
CONNECTION_STATUS,
DEVICE_BUTTON_EVENTS,
DEVICE_BUTTONS,
MODEL_SUPPORT_DEVICE_BUTTONS,
MODEL_SUPPORT_MAP,
WebsocketNotification,
)
from .entity import BangOlufsenEntity
async def async_setup_entry(
hass: HomeAssistant,
config_entry: BangOlufsenConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up Sensor entities from config entry."""
if config_entry.data[CONF_MODEL] in MODEL_SUPPORT_MAP[MODEL_SUPPORT_DEVICE_BUTTONS]:
async_add_entities(
BangOlufsenButtonEvent(config_entry, button_type)
for button_type in DEVICE_BUTTONS
)
class BangOlufsenButtonEvent(BangOlufsenEntity, EventEntity):
"""Event class for Button events."""
_attr_device_class = EventDeviceClass.BUTTON
_attr_entity_registry_enabled_default = False
_attr_event_types = DEVICE_BUTTON_EVENTS
def __init__(self, config_entry: BangOlufsenConfigEntry, button_type: str) -> None:
"""Initialize Button."""
super().__init__(config_entry, config_entry.runtime_data.client)
self._attr_unique_id = f"{self._unique_id}_{button_type}"
# Make the native button name Home Assistant compatible
self._attr_translation_key = button_type.lower()
self._button_type = button_type
async def async_added_to_hass(self) -> None:
"""Listen to WebSocket button events."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{self._unique_id}_{CONNECTION_STATUS}",
self._async_update_connection_state,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{self._unique_id}_{WebsocketNotification.BUTTON}_{self._button_type}",
self._async_handle_event,
)
)
@callback
def _async_handle_event(self, event: str) -> None:
"""Handle event."""
self._trigger_event(event)
self.async_write_ha_state()

View File

@@ -1,7 +1,12 @@
{
"common": {
"jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity.",
"jid_options_name": "JID options",
"jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity."
"long_press_release": "Release of long press",
"long_press_timeout": "Long press",
"short_press_release": "Release of short press",
"very_long_press_release": "Release of very long press",
"very_long_press_timeout": "Very long press"
},
"config": {
"error": {
@@ -29,6 +34,150 @@
}
}
},
"entity": {
"event": {
"bluetooth": {
"name": "Bluetooth",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"microphone": {
"name": "Microphone",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"next": {
"name": "Next",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"playpause": {
"name": "Play / Pause",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"preset1": {
"name": "Favourite 1",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"preset2": {
"name": "Favourite 2",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"preset3": {
"name": "Favourite 3",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"preset4": {
"name": "Favourite 4",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"previous": {
"name": "Previous",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"volume": {
"name": "Volume",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
}
}
},
"selector": {
"source_ids": {
"options": {

View File

@@ -3,8 +3,10 @@
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from mozart_api.models import (
ButtonEvent,
ListeningModeProps,
PlaybackContentMetadata,
PlaybackError,
@@ -26,6 +28,7 @@ from homeassistant.util.enum import try_parse_enum
from .const import (
BANG_OLUFSEN_WEBSOCKET_EVENT,
CONNECTION_STATUS,
EVENT_TRANSLATION_MAP,
WebsocketNotification,
)
from .entity import BangOlufsenBase
@@ -54,6 +57,8 @@ class BangOlufsenWebsocket(BangOlufsenBase):
self._client.get_active_listening_mode_notifications(
self.on_active_listening_mode
)
self._client.get_button_notifications(self.on_button_notification)
self._client.get_playback_error_notifications(
self.on_playback_error_notification
)
@@ -104,6 +109,19 @@ class BangOlufsenWebsocket(BangOlufsenBase):
notification,
)
def on_button_notification(self, notification: ButtonEvent) -> None:
"""Send button dispatch."""
# State is expected to always be available.
if TYPE_CHECKING:
assert notification.state
# Send to event entity
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{WebsocketNotification.BUTTON}_{notification.button}",
EVENT_TRANSLATION_MAP[notification.state],
)
def on_notification_notification(
self, notification: WebsocketNotificationTag
) -> None:

View File

@@ -71,27 +71,6 @@ class BluesoundConfigFlow(ConfigFlow, domain=DOMAIN):
),
)
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
"""Import bluesound config entry from configuration.yaml."""
session = async_get_clientsession(self.hass)
async with Player(
import_data[CONF_HOST], import_data[CONF_PORT], session=session
) as player:
try:
sync_status = await player.sync_status(timeout=1)
except PlayerUnreachableError:
return self.async_abort(reason="cannot_connect")
await self.async_set_unique_id(
format_unique_id(sync_status.mac, import_data[CONF_PORT])
)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=sync_status.name,
data=import_data,
)
async def async_step_zeroconf(
self, discovery_info: zeroconf.ZeroconfServiceInfo
) -> ConfigFlowResult:

View File

@@ -15,7 +15,6 @@ import voluptuous as vol
from homeassistant.components import media_source
from homeassistant.components.media_player import (
PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA,
BrowseMedia,
MediaPlayerEntity,
MediaPlayerEntityFeature,
@@ -23,16 +22,10 @@ from homeassistant.components.media_player import (
MediaType,
async_process_play_media_url,
)
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_HOST, CONF_HOSTS, CONF_NAME, CONF_PORT
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import (
config_validation as cv,
entity_platform,
issue_registry as ir,
)
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.device_registry import (
CONNECTION_NETWORK_MAC,
DeviceInfo,
@@ -43,10 +36,9 @@ from homeassistant.helpers.dispatcher import (
async_dispatcher_send,
)
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
import homeassistant.util.dt as dt_util
from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN, INTEGRATION_TITLE
from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN
from .utils import dispatcher_join_signal, dispatcher_unjoin_signal, format_unique_id
if TYPE_CHECKING:
@@ -71,64 +63,6 @@ SYNC_STATUS_INTERVAL = timedelta(minutes=5)
POLL_TIMEOUT = 120
PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_HOSTS): vol.All(
cv.ensure_list,
[
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
],
)
}
)
async def _async_import(hass: HomeAssistant, config: ConfigType) -> None:
"""Import config entry from configuration.yaml."""
if not hass.config_entries.async_entries(DOMAIN):
# Start import flow
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config
)
if (
result["type"] == FlowResultType.ABORT
and result["reason"] == "cannot_connect"
):
ir.async_create_issue(
hass,
DOMAIN,
f"deprecated_yaml_import_issue_{result['reason']}",
breaks_in_ha_version="2025.2.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key=f"deprecated_yaml_import_issue_{result['reason']}",
translation_placeholders={
"domain": DOMAIN,
"integration_title": INTEGRATION_TITLE,
},
)
return
ir.async_create_issue(
hass,
HOMEASSISTANT_DOMAIN,
f"deprecated_yaml_{DOMAIN}",
breaks_in_ha_version="2025.2.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": INTEGRATION_TITLE,
},
)
async def async_setup_entry(
hass: HomeAssistant,
@@ -159,22 +93,6 @@ async def async_setup_entry(
async_add_entities([bluesound_player], update_before_add=True)
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None,
) -> None:
"""Trigger import flows."""
hosts = config.get(CONF_HOSTS, [])
for host in hosts:
import_data = {
CONF_HOST: host[CONF_HOST],
CONF_PORT: host.get(CONF_PORT, 11000),
}
hass.async_create_task(_async_import(hass, import_data))
class BluesoundPlayer(MediaPlayerEntity):
"""Representation of a Bluesound Player."""

View File

@@ -20,6 +20,6 @@
"bluetooth-auto-recovery==1.4.2",
"bluetooth-data-tools==1.20.0",
"dbus-fast==2.24.3",
"habluetooth==3.6.0"
"habluetooth==3.7.0"
]
}

View File

@@ -6,5 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/bring",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["bring_api"],
"requirements": ["bring-api==0.9.1"]
}

View File

@@ -12,7 +12,7 @@
}
},
"discovery_confirm": {
"description": "Do you want to setup {name}?"
"description": "Do you want to set up {name}?"
},
"reconfigure": {
"description": "Reconfigure your Cambridge Audio Streamer.",
@@ -28,7 +28,7 @@
"cannot_connect": "Failed to connect to Cambridge Audio device. Please make sure the device is powered up and connected to the network. Try power-cycling the device if it does not connect."
},
"abort": {
"wrong_device": "This Cambridge Audio device does not match the existing device id. Please make sure you entered the correct IP address.",
"wrong_device": "This Cambridge Audio device does not match the existing device ID. Please make sure you entered the correct IP address.",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"

View File

@@ -516,6 +516,19 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
"""Flag supported features."""
return self._attr_supported_features
@property
def supported_features_compat(self) -> CameraEntityFeature:
"""Return the supported features as CameraEntityFeature.
Remove this compatibility shim in 2025.1 or later.
"""
features = self.supported_features
if type(features) is int: # noqa: E721
new_features = CameraEntityFeature(features)
self._report_deprecated_supported_features_values(new_features)
return new_features
return features
@cached_property
def is_recording(self) -> bool:
"""Return true if the device is recording."""
@@ -569,7 +582,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
self._deprecate_attr_frontend_stream_type_logged = True
return self._attr_frontend_stream_type
if CameraEntityFeature.STREAM not in self.supported_features:
if CameraEntityFeature.STREAM not in self.supported_features_compat:
return None
if (
self._webrtc_provider
@@ -798,7 +811,9 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
async def async_internal_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_internal_added_to_hass()
self.__supports_stream = self.supported_features & CameraEntityFeature.STREAM
self.__supports_stream = (
self.supported_features_compat & CameraEntityFeature.STREAM
)
await self.async_refresh_providers(write_state=False)
async def async_refresh_providers(self, *, write_state: bool = True) -> None:
@@ -838,7 +853,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
self, fn: Callable[[HomeAssistant, Camera], Coroutine[None, None, _T | None]]
) -> _T | None:
"""Get first provider that supports this camera."""
if CameraEntityFeature.STREAM not in self.supported_features:
if CameraEntityFeature.STREAM not in self.supported_features_compat:
return None
return await fn(self.hass, self)
@@ -896,7 +911,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
def camera_capabilities(self) -> CameraCapabilities:
"""Return the camera capabilities."""
frontend_stream_types = set()
if CameraEntityFeature.STREAM in self.supported_features:
if CameraEntityFeature.STREAM in self.supported_features_compat:
if self._supports_native_sync_webrtc or self._supports_native_async_webrtc:
# The camera has a native WebRTC implementation
frontend_stream_types.add(StreamType.WEB_RTC)
@@ -916,7 +931,8 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
"""
super().async_write_ha_state()
if self.__supports_stream != (
supports_stream := self.supported_features & CameraEntityFeature.STREAM
supports_stream := self.supported_features_compat
& CameraEntityFeature.STREAM
):
self.__supports_stream = supports_stream
self._invalidate_camera_capabilities_cache()

View File

@@ -3,7 +3,6 @@
from __future__ import annotations
import logging
import re
from pexpect import pxssh
import voluptuous as vol
@@ -101,11 +100,11 @@ class CiscoDeviceScanner(DeviceScanner):
return False
def _get_arp_data(self):
def _get_arp_data(self) -> str | None:
"""Open connection to the router and get arp entries."""
try:
cisco_ssh = pxssh.pxssh()
cisco_ssh: pxssh.pxssh[str] = pxssh.pxssh(encoding="uft-8")
cisco_ssh.login(
self.host,
self.username,
@@ -115,12 +114,11 @@ class CiscoDeviceScanner(DeviceScanner):
)
# Find the hostname
initial_line = cisco_ssh.before.decode("utf-8").splitlines()
initial_line = (cisco_ssh.before or "").splitlines()
router_hostname = initial_line[len(initial_line) - 1]
router_hostname += "#"
# Set the discovered hostname as prompt
regex_expression = f"(?i)^{router_hostname}".encode()
cisco_ssh.PROMPT = re.compile(regex_expression, re.MULTILINE)
cisco_ssh.PROMPT = f"(?i)^{router_hostname}"
# Allow full arp table to print at once
cisco_ssh.sendline("terminal length 0")
cisco_ssh.prompt(1)
@@ -128,13 +126,11 @@ class CiscoDeviceScanner(DeviceScanner):
cisco_ssh.sendline("show ip arp")
cisco_ssh.prompt(1)
devices_result = cisco_ssh.before
return devices_result.decode("utf-8")
except pxssh.ExceptionPxssh as px_e:
_LOGGER.error("Failed to login via pxssh: %s", px_e)
return None
return None
return cisco_ssh.before
def _parse_cisco_mac_address(cisco_hardware_addr):

View File

@@ -6,5 +6,5 @@
"iot_class": "local_polling",
"loggers": ["pexpect", "ptyprocess"],
"quality_scale": "legacy",
"requirements": ["pexpect==4.6.0"]
"requirements": ["pexpect==4.9.0"]
}

View File

@@ -5,9 +5,10 @@ from __future__ import annotations
import base64
from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
import hashlib
from typing import Any, Self
import logging
from typing import Any
from aiohttp import ClientError, ClientTimeout, StreamReader
from aiohttp import ClientError, ClientTimeout
from hass_nabucasa import Cloud, CloudError
from hass_nabucasa.cloud_api import (
async_files_delete_file,
@@ -18,11 +19,13 @@ from hass_nabucasa.cloud_api import (
from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import ChunkAsyncStreamIterator
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .client import CloudClient
from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT
_LOGGER = logging.getLogger(__name__)
_STORAGE_BACKUP = "backup"
@@ -71,31 +74,6 @@ def async_register_backup_agents_listener(
return unsub
class ChunkAsyncStreamIterator:
"""Async iterator for chunked streams.
Based on aiohttp.streams.ChunkTupleAsyncStreamIterator, but yields
bytes instead of tuple[bytes, bool].
"""
__slots__ = ("_stream",)
def __init__(self, stream: StreamReader) -> None:
"""Initialize."""
self._stream = stream
def __aiter__(self) -> Self:
"""Iterate."""
return self
async def __anext__(self) -> bytes:
"""Yield next chunk."""
rv = await self._stream.readchunk()
if rv == (b"", False):
raise StopAsyncIteration
return rv[0]
class CloudBackupAgent(BackupAgent):
"""Cloud backup agent."""
@@ -179,6 +157,11 @@ class CloudBackupAgent(BackupAgent):
headers=details["headers"] | {"content-length": str(backup.size)},
timeout=ClientTimeout(connect=10.0, total=43200.0), # 43200s == 12h
)
_LOGGER.log(
logging.DEBUG if upload_status.status < 400 else logging.WARNING,
"Backup upload status: %s",
upload_status.status,
)
upload_status.raise_for_status()
except (TimeoutError, ClientError) as err:
raise BackupAgentError("Failed to upload backup") from err
@@ -208,6 +191,7 @@ class CloudBackupAgent(BackupAgent):
"""List backups."""
try:
backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP)
_LOGGER.debug("Cloud backups: %s", backups)
except (ClientError, CloudError) as err:
raise BackupAgentError("Failed to list backups") from err

View File

@@ -46,6 +46,13 @@ def async_setup(hass: HomeAssistant) -> bool:
hass.http.register_view(OptionManagerFlowIndexView(hass.config_entries.options))
hass.http.register_view(OptionManagerFlowResourceView(hass.config_entries.options))
hass.http.register_view(
SubentryManagerFlowIndexView(hass.config_entries.subentries)
)
hass.http.register_view(
SubentryManagerFlowResourceView(hass.config_entries.subentries)
)
websocket_api.async_register_command(hass, config_entries_get)
websocket_api.async_register_command(hass, config_entry_disable)
websocket_api.async_register_command(hass, config_entry_get_single)
@@ -54,6 +61,9 @@ def async_setup(hass: HomeAssistant) -> bool:
websocket_api.async_register_command(hass, config_entries_progress)
websocket_api.async_register_command(hass, ignore_config_flow)
websocket_api.async_register_command(hass, config_subentry_delete)
websocket_api.async_register_command(hass, config_subentry_list)
return True
@@ -285,6 +295,66 @@ class OptionManagerFlowResourceView(
return await super().post(request, flow_id)
class SubentryManagerFlowIndexView(
FlowManagerIndexView[config_entries.ConfigSubentryFlowManager]
):
"""View to create subentry flows."""
url = "/api/config/config_entries/subentries/flow"
name = "api:config:config_entries:subentries:flow"
@require_admin(
error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT)
)
@RequestDataValidator(
vol.Schema(
{
vol.Required("handler"): vol.All(vol.Coerce(tuple), (str, str)),
vol.Optional("show_advanced_options", default=False): cv.boolean,
},
extra=vol.ALLOW_EXTRA,
)
)
async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response:
"""Handle a POST request.
handler in request is [entry_id, subentry_type].
"""
return await super()._post_impl(request, data)
def get_context(self, data: dict[str, Any]) -> dict[str, Any]:
"""Return context."""
context = super().get_context(data)
context["source"] = config_entries.SOURCE_USER
if subentry_id := data.get("subentry_id"):
context["source"] = config_entries.SOURCE_RECONFIGURE
context["subentry_id"] = subentry_id
return context
class SubentryManagerFlowResourceView(
FlowManagerResourceView[config_entries.ConfigSubentryFlowManager]
):
"""View to interact with the subentry flow manager."""
url = "/api/config/config_entries/subentries/flow/{flow_id}"
name = "api:config:config_entries:subentries:flow:resource"
@require_admin(
error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT)
)
async def get(self, request: web.Request, /, flow_id: str) -> web.Response:
"""Get the current state of a data_entry_flow."""
return await super().get(request, flow_id)
@require_admin(
error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT)
)
async def post(self, request: web.Request, flow_id: str) -> web.Response:
"""Handle a POST request."""
return await super().post(request, flow_id)
@websocket_api.require_admin
@websocket_api.websocket_command({"type": "config_entries/flow/progress"})
def config_entries_progress(
@@ -588,3 +658,63 @@ async def _async_matching_config_entries_json_fragments(
)
or (filter_is_not_helper and entry.domain not in integrations)
]
@websocket_api.require_admin
@websocket_api.websocket_command(
{
"type": "config_entries/subentries/list",
"entry_id": str,
}
)
@websocket_api.async_response
async def config_subentry_list(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""List subentries of a config entry."""
entry = get_entry(hass, connection, msg["entry_id"], msg["id"])
if entry is None:
return
result = [
{
"subentry_id": subentry.subentry_id,
"subentry_type": subentry.subentry_type,
"title": subentry.title,
"unique_id": subentry.unique_id,
}
for subentry in entry.subentries.values()
]
connection.send_result(msg["id"], result)
@websocket_api.require_admin
@websocket_api.websocket_command(
{
"type": "config_entries/subentries/delete",
"entry_id": str,
"subentry_id": str,
}
)
@websocket_api.async_response
async def config_subentry_delete(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Delete a subentry of a config entry."""
entry = get_entry(hass, connection, msg["entry_id"], msg["id"])
if entry is None:
return
try:
hass.config_entries.async_remove_subentry(entry, msg["subentry_id"])
except config_entries.UnknownSubEntry:
connection.send_error(
msg["id"], websocket_api.const.ERR_NOT_FOUND, "Config subentry not found"
)
return
connection.send_result(msg["id"])

View File

@@ -75,6 +75,7 @@ async def async_converse(
language: str | None = None,
agent_id: str | None = None,
device_id: str | None = None,
extra_system_prompt: str | None = None,
) -> ConversationResult:
"""Process text and get intent."""
agent = async_get_agent(hass, agent_id)
@@ -99,6 +100,7 @@ async def async_converse(
device_id=device_id,
language=language,
agent_id=agent_id,
extra_system_prompt=extra_system_prompt,
)
with async_conversation_trace() as trace:
trace.add_event(

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.20"]
"requirements": ["hassil==2.1.0", "home-assistant-intents==2025.1.1"]
}

View File

@@ -40,6 +40,9 @@ class ConversationInput:
agent_id: str | None = None
"""Agent to use for processing."""
extra_system_prompt: str | None = None
"""Extra prompt to provide extra info to LLMs how to understand the command."""
def as_dict(self) -> dict[str, Any]:
"""Return input as a dict."""
return {
@@ -49,6 +52,7 @@ class ConversationInput:
"device_id": self.device_id,
"language": self.language,
"agent_id": self.agent_id,
"extra_system_prompt": self.extra_system_prompt,
}

View File

@@ -2,7 +2,7 @@
from __future__ import annotations
from cookidoo_api import Cookidoo, CookidooConfig, CookidooLocalizationConfig
from cookidoo_api import Cookidoo, CookidooConfig, get_localization_options
from homeassistant.const import (
CONF_COUNTRY,
@@ -22,15 +22,17 @@ PLATFORMS: list[Platform] = [Platform.BUTTON, Platform.TODO]
async def async_setup_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool:
"""Set up Cookidoo from a config entry."""
localizations = await get_localization_options(
country=entry.data[CONF_COUNTRY].lower(),
language=entry.data[CONF_LANGUAGE],
)
cookidoo = Cookidoo(
async_get_clientsession(hass),
CookidooConfig(
email=entry.data[CONF_EMAIL],
password=entry.data[CONF_PASSWORD],
localization=CookidooLocalizationConfig(
country_code=entry.data[CONF_COUNTRY].lower(),
language=entry.data[CONF_LANGUAGE],
),
localization=localizations[0],
),
)

View File

@@ -10,7 +10,6 @@ from cookidoo_api import (
Cookidoo,
CookidooAuthException,
CookidooConfig,
CookidooLocalizationConfig,
CookidooRequestException,
get_country_options,
get_localization_options,
@@ -219,18 +218,19 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN):
else:
data_input[CONF_LANGUAGE] = (
await get_localization_options(country=data_input[CONF_COUNTRY].lower())
)[0] # Pick any language to test login
)[0].language # Pick any language to test login
localizations = await get_localization_options(
country=data_input[CONF_COUNTRY].lower(),
language=data_input[CONF_LANGUAGE],
)
session = async_get_clientsession(self.hass)
cookidoo = Cookidoo(
session,
async_get_clientsession(self.hass),
CookidooConfig(
email=data_input[CONF_EMAIL],
password=data_input[CONF_PASSWORD],
localization=CookidooLocalizationConfig(
country_code=data_input[CONF_COUNTRY].lower(),
language=data_input[CONF_LANGUAGE],
),
localization=localizations[0],
),
)
try:

View File

@@ -6,6 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/cookidoo",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["cookidoo_api"],
"quality_scale": "silver",
"requirements": ["cookidoo-api==0.10.0"]
"requirements": ["cookidoo-api==0.11.2"]
}

View File

@@ -300,6 +300,10 @@ class CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
def supported_features(self) -> CoverEntityFeature:
"""Flag supported features."""
if (features := self._attr_supported_features) is not None:
if type(features) is int: # noqa: E721
new_features = CoverEntityFeature(features)
self._report_deprecated_supported_features_values(new_features)
return new_features
return features
supported_features = (

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
from collections.abc import Sequence
import os
from serial.tools.list_ports_common import ListPortInfo
@@ -12,7 +13,7 @@ from .const import DONT_USE_USB, MANUAL_PATH, REFRESH_LIST
def list_ports_as_str(
serial_ports: list[ListPortInfo], no_usb_option: bool = True
serial_ports: Sequence[ListPortInfo], no_usb_option: bool = True
) -> list[str]:
"""Represent currently available serial ports as string.

View File

@@ -266,7 +266,7 @@ class DeconzBaseLight[_LightDeviceT: Group | Light](
@property
def color_temp_kelvin(self) -> int | None:
"""Return the CT color value."""
if self._device.color_temp is None:
if self._device.color_temp is None or self._device.color_temp == 0:
return None
return color_temperature_mired_to_kelvin(self._device.color_temp)

View File

@@ -0,0 +1 @@
"""Virtual integration: Decorquip."""

View File

@@ -0,0 +1,6 @@
{
"domain": "decorquip",
"name": "Decorquip Dream",
"integration_type": "virtual",
"supported_by": "motion_blinds"
}

View File

@@ -6,5 +6,5 @@
"iot_class": "local_polling",
"loggers": ["pydoods"],
"quality_scale": "legacy",
"requirements": ["pydoods==1.0.2", "Pillow==11.0.0"]
"requirements": ["pydoods==1.0.2", "Pillow==11.1.0"]
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.10", "deebot-client==10.0.1"]
"requirements": ["py-sucks==0.9.10", "deebot-client==10.1.0"]
}

View File

@@ -163,11 +163,6 @@ class EcovacsLegacyVacuum(EcovacsLegacyEntity, StateVacuumEntity):
data: dict[str, Any] = {}
data[ATTR_ERROR] = self.error
# these attributes are deprecated and can be removed in 2025.2
for key, val in self.device.components.items():
attr_name = ATTR_COMPONENT_PREFIX + key
data[attr_name] = int(val * 100)
return data
def return_to_base(self, **kwargs: Any) -> None:

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["openwebif"],
"requirements": ["openwebifpy==4.3.0"]
"requirements": ["openwebifpy==4.3.1"]
}

View File

@@ -22,5 +22,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["eq3btsmart"],
"requirements": ["eq3btsmart==1.4.1", "bleak-esphome==1.1.0"]
"requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.0.0"]
}

View File

@@ -7,7 +7,6 @@ from typing import TYPE_CHECKING
from aioesphomeapi import APIClient, DeviceInfo
from bleak_esphome import connect_scanner
from bleak_esphome.backend.cache import ESPHomeBluetoothCache
from homeassistant.components.bluetooth import async_register_scanner
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback as hass_callback
@@ -28,10 +27,9 @@ def async_connect_scanner(
entry_data: RuntimeEntryData,
cli: APIClient,
device_info: DeviceInfo,
cache: ESPHomeBluetoothCache,
) -> CALLBACK_TYPE:
"""Connect scanner."""
client_data = connect_scanner(cli, device_info, cache, entry_data.available)
client_data = connect_scanner(cli, device_info, entry_data.available)
entry_data.bluetooth_device = client_data.bluetooth_device
client_data.disconnect_callbacks = entry_data.disconnect_callbacks
scanner = client_data.scanner

View File

@@ -6,8 +6,6 @@ from dataclasses import dataclass, field
from functools import cache
from typing import Self
from bleak_esphome.backend.cache import ESPHomeBluetoothCache
from homeassistant.core import HomeAssistant
from homeassistant.helpers.json import JSONEncoder
@@ -22,9 +20,6 @@ class DomainData:
"""Define a class that stores global esphome data in hass.data[DOMAIN]."""
_stores: dict[str, ESPHomeStorage] = field(default_factory=dict)
bluetooth_cache: ESPHomeBluetoothCache = field(
default_factory=ESPHomeBluetoothCache
)
def get_entry_data(self, entry: ESPHomeConfigEntry) -> RuntimeEntryData:
"""Return the runtime entry data associated with this config entry.

View File

@@ -423,9 +423,7 @@ class ESPHomeManager:
if device_info.bluetooth_proxy_feature_flags_compat(api_version):
entry_data.disconnect_callbacks.add(
async_connect_scanner(
hass, entry_data, cli, device_info, self.domain_data.bluetooth_cache
)
async_connect_scanner(hass, entry_data, cli, device_info)
)
if device_info.voice_assistant_feature_flags_compat(api_version) and (

View File

@@ -18,7 +18,7 @@
"requirements": [
"aioesphomeapi==28.0.0",
"esphome-dashboard-api==1.2.3",
"bleak-esphome==1.1.0"
"bleak-esphome==2.0.0"
],
"zeroconf": ["_esphomelib._tcp.local."]
}

View File

@@ -23,10 +23,10 @@ from homeassistant.helpers.dispatcher import (
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.system_info import is_official_image
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
from homeassistant.util.signal_type import SignalType
from homeassistant.util.system_info import is_official_image
DOMAIN = "ffmpeg"

View File

@@ -2,10 +2,11 @@
from datetime import datetime as dt
import logging
from typing import Any
import jwt
from pyflick import FlickAPI
from pyflick.authentication import AbstractFlickAuth
from pyflick.authentication import SimpleFlickAuth
from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET
from homeassistant.config_entries import ConfigEntry
@@ -20,7 +21,8 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client
from .const import CONF_TOKEN_EXPIRY, DOMAIN
from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, CONF_TOKEN_EXPIRY
from .coordinator import FlickConfigEntry, FlickElectricDataCoordinator
_LOGGER = logging.getLogger(__name__)
@@ -29,36 +31,85 @@ CONF_ID_TOKEN = "id_token"
PLATFORMS = [Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool:
"""Set up Flick Electric from a config entry."""
auth = HassFlickAuth(hass, entry)
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][entry.entry_id] = FlickAPI(auth)
coordinator = FlickElectricDataCoordinator(
hass, FlickAPI(auth), entry.data[CONF_SUPPLY_NODE_REF]
)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
class HassFlickAuth(AbstractFlickAuth):
async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Migrate old entry."""
_LOGGER.debug(
"Migrating configuration from version %s.%s",
config_entry.version,
config_entry.minor_version,
)
if config_entry.version > 2:
return False
if config_entry.version == 1:
api = FlickAPI(HassFlickAuth(hass, config_entry))
accounts = await api.getCustomerAccounts()
active_accounts = [
account for account in accounts if account["status"] == "active"
]
# A single active account can be auto-migrated
if (len(active_accounts)) == 1:
account = active_accounts[0]
new_data = {**config_entry.data}
new_data[CONF_ACCOUNT_ID] = account["id"]
new_data[CONF_SUPPLY_NODE_REF] = account["main_consumer"]["supply_node_ref"]
hass.config_entries.async_update_entry(
config_entry,
title=account["address"],
unique_id=account["id"],
data=new_data,
version=2,
)
return True
config_entry.async_start_reauth(hass, data={**config_entry.data})
return False
return True
class HassFlickAuth(SimpleFlickAuth):
"""Implementation of AbstractFlickAuth based on a Home Assistant entity config."""
def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
def __init__(self, hass: HomeAssistant, entry: FlickConfigEntry) -> None:
"""Flick authentication based on a Home Assistant entity config."""
super().__init__(aiohttp_client.async_get_clientsession(hass))
super().__init__(
username=entry.data[CONF_USERNAME],
password=entry.data[CONF_PASSWORD],
client_id=entry.data.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
client_secret=entry.data.get(CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET),
websession=aiohttp_client.async_get_clientsession(hass),
)
self._entry = entry
self._hass = hass
async def _get_entry_token(self):
async def _get_entry_token(self) -> dict[str, Any]:
# No token saved, generate one
if (
CONF_TOKEN_EXPIRY not in self._entry.data
@@ -75,13 +126,8 @@ class HassFlickAuth(AbstractFlickAuth):
async def _update_token(self):
_LOGGER.debug("Fetching new access token")
token = await self.get_new_token(
username=self._entry.data[CONF_USERNAME],
password=self._entry.data[CONF_PASSWORD],
client_id=self._entry.data.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
client_secret=self._entry.data.get(
CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET
),
token = await super().get_new_token(
self._username, self._password, self._client_id, self._client_secret
)
_LOGGER.debug("New token: %s", token)

View File

@@ -1,14 +1,18 @@
"""Config Flow for Flick Electric integration."""
import asyncio
from collections.abc import Mapping
import logging
from typing import Any
from pyflick.authentication import AuthException, SimpleFlickAuth
from aiohttp import ClientResponseError
from pyflick import FlickAPI
from pyflick.authentication import AbstractFlickAuth, SimpleFlickAuth
from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET
from pyflick.types import APIException, AuthException, CustomerAccount
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
@@ -17,12 +21,18 @@ from homeassistant.const import (
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.selector import (
SelectOptionDict,
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
)
from .const import DOMAIN
from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, DOMAIN
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema(
LOGIN_SCHEMA = vol.Schema(
{
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
@@ -35,10 +45,13 @@ DATA_SCHEMA = vol.Schema(
class FlickConfigFlow(ConfigFlow, domain=DOMAIN):
"""Flick config flow."""
VERSION = 1
VERSION = 2
auth: AbstractFlickAuth
accounts: list[CustomerAccount]
data: dict[str, Any]
async def _validate_input(self, user_input):
auth = SimpleFlickAuth(
async def _validate_auth(self, user_input: Mapping[str, Any]) -> bool:
self.auth = SimpleFlickAuth(
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
websession=aiohttp_client.async_get_clientsession(self.hass),
@@ -48,22 +61,83 @@ class FlickConfigFlow(ConfigFlow, domain=DOMAIN):
try:
async with asyncio.timeout(60):
token = await auth.async_get_access_token()
except TimeoutError as err:
token = await self.auth.async_get_access_token()
except (TimeoutError, ClientResponseError) as err:
raise CannotConnect from err
except AuthException as err:
raise InvalidAuth from err
return token is not None
async def async_step_select_account(
self, user_input: Mapping[str, Any] | None = None
) -> ConfigFlowResult:
"""Ask user to select account."""
errors = {}
if user_input is not None and CONF_ACCOUNT_ID in user_input:
self.data[CONF_ACCOUNT_ID] = user_input[CONF_ACCOUNT_ID]
self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref(
user_input[CONF_ACCOUNT_ID]
)
try:
# Ensure supply node is active
await FlickAPI(self.auth).getPricing(self.data[CONF_SUPPLY_NODE_REF])
except (APIException, ClientResponseError):
errors["base"] = "cannot_connect"
except AuthException:
# We should never get here as we have a valid token
return self.async_abort(reason="no_permissions")
else:
# Supply node is active
return await self._async_create_entry()
try:
self.accounts = await FlickAPI(self.auth).getCustomerAccounts()
except (APIException, ClientResponseError):
errors["base"] = "cannot_connect"
active_accounts = [a for a in self.accounts if a["status"] == "active"]
if len(active_accounts) == 0:
return self.async_abort(reason="no_accounts")
if len(active_accounts) == 1:
self.data[CONF_ACCOUNT_ID] = active_accounts[0]["id"]
self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref(
active_accounts[0]["id"]
)
return await self._async_create_entry()
return self.async_show_form(
step_id="select_account",
data_schema=vol.Schema(
{
vol.Required(CONF_ACCOUNT_ID): SelectSelector(
SelectSelectorConfig(
options=[
SelectOptionDict(
value=account["id"], label=account["address"]
)
for account in active_accounts
],
mode=SelectSelectorMode.LIST,
)
)
}
),
errors=errors,
)
async def async_step_user(
self, user_input: dict[str, Any] | None = None
self, user_input: Mapping[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle gathering login info."""
errors = {}
if user_input is not None:
try:
await self._validate_input(user_input)
await self._validate_auth(user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
@@ -72,20 +146,61 @@ class FlickConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(
f"flick_electric_{user_input[CONF_USERNAME]}"
)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=f"Flick Electric: {user_input[CONF_USERNAME]}",
data=user_input,
)
self.data = dict(user_input)
return await self.async_step_select_account(user_input)
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
step_id="user", data_schema=LOGIN_SCHEMA, errors=errors
)
async def async_step_reauth(
self, user_input: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle re-authentication."""
self.data = {**user_input}
return await self.async_step_user(user_input)
async def _async_create_entry(self) -> ConfigFlowResult:
"""Create an entry for the flow."""
await self.async_set_unique_id(self.data[CONF_ACCOUNT_ID])
account = self._get_account(self.data[CONF_ACCOUNT_ID])
if self.source == SOURCE_REAUTH:
# Migration completed
if self._get_reauth_entry().version == 1:
self.hass.config_entries.async_update_entry(
self._get_reauth_entry(),
unique_id=self.unique_id,
data=self.data,
version=self.VERSION,
)
return self.async_update_reload_and_abort(
self._get_reauth_entry(),
unique_id=self.unique_id,
title=account["address"],
data=self.data,
)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=account["address"],
data=self.data,
)
def _get_account(self, account_id: str) -> CustomerAccount:
"""Get the account for the account ID."""
return next(a for a in self.accounts if a["id"] == account_id)
def _get_supply_node_ref(self, account_id: str) -> str:
"""Get the supply node ref for the account."""
return self._get_account(account_id)["main_consumer"][CONF_SUPPLY_NODE_REF]
class CannotConnect(HomeAssistantError):
"""Error to indicate we cannot connect."""

View File

@@ -3,6 +3,8 @@
DOMAIN = "flick_electric"
CONF_TOKEN_EXPIRY = "expires"
CONF_ACCOUNT_ID = "account_id"
CONF_SUPPLY_NODE_REF = "supply_node_ref"
ATTR_START_AT = "start_at"
ATTR_END_AT = "end_at"

View File

@@ -0,0 +1,47 @@
"""Data Coordinator for Flick Electric."""
import asyncio
from datetime import timedelta
import logging
import aiohttp
from pyflick import FlickAPI, FlickPrice
from pyflick.types import APIException, AuthException
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=5)
type FlickConfigEntry = ConfigEntry[FlickElectricDataCoordinator]
class FlickElectricDataCoordinator(DataUpdateCoordinator[FlickPrice]):
"""Coordinator for flick power price."""
def __init__(
self, hass: HomeAssistant, api: FlickAPI, supply_node_ref: str
) -> None:
"""Initialize FlickElectricDataCoordinator."""
super().__init__(
hass,
_LOGGER,
name="Flick Electric",
update_interval=SCAN_INTERVAL,
)
self.supply_node_ref = supply_node_ref
self._api = api
async def _async_update_data(self) -> FlickPrice:
"""Fetch pricing data from Flick Electric."""
try:
async with asyncio.timeout(60):
return await self._api.getPricing(self.supply_node_ref)
except AuthException as err:
raise ConfigEntryAuthFailed from err
except (APIException, aiohttp.ClientResponseError) as err:
raise UpdateFailed from err

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["pyflick"],
"requirements": ["PyFlick==0.0.2"]
"requirements": ["PyFlick==1.1.2"]
}

View File

@@ -1,74 +1,72 @@
"""Support for Flick Electric Pricing data."""
import asyncio
from datetime import timedelta
from decimal import Decimal
import logging
from typing import Any
from pyflick import FlickAPI, FlickPrice
from homeassistant.components.sensor import SensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CURRENCY_CENT, UnitOfEnergy
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util.dt import utcnow
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import ATTR_COMPONENTS, ATTR_END_AT, ATTR_START_AT, DOMAIN
from .const import ATTR_COMPONENTS, ATTR_END_AT, ATTR_START_AT
from .coordinator import FlickConfigEntry, FlickElectricDataCoordinator
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=5)
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
hass: HomeAssistant,
entry: FlickConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Flick Sensor Setup."""
api: FlickAPI = hass.data[DOMAIN][entry.entry_id]
coordinator = entry.runtime_data
async_add_entities([FlickPricingSensor(api)], True)
async_add_entities([FlickPricingSensor(coordinator)])
class FlickPricingSensor(SensorEntity):
class FlickPricingSensor(CoordinatorEntity[FlickElectricDataCoordinator], SensorEntity):
"""Entity object for Flick Electric sensor."""
_attr_attribution = "Data provided by Flick Electric"
_attr_native_unit_of_measurement = f"{CURRENCY_CENT}/{UnitOfEnergy.KILO_WATT_HOUR}"
_attr_has_entity_name = True
_attr_translation_key = "power_price"
_attributes: dict[str, Any] = {}
def __init__(self, api: FlickAPI) -> None:
def __init__(self, coordinator: FlickElectricDataCoordinator) -> None:
"""Entity object for Flick Electric sensor."""
self._api: FlickAPI = api
self._price: FlickPrice = None
super().__init__(coordinator)
self._attr_unique_id = f"{coordinator.supply_node_ref}_pricing"
@property
def native_value(self):
def native_value(self) -> Decimal:
"""Return the state of the sensor."""
return self._price.price
# The API should return a unit price with quantity of 1.0 when no start/end time is provided
if self.coordinator.data.quantity != 1:
_LOGGER.warning(
"Unexpected quantity for unit price: %s", self.coordinator.data
)
return self.coordinator.data.cost
@property
def extra_state_attributes(self):
def extra_state_attributes(self) -> dict[str, Any] | None:
"""Return the state attributes."""
return self._attributes
components: dict[str, Decimal] = {}
async def async_update(self) -> None:
"""Get the Flick Pricing data from the web service."""
if self._price and self._price.end_at >= utcnow():
return # Power price data is still valid
async with asyncio.timeout(60):
self._price = await self._api.getPricing()
_LOGGER.debug("Pricing data: %s", self._price)
self._attributes[ATTR_START_AT] = self._price.start_at
self._attributes[ATTR_END_AT] = self._price.end_at
for component in self._price.components:
for component in self.coordinator.data.components:
if component.charge_setter not in ATTR_COMPONENTS:
_LOGGER.warning("Found unknown component: %s", component.charge_setter)
continue
self._attributes[component.charge_setter] = float(component.value)
components[component.charge_setter] = component.value
return {
ATTR_START_AT: self.coordinator.data.start_at,
ATTR_END_AT: self.coordinator.data.end_at,
**components,
}

View File

@@ -9,6 +9,12 @@
"client_id": "Client ID (optional)",
"client_secret": "Client Secret (optional)"
}
},
"select_account": {
"title": "Select account",
"data": {
"account_id": "Account"
}
}
},
"error": {
@@ -17,7 +23,10 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"no_permissions": "Cannot get pricing for this account. Please check user permissions.",
"no_accounts": "No services are active on this Flick account"
}
},
"entity": {

View File

@@ -11,7 +11,7 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_registry import async_migrate_entries
from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries
from .config_flow import DEFAULT_RTSP_PORT
from .const import CONF_RTSP_PORT, DOMAIN, LOGGER, SERVICE_PTZ, SERVICE_PTZ_PRESET
@@ -36,6 +36,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
# Migrate to correct unique IDs for switches
await async_migrate_entities(hass, entry)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
@@ -92,3 +95,24 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
LOGGER.debug("Migration to version %s successful", entry.version)
return True
async def async_migrate_entities(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Migrate old entry."""
@callback
def _update_unique_id(
entity_entry: RegistryEntry,
) -> dict[str, str] | None:
"""Update unique ID of entity entry."""
if (
entity_entry.domain == Platform.SWITCH
and entity_entry.unique_id == "sleep_switch"
):
entity_new_unique_id = f"{entity_entry.config_entry_id}_sleep_switch"
return {"new_unique_id": entity_new_unique_id}
return None
# Migrate entities
await async_migrate_entries(hass, entry.entry_id, _update_unique_id)

View File

@@ -41,7 +41,7 @@ class FoscamSleepSwitch(FoscamEntity, SwitchEntity):
"""Initialize a Foscam Sleep Switch."""
super().__init__(coordinator, config_entry.entry_id)
self._attr_unique_id = "sleep_switch"
self._attr_unique_id = f"{config_entry.entry_id}_sleep_switch"
self._attr_translation_key = "sleep_switch"
self._attr_has_entity_name = True

View File

@@ -214,6 +214,18 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
self._options = options
await self.hass.async_add_executor_job(self.setup)
device_registry = dr.async_get(self.hass)
device_registry.async_get_or_create(
config_entry_id=self.config_entry.entry_id,
configuration_url=f"http://{self.host}",
connections={(dr.CONNECTION_NETWORK_MAC, self.mac)},
identifiers={(DOMAIN, self.unique_id)},
manufacturer="AVM",
model=self.model,
name=self.config_entry.title,
sw_version=self.current_firmware,
)
def setup(self) -> None:
"""Set up FritzboxTools class."""

View File

@@ -68,23 +68,14 @@ class FritzBoxBaseEntity:
"""Init device info class."""
self._avm_wrapper = avm_wrapper
self._device_name = device_name
@property
def mac_address(self) -> str:
"""Return the mac address of the main device."""
return self._avm_wrapper.mac
self.mac_address = self._avm_wrapper.mac
@property
def device_info(self) -> DeviceInfo:
"""Return the device information."""
return DeviceInfo(
configuration_url=f"http://{self._avm_wrapper.host}",
connections={(dr.CONNECTION_NETWORK_MAC, self.mac_address)},
identifiers={(DOMAIN, self._avm_wrapper.unique_id)},
manufacturer="AVM",
model=self._avm_wrapper.model,
name=self._device_name,
sw_version=self._avm_wrapper.current_firmware,
)

View File

@@ -1,6 +1,7 @@
{
"domain": "frontend",
"name": "Home Assistant Frontend",
"after_dependencies": ["backup"],
"codeowners": ["@home-assistant/frontend"],
"dependencies": [
"api",
@@ -20,5 +21,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20241230.0"]
"requirements": ["home-assistant-frontend==20250106.0"]
}

View File

@@ -171,6 +171,8 @@ async def async_test_still(
"""Verify that the still image is valid before we create an entity."""
fmt = None
if not (url := info.get(CONF_STILL_IMAGE_URL)):
# If user didn't specify a still image URL,the automatically generated
# still image that stream generates is always jpeg.
return {}, info.get(CONF_CONTENT_TYPE, "image/jpeg")
try:
if not isinstance(url, template_helper.Template):
@@ -309,8 +311,8 @@ async def async_test_and_preview_stream(
return stream
def register_preview(hass: HomeAssistant) -> None:
"""Set up previews for camera feeds during config flow."""
def register_still_preview(hass: HomeAssistant) -> None:
"""Set up still image preview for camera feeds during config flow."""
hass.data.setdefault(DOMAIN, {})
if not hass.data[DOMAIN].get(IMAGE_PREVIEWS_ACTIVE):
@@ -326,7 +328,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
def __init__(self) -> None:
"""Initialize Generic ConfigFlow."""
self.preview_cam: dict[str, Any] = {}
self.preview_image_settings: dict[str, Any] = {}
self.preview_stream: Stream | None = None
self.user_input: dict[str, Any] = {}
self.title = ""
@@ -343,7 +345,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle the start of the config flow."""
errors = {}
description_placeholders = {}
hass = self.hass
if user_input:
# Secondary validation because serialised vol can't seem to handle this complexity:
@@ -359,8 +360,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
)
except InvalidStreamException as err:
errors[CONF_STREAM_SOURCE] = str(err)
if err.details:
errors["error_details"] = err.details
self.preview_stream = None
if not errors:
user_input[CONF_CONTENT_TYPE] = still_format
@@ -369,18 +368,11 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
name = (
slug(hass, still_url) or slug(hass, stream_url) or DEFAULT_NAME
)
if still_url is None:
# If user didn't specify a still image URL,
# The automatically generated still image that stream generates
# is always jpeg
user_input[CONF_CONTENT_TYPE] = "image/jpeg"
self.user_input = user_input
self.title = name
# temporary preview for user to check the image
self.preview_cam = user_input
self.preview_image_settings = user_input
return await self.async_step_user_confirm()
if "error_details" in errors:
description_placeholders["error"] = errors.pop("error_details")
elif self.user_input:
user_input = self.user_input
else:
@@ -388,7 +380,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_show_form(
step_id="user",
data_schema=build_schema(user_input),
description_placeholders=description_placeholders,
errors=errors,
)
@@ -405,8 +396,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_create_entry(
title=self.title, data={}, options=self.user_input
)
register_preview(self.hass)
preview_url = f"/api/generic/preview_flow_image/{self.flow_id}?t={datetime.now().isoformat()}"
register_still_preview(self.hass)
return self.async_show_form(
step_id="user_confirm",
data_schema=vol.Schema(
@@ -414,7 +404,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
vol.Required(CONF_CONFIRMED_OK, default=False): bool,
}
),
description_placeholders={"preview_url": preview_url},
errors=None,
preview="generic_camera",
)
@@ -430,7 +419,8 @@ class GenericOptionsFlowHandler(OptionsFlow):
def __init__(self) -> None:
"""Initialize Generic IP Camera options flow."""
self.preview_cam: dict[str, Any] = {}
self.preview_image_settings: dict[str, Any] = {}
self.preview_stream: Stream | None = None
self.user_input: dict[str, Any] = {}
async def async_step_init(
@@ -438,42 +428,38 @@ class GenericOptionsFlowHandler(OptionsFlow):
) -> ConfigFlowResult:
"""Manage Generic IP Camera options."""
errors: dict[str, str] = {}
description_placeholders = {}
hass = self.hass
if user_input is not None:
errors, still_format = await async_test_still(
hass, self.config_entry.options | user_input
)
try:
await async_test_and_preview_stream(hass, user_input)
except InvalidStreamException as err:
errors[CONF_STREAM_SOURCE] = str(err)
if err.details:
errors["error_details"] = err.details
# Stream preview during options flow not yet implemented
still_url = user_input.get(CONF_STILL_IMAGE_URL)
if not errors:
if still_url is None:
# If user didn't specify a still image URL,
# The automatically generated still image that stream generates
# is always jpeg
still_format = "image/jpeg"
data = {
CONF_USE_WALLCLOCK_AS_TIMESTAMPS: self.config_entry.options.get(
CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False
),
**user_input,
CONF_CONTENT_TYPE: still_format
or self.config_entry.options.get(CONF_CONTENT_TYPE),
}
self.user_input = data
# temporary preview for user to check the image
self.preview_cam = data
return await self.async_step_confirm_still()
if "error_details" in errors:
description_placeholders["error"] = errors.pop("error_details")
if user_input:
# Secondary validation because serialised vol can't seem to handle this complexity:
if not user_input.get(CONF_STILL_IMAGE_URL) and not user_input.get(
CONF_STREAM_SOURCE
):
errors["base"] = "no_still_image_or_stream_url"
else:
errors, still_format = await async_test_still(hass, user_input)
try:
self.preview_stream = await async_test_and_preview_stream(
hass, user_input
)
except InvalidStreamException as err:
errors[CONF_STREAM_SOURCE] = str(err)
self.preview_stream = None
if not errors:
data = {
CONF_USE_WALLCLOCK_AS_TIMESTAMPS: self.config_entry.options.get(
CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False
),
**user_input,
CONF_CONTENT_TYPE: still_format
or self.config_entry.options.get(CONF_CONTENT_TYPE),
}
self.user_input = data
# temporary preview for user to check the image
self.preview_image_settings = data
return await self.async_step_user_confirm()
elif self.user_input:
user_input = self.user_input
return self.async_show_form(
step_id="init",
data_schema=build_schema(
@@ -481,34 +467,40 @@ class GenericOptionsFlowHandler(OptionsFlow):
True,
self.show_advanced_options,
),
description_placeholders=description_placeholders,
errors=errors,
)
async def async_step_confirm_still(
async def async_step_user_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle user clicking confirm after still preview."""
if user_input:
if ha_stream := self.preview_stream:
# Kill off the temp stream we created.
await ha_stream.stop()
if not user_input.get(CONF_CONFIRMED_OK):
return await self.async_step_init()
return self.async_create_entry(
title=self.config_entry.title,
data=self.user_input,
)
register_preview(self.hass)
preview_url = f"/api/generic/preview_flow_image/{self.flow_id}?t={datetime.now().isoformat()}"
register_still_preview(self.hass)
return self.async_show_form(
step_id="confirm_still",
step_id="user_confirm",
data_schema=vol.Schema(
{
vol.Required(CONF_CONFIRMED_OK, default=False): bool,
}
),
description_placeholders={"preview_url": preview_url},
errors=None,
preview="generic_camera",
)
@staticmethod
async def async_setup_preview(hass: HomeAssistant) -> None:
"""Set up preview WS API."""
websocket_api.async_register_command(hass, ws_start_preview)
class CameraImagePreview(HomeAssistantView):
"""Camera view to temporarily serve an image."""
@@ -534,7 +526,7 @@ class CameraImagePreview(HomeAssistantView):
if not flow:
_LOGGER.warning("Unknown flow while getting image preview")
raise web.HTTPNotFound
user_input = flow.preview_cam
user_input = flow.preview_image_settings
camera = GenericCamera(self.hass, user_input, flow_id, "preview")
if not camera.is_on:
_LOGGER.debug("Camera is off")
@@ -550,7 +542,7 @@ class CameraImagePreview(HomeAssistantView):
{
vol.Required("type"): "generic_camera/start_preview",
vol.Required("flow_id"): str,
vol.Optional("flow_type"): vol.Any("config_flow"),
vol.Optional("flow_type"): vol.Any("config_flow", "options_flow"),
vol.Optional("user_input"): dict,
}
)
@@ -564,11 +556,18 @@ async def ws_start_preview(
_LOGGER.debug("Generating websocket handler for generic camera preview")
flow_id = msg["flow_id"]
flow = cast(
GenericIPCamConfigFlow,
hass.config_entries.flow._progress.get(flow_id), # noqa: SLF001
)
user_input = flow.preview_cam
flow: GenericIPCamConfigFlow | GenericOptionsFlowHandler
if msg.get("flow_type", "config_flow") == "config_flow":
flow = cast(
GenericIPCamConfigFlow,
hass.config_entries.flow._progress.get(flow_id), # noqa: SLF001
)
else: # (flow type == "options flow")
flow = cast(
GenericOptionsFlowHandler,
hass.config_entries.options._progress.get(flow_id), # noqa: SLF001
)
user_input = flow.preview_image_settings
# Create an EntityPlatform, needed for name translations
platform = await async_prepare_setup_platform(hass, {}, CAMERA_DOMAIN, DOMAIN)

View File

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/generic",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["av==13.1.0", "Pillow==11.0.0"]
"requirements": ["av==13.1.0", "Pillow==11.1.0"]
}

View File

@@ -67,11 +67,11 @@
"use_wallclock_as_timestamps": "This option may correct segmenting or crashing issues arising from buggy timestamp implementations on some cameras"
}
},
"confirm_still": {
"title": "Preview",
"description": "![Camera Still Image Preview]({preview_url})",
"user_confirm": {
"title": "Confirmation",
"description": "Please wait for previews to load...",
"data": {
"confirmed_ok": "This image looks good."
"confirmed_ok": "Everything looks good."
}
}
},

View File

@@ -34,6 +34,18 @@
"moderate": "Moderate",
"good": "Good",
"very_good": "Very good"
},
"state_attributes": {
"options": {
"state": {
"very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
"bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
"sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
}
}
}
},
"c6h6": {
@@ -51,6 +63,18 @@
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
},
"state_attributes": {
"options": {
"state": {
"very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
"bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
"sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
}
}
}
},
"o3_index": {
@@ -62,6 +86,18 @@
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
},
"state_attributes": {
"options": {
"state": {
"very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
"bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
"sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
}
}
}
},
"pm10_index": {
@@ -73,6 +109,18 @@
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
},
"state_attributes": {
"options": {
"state": {
"very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
"bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
"sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
}
}
}
},
"pm25_index": {
@@ -84,6 +132,18 @@
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
},
"state_attributes": {
"options": {
"state": {
"very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
"bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
"sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
}
}
}
},
"so2_index": {
@@ -95,6 +155,18 @@
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
},
"state_attributes": {
"options": {
"state": {
"very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
"bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
"sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
"moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
"good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
"very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
}
}
}
}
}

View File

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/google",
"iot_class": "cloud_polling",
"loggers": ["googleapiclient"],
"requirements": ["gcal-sync==6.2.0", "oauth2client==4.1.3", "ical==8.2.0"]
"requirements": ["gcal-sync==7.0.0", "oauth2client==4.1.3", "ical==8.3.0"]
}

View File

@@ -13,7 +13,7 @@
"fields": {
"agent_user_id": {
"name": "Agent user ID",
"description": "Only needed for automations. Specific Home Assistant user id (not username, ID in configuration > users > under username) to sync with Google Assistant. Do not need when you use this action through Home Assistant front end or API. Used in automation script or other place where context.user_id is missing."
"description": "Only needed for automations. Specific Home Assistant user ID (not username, ID in Settings > People > Users > under username) to sync with Google Assistant. Not needed when you use this action through Home Assistant frontend or API. Used in automation, script or other place where context.user_id is missing."
}
}
}

View File

@@ -204,9 +204,7 @@ class GoogleGenerativeAIConversationEntity(
"""Process a sentence."""
result = conversation.ConversationResult(
response=intent.IntentResponse(language=user_input.language),
conversation_id=user_input.conversation_id
if user_input.conversation_id in self.history
else ulid.ulid_now(),
conversation_id=user_input.conversation_id or ulid.ulid_now(),
)
assert result.conversation_id

View File

@@ -66,11 +66,11 @@
"services": {
"upload": {
"name": "Upload media",
"description": "Upload images or videos to Google Photos.",
"description": "Uploads images or videos to Google Photos.",
"fields": {
"config_entry_id": {
"name": "Integration Id",
"description": "The Google Photos integration id."
"name": "Integration ID",
"description": "The Google Photos integration ID."
},
"filename": {
"name": "Filename",

View File

@@ -238,7 +238,7 @@
},
"set": {
"name": "Set",
"description": "Creates/Updates a user group.",
"description": "Creates/Updates a group.",
"fields": {
"object_id": {
"name": "Object ID",

View File

@@ -21,6 +21,7 @@ PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.BUTTON,
Platform.CALENDAR,
Platform.IMAGE,
Platform.SENSOR,
Platform.SWITCH,
Platform.TODO,

View File

@@ -45,7 +45,7 @@ class HabiticaButtonEntityDescription(ButtonEntityDescription):
entity_picture: str | None = None
class HabitipyButtonEntity(StrEnum):
class HabiticaButtonEntity(StrEnum):
"""Habitica button entities."""
RUN_CRON = "run_cron"
@@ -68,14 +68,14 @@ class HabitipyButtonEntity(StrEnum):
BUTTON_DESCRIPTIONS: tuple[HabiticaButtonEntityDescription, ...] = (
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.RUN_CRON,
translation_key=HabitipyButtonEntity.RUN_CRON,
key=HabiticaButtonEntity.RUN_CRON,
translation_key=HabiticaButtonEntity.RUN_CRON,
press_fn=lambda coordinator: coordinator.habitica.run_cron(),
available_fn=lambda data: data.user.needsCron is True,
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.BUY_HEALTH_POTION,
translation_key=HabitipyButtonEntity.BUY_HEALTH_POTION,
key=HabiticaButtonEntity.BUY_HEALTH_POTION,
translation_key=HabiticaButtonEntity.BUY_HEALTH_POTION,
press_fn=lambda coordinator: coordinator.habitica.buy_health_potion(),
available_fn=(
lambda data: (data.user.stats.gp or 0) >= 25
@@ -84,8 +84,8 @@ BUTTON_DESCRIPTIONS: tuple[HabiticaButtonEntityDescription, ...] = (
entity_picture="shop_potion.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.ALLOCATE_ALL_STAT_POINTS,
translation_key=HabitipyButtonEntity.ALLOCATE_ALL_STAT_POINTS,
key=HabiticaButtonEntity.ALLOCATE_ALL_STAT_POINTS,
translation_key=HabiticaButtonEntity.ALLOCATE_ALL_STAT_POINTS,
press_fn=lambda coordinator: coordinator.habitica.allocate_stat_points(),
available_fn=(
lambda data: data.user.preferences.automaticAllocation is True
@@ -93,8 +93,8 @@ BUTTON_DESCRIPTIONS: tuple[HabiticaButtonEntityDescription, ...] = (
),
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.REVIVE,
translation_key=HabitipyButtonEntity.REVIVE,
key=HabiticaButtonEntity.REVIVE,
translation_key=HabiticaButtonEntity.REVIVE,
press_fn=lambda coordinator: coordinator.habitica.revive(),
available_fn=lambda data: data.user.stats.hp == 0,
),
@@ -103,8 +103,8 @@ BUTTON_DESCRIPTIONS: tuple[HabiticaButtonEntityDescription, ...] = (
CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.MPHEAL,
translation_key=HabitipyButtonEntity.MPHEAL,
key=HabiticaButtonEntity.MPHEAL,
translation_key=HabiticaButtonEntity.MPHEAL,
press_fn=(
lambda coordinator: coordinator.habitica.cast_skill(Skill.ETHEREAL_SURGE)
),
@@ -116,8 +116,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
entity_picture="shop_mpheal.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.EARTH,
translation_key=HabitipyButtonEntity.EARTH,
key=HabiticaButtonEntity.EARTH,
translation_key=HabiticaButtonEntity.EARTH,
press_fn=lambda coordinator: coordinator.habitica.cast_skill(Skill.EARTHQUAKE),
available_fn=(
lambda data: (data.user.stats.lvl or 0) >= 13
@@ -127,8 +127,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
entity_picture="shop_earth.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.FROST,
translation_key=HabitipyButtonEntity.FROST,
key=HabiticaButtonEntity.FROST,
translation_key=HabiticaButtonEntity.FROST,
press_fn=(
lambda coordinator: coordinator.habitica.cast_skill(Skill.CHILLING_FROST)
),
@@ -142,8 +142,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
entity_picture="shop_frost.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.DEFENSIVE_STANCE,
translation_key=HabitipyButtonEntity.DEFENSIVE_STANCE,
key=HabiticaButtonEntity.DEFENSIVE_STANCE,
translation_key=HabiticaButtonEntity.DEFENSIVE_STANCE,
press_fn=(
lambda coordinator: coordinator.habitica.cast_skill(Skill.DEFENSIVE_STANCE)
),
@@ -155,8 +155,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
entity_picture="shop_defensiveStance.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.VALOROUS_PRESENCE,
translation_key=HabitipyButtonEntity.VALOROUS_PRESENCE,
key=HabiticaButtonEntity.VALOROUS_PRESENCE,
translation_key=HabiticaButtonEntity.VALOROUS_PRESENCE,
press_fn=(
lambda coordinator: coordinator.habitica.cast_skill(Skill.VALOROUS_PRESENCE)
),
@@ -168,8 +168,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
entity_picture="shop_valorousPresence.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.INTIMIDATE,
translation_key=HabitipyButtonEntity.INTIMIDATE,
key=HabiticaButtonEntity.INTIMIDATE,
translation_key=HabiticaButtonEntity.INTIMIDATE,
press_fn=(
lambda coordinator: coordinator.habitica.cast_skill(Skill.INTIMIDATING_GAZE)
),
@@ -181,8 +181,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
entity_picture="shop_intimidate.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.TOOLS_OF_TRADE,
translation_key=HabitipyButtonEntity.TOOLS_OF_TRADE,
key=HabiticaButtonEntity.TOOLS_OF_TRADE,
translation_key=HabiticaButtonEntity.TOOLS_OF_TRADE,
press_fn=(
lambda coordinator: coordinator.habitica.cast_skill(
Skill.TOOLS_OF_THE_TRADE
@@ -196,8 +196,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
entity_picture="shop_toolsOfTrade.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.STEALTH,
translation_key=HabitipyButtonEntity.STEALTH,
key=HabiticaButtonEntity.STEALTH,
translation_key=HabiticaButtonEntity.STEALTH,
press_fn=lambda coordinator: coordinator.habitica.cast_skill(Skill.STEALTH),
# Stealth buffs stack and it can only be cast if the amount of
# buffs is smaller than the amount of unfinished dailies
@@ -219,8 +219,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
entity_picture="shop_stealth.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.HEAL,
translation_key=HabitipyButtonEntity.HEAL,
key=HabiticaButtonEntity.HEAL,
translation_key=HabiticaButtonEntity.HEAL,
press_fn=(
lambda coordinator: coordinator.habitica.cast_skill(Skill.HEALING_LIGHT)
),
@@ -233,8 +233,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
entity_picture="shop_heal.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.BRIGHTNESS,
translation_key=HabitipyButtonEntity.BRIGHTNESS,
key=HabiticaButtonEntity.BRIGHTNESS,
translation_key=HabiticaButtonEntity.BRIGHTNESS,
press_fn=(
lambda coordinator: coordinator.habitica.cast_skill(
Skill.SEARING_BRIGHTNESS
@@ -248,8 +248,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
entity_picture="shop_brightness.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.PROTECT_AURA,
translation_key=HabitipyButtonEntity.PROTECT_AURA,
key=HabiticaButtonEntity.PROTECT_AURA,
translation_key=HabiticaButtonEntity.PROTECT_AURA,
press_fn=(
lambda coordinator: coordinator.habitica.cast_skill(Skill.PROTECTIVE_AURA)
),
@@ -261,8 +261,8 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
entity_picture="shop_protectAura.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.HEAL_ALL,
translation_key=HabitipyButtonEntity.HEAL_ALL,
key=HabiticaButtonEntity.HEAL_ALL,
translation_key=HabiticaButtonEntity.HEAL_ALL,
press_fn=lambda coordinator: coordinator.habitica.cast_skill(Skill.BLESSING),
available_fn=(
lambda data: (data.user.stats.lvl or 0) >= 14

View File

@@ -31,6 +31,11 @@ ATTR_TASK = "task"
ATTR_DIRECTION = "direction"
ATTR_TARGET = "target"
ATTR_ITEM = "item"
ATTR_TYPE = "type"
ATTR_PRIORITY = "priority"
ATTR_TAG = "tag"
ATTR_KEYWORD = "keyword"
SERVICE_CAST_SKILL = "cast_skill"
SERVICE_START_QUEST = "start_quest"
SERVICE_ACCEPT_QUEST = "accept_quest"
@@ -38,6 +43,8 @@ SERVICE_CANCEL_QUEST = "cancel_quest"
SERVICE_ABORT_QUEST = "abort_quest"
SERVICE_REJECT_QUEST = "reject_quest"
SERVICE_LEAVE_QUEST = "leave_quest"
SERVICE_GET_TASKS = "get_tasks"
SERVICE_SCORE_HABIT = "score_habit"
SERVICE_SCORE_REWARD = "score_reward"

View File

@@ -5,6 +5,7 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import timedelta
from io import BytesIO
import logging
from typing import Any
@@ -18,6 +19,7 @@ from habiticalib import (
TaskFilter,
TooManyRequestsError,
UserData,
UserStyles,
)
from homeassistant.config_entries import ConfigEntry
@@ -130,3 +132,13 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
) from e
else:
await self.async_request_refresh()
async def generate_avatar(self, user_styles: UserStyles) -> bytes:
"""Generate Avatar."""
avatar = BytesIO()
await self.habitica.generate_avatar(
fp=avatar, user_styles=user_styles, fmt="PNG"
)
return avatar.getvalue()

View File

@@ -121,12 +121,6 @@
"rogue": "mdi:ninja"
}
},
"todos": {
"default": "mdi:checkbox-outline"
},
"dailys": {
"default": "mdi:calendar-month"
},
"habits": {
"default": "mdi:contrast-box"
},
@@ -196,6 +190,12 @@
},
"transformation": {
"service": "mdi:flask-round-bottom"
},
"get_tasks": {
"service": "mdi:calendar-export",
"sections": {
"filter": "mdi:calendar-filter"
}
}
}
}

View File

@@ -0,0 +1,76 @@
"""Image platform for Habitica integration."""
from __future__ import annotations
from dataclasses import asdict
from enum import StrEnum
from habiticalib import UserStyles
from homeassistant.components.image import ImageEntity, ImageEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import dt as dt_util
from . import HabiticaConfigEntry
from .coordinator import HabiticaDataUpdateCoordinator
from .entity import HabiticaBase
class HabiticaImageEntity(StrEnum):
"""Image entities."""
AVATAR = "avatar"
async def async_setup_entry(
hass: HomeAssistant,
config_entry: HabiticaConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the habitica image platform."""
coordinator = config_entry.runtime_data
async_add_entities([HabiticaImage(hass, coordinator)])
class HabiticaImage(HabiticaBase, ImageEntity):
"""A Habitica image entity."""
entity_description = ImageEntityDescription(
key=HabiticaImageEntity.AVATAR,
translation_key=HabiticaImageEntity.AVATAR,
)
_attr_content_type = "image/png"
_current_appearance: UserStyles | None = None
_cache: bytes | None = None
def __init__(
self,
hass: HomeAssistant,
coordinator: HabiticaDataUpdateCoordinator,
) -> None:
"""Initialize the image entity."""
super().__init__(coordinator, self.entity_description)
ImageEntity.__init__(self, hass)
self._attr_image_last_updated = dt_util.utcnow()
def _handle_coordinator_update(self) -> None:
"""Check if equipped gear and other things have changed since last avatar image generation."""
new_appearance = UserStyles.from_dict(asdict(self.coordinator.data.user))
if self._current_appearance != new_appearance:
self._current_appearance = new_appearance
self._attr_image_last_updated = dt_util.utcnow()
self._cache = None
return super()._handle_coordinator_update()
async def async_image(self) -> bytes | None:
"""Return cached bytes, otherwise generate new avatar."""
if not self._cache and self._current_appearance:
self._cache = await self.coordinator.generate_avatar(
self._current_appearance
)
return self._cache

View File

@@ -6,7 +6,7 @@ from collections.abc import Callable, Mapping
from dataclasses import asdict, dataclass
from enum import StrEnum
import logging
from typing import TYPE_CHECKING, Any
from typing import Any
from habiticalib import (
ContentData,
@@ -18,32 +18,25 @@ from habiticalib import (
)
from homeassistant.components.sensor import (
DOMAIN as SENSOR_DOMAIN,
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.issue_registry import (
IssueSeverity,
async_create_issue,
async_delete_issue,
)
from homeassistant.helpers.typing import StateType
from .const import ASSETS_URL, DOMAIN
from .const import ASSETS_URL
from .entity import HabiticaBase
from .types import HabiticaConfigEntry
from .util import entity_used_in, get_attribute_points, get_attributes_total
from .util import get_attribute_points, get_attributes_total
_LOGGER = logging.getLogger(__name__)
@dataclass(kw_only=True, frozen=True)
class HabitipySensorEntityDescription(SensorEntityDescription):
"""Habitipy Sensor Description."""
class HabiticaSensorEntityDescription(SensorEntityDescription):
"""Habitica Sensor Description."""
value_fn: Callable[[UserData, ContentData], StateType]
attributes_fn: Callable[[UserData, ContentData], dict[str, Any] | None] | None = (
@@ -53,14 +46,14 @@ class HabitipySensorEntityDescription(SensorEntityDescription):
@dataclass(kw_only=True, frozen=True)
class HabitipyTaskSensorEntityDescription(SensorEntityDescription):
"""Habitipy Task Sensor Description."""
class HabiticaTaskSensorEntityDescription(SensorEntityDescription):
"""Habitica Task Sensor Description."""
value_fn: Callable[[list[TaskData]], list[TaskData]]
class HabitipySensorEntity(StrEnum):
"""Habitipy Entities."""
class HabiticaSensorEntity(StrEnum):
"""Habitica Entities."""
DISPLAY_NAME = "display_name"
HEALTH = "health"
@@ -73,8 +66,6 @@ class HabitipySensorEntity(StrEnum):
GOLD = "gold"
CLASS = "class"
HABITS = "habits"
DAILIES = "dailys"
TODOS = "todos"
REWARDS = "rewards"
GEMS = "gems"
TRINKETS = "trinkets"
@@ -84,105 +75,105 @@ class HabitipySensorEntity(StrEnum):
PERCEPTION = "perception"
SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = (
HabitipySensorEntityDescription(
key=HabitipySensorEntity.DISPLAY_NAME,
translation_key=HabitipySensorEntity.DISPLAY_NAME,
SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.DISPLAY_NAME,
translation_key=HabiticaSensorEntity.DISPLAY_NAME,
value_fn=lambda user, _: user.profile.name,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.HEALTH,
translation_key=HabitipySensorEntity.HEALTH,
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.HEALTH,
translation_key=HabiticaSensorEntity.HEALTH,
suggested_display_precision=0,
value_fn=lambda user, _: user.stats.hp,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.HEALTH_MAX,
translation_key=HabitipySensorEntity.HEALTH_MAX,
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.HEALTH_MAX,
translation_key=HabiticaSensorEntity.HEALTH_MAX,
entity_registry_enabled_default=False,
value_fn=lambda user, _: 50,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.MANA,
translation_key=HabitipySensorEntity.MANA,
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.MANA,
translation_key=HabiticaSensorEntity.MANA,
suggested_display_precision=0,
value_fn=lambda user, _: user.stats.mp,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.MANA_MAX,
translation_key=HabitipySensorEntity.MANA_MAX,
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.MANA_MAX,
translation_key=HabiticaSensorEntity.MANA_MAX,
value_fn=lambda user, _: user.stats.maxMP,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.EXPERIENCE,
translation_key=HabitipySensorEntity.EXPERIENCE,
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.EXPERIENCE,
translation_key=HabiticaSensorEntity.EXPERIENCE,
value_fn=lambda user, _: user.stats.exp,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.EXPERIENCE_MAX,
translation_key=HabitipySensorEntity.EXPERIENCE_MAX,
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.EXPERIENCE_MAX,
translation_key=HabiticaSensorEntity.EXPERIENCE_MAX,
value_fn=lambda user, _: user.stats.toNextLevel,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.LEVEL,
translation_key=HabitipySensorEntity.LEVEL,
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.LEVEL,
translation_key=HabiticaSensorEntity.LEVEL,
value_fn=lambda user, _: user.stats.lvl,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.GOLD,
translation_key=HabitipySensorEntity.GOLD,
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.GOLD,
translation_key=HabiticaSensorEntity.GOLD,
suggested_display_precision=2,
value_fn=lambda user, _: user.stats.gp,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.CLASS,
translation_key=HabitipySensorEntity.CLASS,
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.CLASS,
translation_key=HabiticaSensorEntity.CLASS,
value_fn=lambda user, _: user.stats.Class.value if user.stats.Class else None,
device_class=SensorDeviceClass.ENUM,
options=[item.value for item in HabiticaClass],
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.GEMS,
translation_key=HabitipySensorEntity.GEMS,
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.GEMS,
translation_key=HabiticaSensorEntity.GEMS,
value_fn=lambda user, _: round(user.balance * 4) if user.balance else None,
suggested_display_precision=0,
entity_picture="shop_gem.png",
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.TRINKETS,
translation_key=HabitipySensorEntity.TRINKETS,
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.TRINKETS,
translation_key=HabiticaSensorEntity.TRINKETS,
value_fn=lambda user, _: user.purchased.plan.consecutive.trinkets or 0,
suggested_display_precision=0,
native_unit_of_measurement="",
entity_picture="notif_subscriber_reward.png",
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.STRENGTH,
translation_key=HabitipySensorEntity.STRENGTH,
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.STRENGTH,
translation_key=HabiticaSensorEntity.STRENGTH,
value_fn=lambda user, content: get_attributes_total(user, content, "Str"),
attributes_fn=lambda user, content: get_attribute_points(user, content, "Str"),
suggested_display_precision=0,
native_unit_of_measurement="STR",
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.INTELLIGENCE,
translation_key=HabitipySensorEntity.INTELLIGENCE,
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.INTELLIGENCE,
translation_key=HabiticaSensorEntity.INTELLIGENCE,
value_fn=lambda user, content: get_attributes_total(user, content, "Int"),
attributes_fn=lambda user, content: get_attribute_points(user, content, "Int"),
suggested_display_precision=0,
native_unit_of_measurement="INT",
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.PERCEPTION,
translation_key=HabitipySensorEntity.PERCEPTION,
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.PERCEPTION,
translation_key=HabiticaSensorEntity.PERCEPTION,
value_fn=lambda user, content: get_attributes_total(user, content, "per"),
attributes_fn=lambda user, content: get_attribute_points(user, content, "per"),
suggested_display_precision=0,
native_unit_of_measurement="PER",
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.CONSTITUTION,
translation_key=HabitipySensorEntity.CONSTITUTION,
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.CONSTITUTION,
translation_key=HabiticaSensorEntity.CONSTITUTION,
value_fn=lambda user, content: get_attributes_total(user, content, "con"),
attributes_fn=lambda user, content: get_attribute_points(user, content, "con"),
suggested_display_precision=0,
@@ -221,31 +212,15 @@ TASKS_MAP = {
}
TASK_SENSOR_DESCRIPTION: tuple[HabitipyTaskSensorEntityDescription, ...] = (
HabitipyTaskSensorEntityDescription(
key=HabitipySensorEntity.HABITS,
translation_key=HabitipySensorEntity.HABITS,
TASK_SENSOR_DESCRIPTION: tuple[HabiticaTaskSensorEntityDescription, ...] = (
HabiticaTaskSensorEntityDescription(
key=HabiticaSensorEntity.HABITS,
translation_key=HabiticaSensorEntity.HABITS,
value_fn=lambda tasks: [r for r in tasks if r.Type is TaskType.HABIT],
),
HabitipyTaskSensorEntityDescription(
key=HabitipySensorEntity.DAILIES,
translation_key=HabitipySensorEntity.DAILIES,
value_fn=lambda tasks: [r for r in tasks if r.Type is TaskType.DAILY],
entity_registry_enabled_default=False,
),
HabitipyTaskSensorEntityDescription(
key=HabitipySensorEntity.TODOS,
translation_key=HabitipySensorEntity.TODOS,
value_fn=(
lambda tasks: [
r for r in tasks if r.Type is TaskType.TODO and not r.completed
]
),
entity_registry_enabled_default=False,
),
HabitipyTaskSensorEntityDescription(
key=HabitipySensorEntity.REWARDS,
translation_key=HabitipySensorEntity.REWARDS,
HabiticaTaskSensorEntityDescription(
key=HabiticaSensorEntity.REWARDS,
translation_key=HabiticaSensorEntity.REWARDS,
value_fn=lambda tasks: [r for r in tasks if r.Type is TaskType.REWARD],
),
)
@@ -261,19 +236,19 @@ async def async_setup_entry(
coordinator = config_entry.runtime_data
entities: list[SensorEntity] = [
HabitipySensor(coordinator, description) for description in SENSOR_DESCRIPTIONS
HabiticaSensor(coordinator, description) for description in SENSOR_DESCRIPTIONS
]
entities.extend(
HabitipyTaskSensor(coordinator, description)
HabiticaTaskSensor(coordinator, description)
for description in TASK_SENSOR_DESCRIPTION
)
async_add_entities(entities, True)
class HabitipySensor(HabiticaBase, SensorEntity):
class HabiticaSensor(HabiticaBase, SensorEntity):
"""A generic Habitica sensor."""
entity_description: HabitipySensorEntityDescription
entity_description: HabiticaSensorEntityDescription
@property
def native_value(self) -> StateType:
@@ -298,10 +273,10 @@ class HabitipySensor(HabiticaBase, SensorEntity):
return None
class HabitipyTaskSensor(HabiticaBase, SensorEntity):
class HabiticaTaskSensor(HabiticaBase, SensorEntity):
"""A Habitica task sensor."""
entity_description: HabitipyTaskSensorEntityDescription
entity_description: HabiticaTaskSensorEntityDescription
@property
def native_value(self) -> StateType:
@@ -324,37 +299,3 @@ class HabitipyTaskSensor(HabiticaBase, SensorEntity):
task[map_key] = value
attrs[str(task_id)] = task
return attrs
async def async_added_to_hass(self) -> None:
"""Raise issue when entity is registered and was not disabled."""
if TYPE_CHECKING:
assert self.unique_id
if entity_id := er.async_get(self.hass).async_get_entity_id(
SENSOR_DOMAIN, DOMAIN, self.unique_id
):
if (
self.enabled
and self.entity_description.key
in (HabitipySensorEntity.TODOS, HabitipySensorEntity.DAILIES)
and entity_used_in(self.hass, entity_id)
):
async_create_issue(
self.hass,
DOMAIN,
f"deprecated_task_entity_{self.entity_description.key}",
breaks_in_ha_version="2025.2.0",
is_fixable=False,
severity=IssueSeverity.WARNING,
translation_key="deprecated_task_entity",
translation_placeholders={
"task_name": str(self.name),
"entity": entity_id,
},
)
else:
async_delete_issue(
self.hass,
DOMAIN,
f"deprecated_task_entity_{self.entity_description.key}",
)
await super().async_added_to_hass()

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
from dataclasses import asdict
import logging
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Any
from aiohttp import ClientError
from habiticalib import (
@@ -13,6 +13,9 @@ from habiticalib import (
NotAuthorizedError,
NotFoundError,
Skill,
TaskData,
TaskPriority,
TaskType,
TooManyRequestsError,
)
import voluptuous as vol
@@ -36,10 +39,14 @@ from .const import (
ATTR_DATA,
ATTR_DIRECTION,
ATTR_ITEM,
ATTR_KEYWORD,
ATTR_PATH,
ATTR_PRIORITY,
ATTR_SKILL,
ATTR_TAG,
ATTR_TARGET,
ATTR_TASK,
ATTR_TYPE,
DOMAIN,
EVENT_API_CALL_SUCCESS,
SERVICE_ABORT_QUEST,
@@ -47,6 +54,7 @@ from .const import (
SERVICE_API_CALL,
SERVICE_CANCEL_QUEST,
SERVICE_CAST_SKILL,
SERVICE_GET_TASKS,
SERVICE_LEAVE_QUEST,
SERVICE_REJECT_QUEST,
SERVICE_SCORE_HABIT,
@@ -96,6 +104,21 @@ SERVICE_TRANSFORMATION_SCHEMA = vol.Schema(
}
)
SERVICE_GET_TASKS_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
vol.Optional(ATTR_TYPE): vol.All(
cv.ensure_list, [vol.All(vol.Upper, vol.In({x.name for x in TaskType}))]
),
vol.Optional(ATTR_PRIORITY): vol.All(
cv.ensure_list, [vol.All(vol.Upper, vol.In({x.name for x in TaskPriority}))]
),
vol.Optional(ATTR_TASK): vol.All(cv.ensure_list, [str]),
vol.Optional(ATTR_TAG): vol.All(cv.ensure_list, [str]),
vol.Optional(ATTR_KEYWORD): cv.string,
}
)
SKILL_MAP = {
"pickpocket": Skill.PICKPOCKET,
"backstab": Skill.BACKSTAB,
@@ -403,6 +426,52 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
else:
return asdict(response.data)
async def get_tasks(call: ServiceCall) -> ServiceResponse:
"""Get tasks action."""
entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY])
coordinator = entry.runtime_data
response: list[TaskData] = coordinator.data.tasks
if types := {TaskType[x] for x in call.data.get(ATTR_TYPE, [])}:
response = [task for task in response if task.Type in types]
if priority := {TaskPriority[x] for x in call.data.get(ATTR_PRIORITY, [])}:
response = [task for task in response if task.priority in priority]
if tasks := call.data.get(ATTR_TASK):
response = [
task
for task in response
if str(task.id) in tasks or task.alias in tasks or task.text in tasks
]
if tags := call.data.get(ATTR_TAG):
tag_ids = {
tag.id
for tag in coordinator.data.user.tags
if (tag.name and tag.name.lower())
in (tag.lower() for tag in tags) # Case-insensitive matching
and tag.id
}
response = [
task
for task in response
if any(tag_id in task.tags for tag_id in tag_ids if task.tags)
]
if keyword := call.data.get(ATTR_KEYWORD):
keyword = keyword.lower()
response = [
task
for task in response
if (task.text and keyword in task.text.lower())
or (task.notes and keyword in task.notes.lower())
or any(keyword in item.text.lower() for item in task.checklist)
]
result: dict[str, Any] = {"tasks": response}
return result
hass.services.async_register(
DOMAIN,
SERVICE_API_CALL,
@@ -440,3 +509,10 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
schema=SERVICE_TRANSFORMATION_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN,
SERVICE_GET_TASKS,
get_tasks,
schema=SERVICE_GET_TASKS_SCHEMA,
supports_response=SupportsResponse.ONLY,
)

View File

@@ -94,3 +94,49 @@ transformation:
required: true
selector:
text:
get_tasks:
fields:
config_entry: *config_entry
filter:
collapsed: true
fields:
type:
required: false
selector:
select:
options:
- "habit"
- "daily"
- "todo"
- "reward"
mode: dropdown
translation_key: "type"
multiple: true
sort: true
priority:
required: false
selector:
select:
options:
- "trivial"
- "easy"
- "medium"
- "hard"
mode: dropdown
translation_key: "priority"
multiple: true
sort: false
task:
required: false
selector:
text:
multiple: true
tag:
required: false
selector:
text:
multiple: true
keyword:
required: false
selector:
text:

View File

@@ -3,6 +3,7 @@
"todos": "To-Do's",
"dailies": "Dailies",
"config_entry_name": "Select character",
"task_name": "Task name",
"unit_tasks": "tasks",
"unit_health_points": "HP",
"unit_mana_points": "MP",
@@ -166,6 +167,11 @@
"name": "Daily reminders"
}
},
"image": {
"avatar": {
"name": "Avatar"
}
},
"sensor": {
"display_name": {
"name": "Display name"
@@ -217,14 +223,6 @@
"rogue": "Rogue"
}
},
"todos": {
"name": "[%key:component::habitica::common::todos%]",
"unit_of_measurement": "[%key:component::habitica::common::unit_tasks%]"
},
"dailys": {
"name": "[%key:component::habitica::common::dailies%]",
"unit_of_measurement": "[%key:component::habitica::common::unit_tasks%]"
},
"habits": {
"name": "Habits",
"unit_of_measurement": "[%key:component::habitica::common::unit_tasks%]"
@@ -403,10 +401,6 @@
}
},
"issues": {
"deprecated_task_entity": {
"title": "The Habitica {task_name} sensor is deprecated",
"description": "The Habitica entity `{entity}` is deprecated and will be removed in a future release.\nPlease update your automations and scripts to replace the sensor entity with the newly added todo entity.\nWhen you are done migrating you can disable `{entity}`."
},
"deprecated_api_call": {
"title": "The Habitica action habitica.api_call is deprecated",
"description": "The Habitica action `habitica.api_call` is deprecated and will be removed in Home Assistant 2025.5.0.\n\nPlease update your automations and scripts to use other Habitica actions and entities."
@@ -433,7 +427,7 @@
},
"cast_skill": {
"name": "Cast a skill",
"description": "Use a skill or spell from your Habitica character on a specific task to affect its progress or status.",
"description": "Uses a skill or spell from your Habitica character on a specific task to affect its progress or status.",
"fields": {
"config_entry": {
"name": "[%key:component::habitica::common::config_entry_name%]",
@@ -444,14 +438,14 @@
"description": "Select the skill or spell you want to cast on the task. Only skills corresponding to your character's class can be used."
},
"task": {
"name": "Task name",
"name": "[%key:component::habitica::common::task_name%]",
"description": "The name (or task ID) of the task you want to target with the skill or spell."
}
}
},
"accept_quest": {
"name": "Accept a quest invitation",
"description": "Accept a pending invitation to a quest.",
"description": "Accepts a pending invitation to a quest.",
"fields": {
"config_entry": {
"name": "[%key:component::habitica::common::config_entry_name%]",
@@ -461,7 +455,7 @@
},
"reject_quest": {
"name": "Reject a quest invitation",
"description": "Reject a pending invitation to a quest.",
"description": "Rejects a pending invitation to a quest.",
"fields": {
"config_entry": {
"name": "[%key:component::habitica::common::config_entry_name%]",
@@ -471,7 +465,7 @@
},
"leave_quest": {
"name": "Leave a quest",
"description": "Leave the current quest you are participating in.",
"description": "Leaves the current quest you are participating in.",
"fields": {
"config_entry": {
"name": "[%key:component::habitica::common::config_entry_name%]",
@@ -481,7 +475,7 @@
},
"abort_quest": {
"name": "Abort an active quest",
"description": "Terminate your party's ongoing quest. All progress will be lost and the quest roll returned to the owner's inventory. Only quest leader or group leader can perform this action.",
"description": "Terminates your party's ongoing quest. All progress will be lost and the quest roll returned to the owner's inventory. Only quest leader or group leader can perform this action.",
"fields": {
"config_entry": {
"name": "[%key:component::habitica::common::config_entry_name%]",
@@ -491,7 +485,7 @@
},
"cancel_quest": {
"name": "Cancel a pending quest",
"description": "Cancel a quest that has not yet startet. All accepted and pending invitations will be canceled and the quest roll returned to the owner's inventory. Only quest leader or group leader can perform this action.",
"description": "Cancels a quest that has not yet startet. All accepted and pending invitations will be canceled and the quest roll returned to the owner's inventory. Only quest leader or group leader can perform this action.",
"fields": {
"config_entry": {
"name": "[%key:component::habitica::common::config_entry_name%]",
@@ -501,7 +495,7 @@
},
"start_quest": {
"name": "Force-start a pending quest",
"description": "Begin the quest immediately, bypassing any pending invitations that haven't been accepted or rejected. Only quest leader or group leader can perform this action.",
"description": "Begins the quest immediately, bypassing any pending invitations that haven't been accepted or rejected. Only quest leader or group leader can perform this action.",
"fields": {
"config_entry": {
"name": "[%key:component::habitica::common::config_entry_name%]",
@@ -511,7 +505,7 @@
},
"score_habit": {
"name": "Track a habit",
"description": "Increase the positive or negative streak of a habit to track its progress.",
"description": "Increases the positive or negative streak of a habit to track its progress.",
"fields": {
"config_entry": {
"name": "[%key:component::habitica::common::config_entry_name%]",
@@ -529,7 +523,7 @@
},
"score_reward": {
"name": "Buy a reward",
"description": "Reward yourself and buy one of your custom rewards with gold earned by fulfilling tasks.",
"description": "Buys one of your custom rewards with gold earned by fulfilling tasks.",
"fields": {
"config_entry": {
"name": "[%key:component::habitica::common::config_entry_name%]",
@@ -543,7 +537,7 @@
},
"transformation": {
"name": "Use a transformation item",
"description": "Use a transformation item from your Habitica character's inventory on a member of your party or yourself.",
"description": "Uses a transformation item from your Habitica character's inventory on a member of your party or yourself.",
"fields": {
"config_entry": {
"name": "Select character",
@@ -558,6 +552,42 @@
"description": "The name of the character you want to use the transformation item on. You can also specify the players username or user ID."
}
}
},
"get_tasks": {
"name": "Get tasks",
"description": "Retrieves tasks from your Habitica character.",
"fields": {
"config_entry": {
"name": "[%key:component::habitica::common::config_entry_name%]",
"description": "Choose the Habitica character to retrieve tasks from."
},
"type": {
"name": "Task type",
"description": "Filter tasks by type."
},
"priority": {
"name": "Difficulty",
"description": "Filter tasks by difficulty."
},
"task": {
"name": "[%key:component::habitica::common::task_name%]",
"description": "Select tasks by matching their name (or task ID)."
},
"tag": {
"name": "Tag",
"description": "Filter tasks that have one or more of the selected tags."
},
"keyword": {
"name": "Keyword",
"description": "Filter tasks by keyword, searching across titles, notes, and checklists."
}
},
"sections": {
"filter": {
"name": "Filter options",
"description": "Use the optional filters to narrow the returned tasks."
}
}
}
},
"selector": {
@@ -576,6 +606,22 @@
"seafoam": "Seafoam",
"shiny_seed": "Shiny seed"
}
},
"type": {
"options": {
"daily": "Daily",
"habit": "Habit",
"todo": "To-do",
"reward": "Reward"
}
},
"priority": {
"options": {
"trivial": "Trivial",
"easy": "Easy",
"medium": "Medium",
"hard": "Hard"
}
}
}
}

View File

@@ -10,6 +10,7 @@ from typing import Any, cast
from aiohasupervisor.exceptions import (
SupervisorBadRequestError,
SupervisorError,
SupervisorNotFoundError,
)
from aiohasupervisor.models import (
@@ -23,8 +24,10 @@ from homeassistant.components.backup import (
AgentBackup,
BackupAgent,
BackupReaderWriter,
BackupReaderWriterError,
CreateBackupEvent,
Folder,
IncorrectPasswordError,
NewBackup,
WrittenBackup,
)
@@ -213,6 +216,10 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
password: str | None,
) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]:
"""Create a backup."""
if not include_homeassistant and include_database:
raise HomeAssistantError(
"Cannot create a backup with database but without Home Assistant"
)
manager = self._hass.data[DATA_MANAGER]
include_addons_set: supervisor_backups.AddonSet | set[str] | None = None
@@ -233,20 +240,23 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
]
locations = [agent.location for agent in hassio_agents]
backup = await self._client.backups.partial_backup(
supervisor_backups.PartialBackupOptions(
addons=include_addons_set,
folders=include_folders_set,
homeassistant=include_homeassistant,
name=backup_name,
password=password,
compressed=True,
location=locations or LOCATION_CLOUD_BACKUP,
homeassistant_exclude_database=not include_database,
background=True,
extra=extra_metadata,
try:
backup = await self._client.backups.partial_backup(
supervisor_backups.PartialBackupOptions(
addons=include_addons_set,
folders=include_folders_set,
homeassistant=include_homeassistant,
name=backup_name,
password=password,
compressed=True,
location=locations or LOCATION_CLOUD_BACKUP,
homeassistant_exclude_database=not include_database,
background=True,
extra=extra_metadata,
)
)
)
except SupervisorError as err:
raise BackupReaderWriterError(f"Error creating backup: {err}") from err
backup_task = self._hass.async_create_task(
self._async_wait_for_backup(
backup, remove_after_upload=not bool(locations)
@@ -278,22 +288,35 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
finally:
unsub()
if not backup_id:
raise HomeAssistantError("Backup failed")
raise BackupReaderWriterError("Backup failed")
async def open_backup() -> AsyncIterator[bytes]:
return await self._client.backups.download_backup(backup_id)
try:
return await self._client.backups.download_backup(backup_id)
except SupervisorError as err:
raise BackupReaderWriterError(
f"Error downloading backup: {err}"
) from err
async def remove_backup() -> None:
if not remove_after_upload:
return
await self._client.backups.remove_backup(
backup_id,
options=supervisor_backups.RemoveBackupOptions(
location={LOCATION_CLOUD_BACKUP}
),
)
try:
await self._client.backups.remove_backup(
backup_id,
options=supervisor_backups.RemoveBackupOptions(
location={LOCATION_CLOUD_BACKUP}
),
)
except SupervisorError as err:
raise BackupReaderWriterError(f"Error removing backup: {err}") from err
details = await self._client.backups.backup_info(backup_id)
try:
details = await self._client.backups.backup_info(backup_id)
except SupervisorError as err:
raise BackupReaderWriterError(
f"Error getting backup details: {err}"
) from err
return WrittenBackup(
backup=_backup_details_to_agent_backup(details),
@@ -359,8 +382,16 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
restore_homeassistant: bool,
) -> None:
"""Restore a backup."""
if restore_homeassistant and not restore_database:
raise HomeAssistantError("Cannot restore Home Assistant without database")
manager = self._hass.data[DATA_MANAGER]
# The backup manager has already checked that the backup exists so we don't need to
# check that here.
backup = await manager.backup_agents[agent_id].async_get_backup(backup_id)
if (
backup
and restore_homeassistant
and restore_database != backup.database_included
):
raise HomeAssistantError("Restore database must match backup")
if not restore_homeassistant and restore_database:
raise HomeAssistantError("Cannot restore database without Home Assistant")
restore_addons_set = set(restore_addons) if restore_addons else None
@@ -370,7 +401,6 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
else None
)
manager = self._hass.data[DATA_MANAGER]
restore_location: str | None
if manager.backup_agents[agent_id].domain != DOMAIN:
# Download the backup to the supervisor. Supervisor will clean up the backup
@@ -385,17 +415,24 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
agent = cast(SupervisorBackupAgent, manager.backup_agents[agent_id])
restore_location = agent.location
job = await self._client.backups.partial_restore(
backup_id,
supervisor_backups.PartialRestoreOptions(
addons=restore_addons_set,
folders=restore_folders_set,
homeassistant=restore_homeassistant,
password=password,
background=True,
location=restore_location,
),
)
try:
job = await self._client.backups.partial_restore(
backup_id,
supervisor_backups.PartialRestoreOptions(
addons=restore_addons_set,
folders=restore_folders_set,
homeassistant=restore_homeassistant,
password=password,
background=True,
location=restore_location,
),
)
except SupervisorBadRequestError as err:
# Supervisor currently does not transmit machine parsable error types
message = err.args[0]
if message.startswith("Invalid password for backup"):
raise IncorrectPasswordError(message) from err
raise HomeAssistantError(message) from err
restore_complete = asyncio.Event()

View File

@@ -362,7 +362,7 @@
},
"addons": {
"name": "Add-ons",
"description": "List of add-ons to include in the backup. Use the name slug of the add-on."
"description": "List of add-ons to include in the backup. Use the name slug of each add-on."
},
"folders": {
"name": "Folders",
@@ -418,11 +418,11 @@
},
"folders": {
"name": "[%key:component::hassio::services::backup_partial::fields::folders::name%]",
"description": "[%key:component::hassio::services::backup_partial::fields::folders::description%]"
"description": "List of directories to restore from the backup."
},
"addons": {
"name": "[%key:component::hassio::services::backup_partial::fields::addons::name%]",
"description": "[%key:component::hassio::services::backup_partial::fields::addons::description%]"
"description": "List of add-ons to restore from the backup. Use the name slug of each add-on."
},
"password": {
"name": "[%key:common::config_flow::data::password%]",

View File

@@ -7,17 +7,32 @@ from dataclasses import dataclass
from datetime import timedelta
import logging
from pyheos import Heos, HeosError, HeosOptions, HeosPlayer, const as heos_const
from pyheos import (
Credentials,
Heos,
HeosError,
HeosOptions,
HeosPlayer,
const as heos_const,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP, Platform
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_USERNAME,
EVENT_HOMEASSISTANT_STOP,
Platform,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
from homeassistant.helpers import device_registry as dr, entity_registry as er
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import Throttle
from . import services
@@ -33,6 +48,8 @@ PLATFORMS = [Platform.MEDIA_PLAYER]
MIN_UPDATE_SOURCES = timedelta(seconds=1)
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
_LOGGER = logging.getLogger(__name__)
@@ -49,6 +66,12 @@ class HeosRuntimeData:
type HeosConfigEntry = ConfigEntry[HeosRuntimeData]
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the HEOS component."""
services.register(hass)
return True
async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool:
"""Initialize config entry which represents the HEOS controller."""
# For backwards compat
@@ -56,12 +79,37 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool
hass.config_entries.async_update_entry(entry, unique_id=DOMAIN)
host = entry.data[CONF_HOST]
credentials: Credentials | None = None
if entry.options:
credentials = Credentials(
entry.options[CONF_USERNAME], entry.options[CONF_PASSWORD]
)
# Setting all_progress_events=False ensures that we only receive a
# media position update upon start of playback or when media changes
controller = Heos(HeosOptions(host, all_progress_events=False, auto_reconnect=True))
controller = Heos(
HeosOptions(
host,
all_progress_events=False,
auto_reconnect=True,
credentials=credentials,
)
)
# Auth failure handler must be added before connecting to the host, otherwise
# the event will be missed when login fails during connection.
async def auth_failure(event: str) -> None:
"""Handle authentication failure."""
if event == heos_const.EVENT_USER_CREDENTIALS_INVALID:
entry.async_start_reauth(hass)
entry.async_on_unload(
controller.dispatcher.connect(heos_const.SIGNAL_HEOS_EVENT, auth_failure)
)
try:
# Auto reconnect only operates if initial connection was successful.
await controller.connect()
# Auto reconnect only operates if initial connection was successful.
except HeosError as error:
await controller.disconnect()
_LOGGER.debug("Unable to connect to controller %s: %s", host, error)
@@ -83,12 +131,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool
favorites = await controller.get_favorites()
else:
_LOGGER.warning(
(
"%s is not logged in to a HEOS account and will be unable to"
" retrieve HEOS favorites: Use the 'heos.sign_in' service to"
" sign-in to a HEOS account"
),
host,
"The HEOS System is not logged in: Enter credentials in the integration options to access favorites and streaming services"
)
inputs = await controller.get_input_sources()
except HeosError as error:
@@ -108,7 +151,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool
controller_manager, group_manager, source_manager, players
)
services.register(hass, controller)
group_manager.connect_update()
entry.async_on_unload(group_manager.disconnect_update)
@@ -120,9 +162,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool
async def async_unload_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool:
"""Unload a config entry."""
await entry.runtime_data.controller_manager.disconnect()
services.remove(hass)
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -135,7 +174,6 @@ class ControllerManager:
self._device_registry = None
self._entity_registry = None
self.controller = controller
self._signals = []
async def connect_listeners(self):
"""Subscribe to events of interest."""
@@ -143,23 +181,17 @@ class ControllerManager:
self._entity_registry = er.async_get(self._hass)
# Handle controller events
self._signals.append(
self.controller.dispatcher.connect(
heos_const.SIGNAL_CONTROLLER_EVENT, self._controller_event
)
self.controller.dispatcher.connect(
heos_const.SIGNAL_CONTROLLER_EVENT, self._controller_event
)
# Handle connection-related events
self._signals.append(
self.controller.dispatcher.connect(
heos_const.SIGNAL_HEOS_EVENT, self._heos_event
)
self.controller.dispatcher.connect(
heos_const.SIGNAL_HEOS_EVENT, self._heos_event
)
async def disconnect(self):
"""Disconnect subscriptions."""
for signal_remove in self._signals:
signal_remove()
self._signals.clear()
self.controller.dispatcher.disconnect_all()
await self.controller.disconnect()

View File

@@ -1,17 +1,37 @@
"""Config flow to configure Heos."""
from typing import TYPE_CHECKING, Any
from collections.abc import Mapping
import logging
from typing import TYPE_CHECKING, Any, cast
from urllib.parse import urlparse
from pyheos import Heos, HeosError, HeosOptions
from pyheos import CommandFailedError, Heos, HeosError, HeosOptions
import voluptuous as vol
from homeassistant.components import ssdp
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST
from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
OptionsFlow,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback
from homeassistant.helpers import selector
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
AUTH_SCHEMA = vol.Schema(
{
vol.Optional(CONF_USERNAME): selector.TextSelector(),
vol.Optional(CONF_PASSWORD): selector.TextSelector(
selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD)
),
}
)
def format_title(host: str) -> str:
"""Format the title for config entries."""
@@ -31,11 +51,65 @@ async def _validate_host(host: str, errors: dict[str, str]) -> bool:
return True
async def _validate_auth(
    user_input: dict[str, str], heos: Heos, errors: dict[str, str]
) -> bool:
    """Validate authentication by signing in or out, otherwise populate errors if needed.

    An empty ``user_input`` signs the account out; otherwise both username
    and password are required and used to sign in. On failure an error code
    is recorded in ``errors`` and False is returned.
    """
    if not user_input:
        # Empty form submission clears the stored credentials.
        try:
            await heos.sign_out()
        except HeosError:
            _LOGGER.exception("Unexpected error occurred during sign-out")
            errors["base"] = "unknown"
            return False
        _LOGGER.debug("Successfully signed-out of HEOS Account")
        return True

    # Signing in requires both fields; flag whichever one is absent.
    has_username = CONF_USERNAME in user_input
    has_password = CONF_PASSWORD in user_input
    if has_username or has_password:
        if not has_username:
            errors[CONF_USERNAME] = "username_missing"
            return False
        if not has_password:
            errors[CONF_PASSWORD] = "password_missing"
            return False

    try:
        await heos.sign_in(user_input[CONF_USERNAME], user_input[CONF_PASSWORD])
    except CommandFailedError as err:
        # Error ids 6, 8 and 10 indicate bad credentials; anything else is
        # treated as an unexpected failure.
        if err.error_id in (6, 8, 10):
            _LOGGER.warning("Failed to sign-in to HEOS Account: %s", err)
            errors["base"] = "invalid_auth"
        else:
            _LOGGER.exception("Unexpected error occurred during sign-in")
            errors["base"] = "unknown"
        return False
    except HeosError:
        _LOGGER.exception("Unexpected error occurred during sign-in")
        errors["base"] = "unknown"
        return False

    _LOGGER.debug(
        "Successfully signed-in to HEOS Account: %s",
        heos.signed_in_username,
    )
    return True
class HeosFlowHandler(ConfigFlow, domain=DOMAIN):
"""Define a flow for HEOS."""
VERSION = 1
@staticmethod
@callback
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
    """Create the options flow.

    Factory invoked by Home Assistant when the user opens the entry's
    options; the entry itself is not needed to construct the handler.
    """
    return HeosOptionsFlowHandler()
async def async_step_ssdp(
self, discovery_info: ssdp.SsdpServiceInfo
) -> ConfigFlowResult:
@@ -100,3 +174,52 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN):
data_schema=vol.Schema({vol.Required(CONF_HOST, default=host): str}),
errors=errors,
)
async def async_step_reauth(
    self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
    """Perform reauthentication after auth failure event.

    ``entry_data`` is supplied by the reauth trigger but is not used here;
    the confirm step reads the entry via self._get_reauth_entry() instead.
    """
    return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
    self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
    """Validate account credentials and update options.

    Re-shows the credential form until sign-in (or sign-out) succeeds,
    then stores the submitted values as entry options and reloads.
    """
    errors: dict[str, str] = {}
    reauth_entry = self._get_reauth_entry()

    if user_input is not None:
        # Reuse the live controller held by the running entry to validate.
        controller = cast(
            Heos, reauth_entry.runtime_data.controller_manager.controller
        )
        if await _validate_auth(user_input, controller, errors):
            return self.async_update_reload_and_abort(
                reauth_entry, options=user_input
            )

    # Pre-fill with the rejected input (if any) or the current options.
    suggested = user_input or reauth_entry.options
    return self.async_show_form(
        step_id="reauth_confirm",
        errors=errors,
        data_schema=self.add_suggested_values_to_schema(AUTH_SCHEMA, suggested),
    )
class HeosOptionsFlowHandler(OptionsFlow):
    """Define HEOS options flow."""

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Manage the options.

        Presents the credential form; on a valid submission the values are
        saved as the entry's options, otherwise the form is shown again
        with the collected errors.
        """
        errors: dict[str, str] = {}

        if user_input is not None:
            controller = cast(
                Heos, self.config_entry.runtime_data.controller_manager.controller
            )
            if await _validate_auth(user_input, controller, errors):
                return self.async_create_entry(data=user_input)

        # Pre-fill with the rejected input (if any) or the stored options.
        suggested = user_input or self.config_entry.options
        return self.async_show_form(
            errors=errors,
            step_id="init",
            data_schema=self.add_suggested_values_to_schema(
                AUTH_SCHEMA, suggested
            ),
        )

View File

@@ -123,7 +123,6 @@ class HeosMediaPlayer(MediaPlayerEntity):
"""Initialize."""
self._media_position_updated_at = None
self._player = player
self._signals: list = []
self._source_manager = source_manager
self._group_manager = group_manager
self._attr_unique_id = str(player.player_id)
@@ -150,13 +149,13 @@ class HeosMediaPlayer(MediaPlayerEntity):
async def async_added_to_hass(self) -> None:
"""Device added to hass."""
# Update state when attributes of the player change
self._signals.append(
self.async_on_remove(
self._player.heos.dispatcher.connect(
heos_const.SIGNAL_PLAYER_EVENT, self._player_update
)
)
# Update state when heos changes
self._signals.append(
self.async_on_remove(
async_dispatcher_connect(self.hass, SIGNAL_HEOS_UPDATED, self._heos_updated)
)
# Register this player's entity_id so it can be resolved by the group manager
@@ -304,12 +303,6 @@ class HeosMediaPlayer(MediaPlayerEntity):
self._player.player_id, self.entity_id
)
async def async_will_remove_from_hass(self) -> None:
    """Disconnect the device when removed.

    Runs each stored disconnect callback to detach the dispatcher
    subscriptions registered in async_added_to_hass, then empties the list
    so the callbacks cannot be invoked twice.
    """
    for signal_remove in self._signals:
        signal_remove()
    self._signals.clear()
@property
def available(self) -> bool:
"""Return True if the device is available."""

Some files were not shown because too many files have changed in this diff Show More