Compare commits

...

132 Commits

Author SHA1 Message Date
Franck Nijhof
efe91815fb 2024.4.3 (#115463) 2024-04-12 13:20:48 +02:00
Franck Nijhof
62eee52aed Bump version to 2024.4.3 2024-04-12 12:00:16 +02:00
Bram Kragten
7f6514b03c Update frontend to 20240404.2 (#115460) 2024-04-12 11:59:53 +02:00
Santobert
2ed1cfd68d Bump pybotvac to 0.0.25 (#115435)
Bump pybotvac
2024-04-12 11:59:50 +02:00
Allen Porter
a455e142ac Fix bug in rainbird switch when turning off a switch that is already off (#115421)
Fix bug in rainbird switch when turning off a switch that is already off

Co-authored-by: J. Nick Koston <nick@koston.org>
2024-04-12 11:59:45 +02:00
Jessica Smith
5fa06e5a9c Bump whirlpool-sixth-sense to 0.18.8 (#115393)
bump whirlpool to 0.18.8
2024-04-12 11:59:42 +02:00
J. Nick Koston
4aca39b49e Fix deadlock in holidays dynamic loading (#115385) 2024-04-12 11:59:39 +02:00
jan iversen
d055f98736 Solve modbus test problem (#115376)
Fix test.
2024-04-12 11:59:35 +02:00
jan iversen
98bc7c0ed2 Secure against resetting a non active modbus (#115364) 2024-04-12 11:59:32 +02:00
J. Nick Koston
0d62e2e92a Bump bleak-retry-connector 3.5.0 (#115328) 2024-04-12 11:59:28 +02:00
J. Nick Koston
db5343164f Ensure automations do not execute from a trigger if they are disabled (#115305)
* Ensure automations are stopped as soon as the stop future is set

* revert script changes and move them to #115325
2024-04-12 11:59:24 +02:00
TheJulianJES
5c2e9142fa Bump zha-quirks to 0.0.114 (#115299) 2024-04-12 11:59:21 +02:00
Shay Levy
f941e5d5bb Fix Aranet failure when the Bluetooth proxy is not providing a device name (#115298)
Co-authored-by: J. Nick Koston <nick@koston.org>
2024-04-12 11:59:17 +02:00
Joost Lekkerkerker
150145c9b1 Bump yt-dlp to 2024.04.09 (#115295) 2024-04-12 11:59:14 +02:00
jan iversen
db2005d4ec Bump pymodbus v3.6.7 (#115279)
Bump pymodbus v3.6.7.
2024-04-12 11:59:11 +02:00
Stefan Agner
08bd269696 Support backup of add-ons with hyphens (#115274)
Co-authored-by: J. Nick Koston <nick@koston.org>
2024-04-12 11:59:07 +02:00
Klaas Schoute
5723ed28d3 Bump forecast-solar lib to v3.1.0 (#115272) 2024-04-12 11:59:04 +02:00
Allen Porter
14da34cd4d Fix Google Tasks parsing of remove responses (#115258) 2024-04-12 11:59:01 +02:00
J. Nick Koston
f284273ef6 Fix missing timeout in caldav (#115247) 2024-04-12 11:58:57 +02:00
On Freund
fc60426213 Improve Risco exception logging (#115232) 2024-04-12 11:58:54 +02:00
On Freund
922cc81a62 Configurable maximum concurrency in Risco local (#115226)
* Configurable maximum concurrency in Risco local

* Show advanced Risco options in advanced mode
2024-04-12 11:58:50 +02:00
Mike Degatano
4c6fad8dc3 Add support for adopt data disk repair (#114891) 2024-04-12 11:58:46 +02:00
J. Nick Koston
733e2ec57a Bump aiohttp to 3.9.4 (#110730)
* Bump aiohttp to 3.9.4

This is rc0 for now but will be updated when the full release is out

* cleanup cruft

* regen

* fix tests (these changes are fine)

* chunk size is too small to read since boundary is now enforced

* chunk size is too small to read since boundary is now enforced
2024-04-12 11:55:52 +02:00
Franck Nijhof
04072cb3c1 2024.4.2 (#115186) 2024-04-08 14:17:03 +02:00
Franck Nijhof
05082fcceb Bump version to 2024.4.2 2024-04-08 10:41:50 +02:00
gibwar
8c0b44d6d5 Only reset requested utility meter with no tariff (#115170) 2024-04-08 10:41:41 +02:00
J. Nick Koston
9a342f87c0 Avoid checking for polling if an entity fails to add (#115159)
* Avoid checking for polling if an entity fails to add

* no need to do protected access

* no need to do protected access

* no need to do protected access

* no need to do protected access

* coverage

* fix test

* fix

* broken one must be first
2024-04-08 10:41:38 +02:00
J. Nick Koston
d062ef357b Write timer entity state before firing events (#115151) 2024-04-08 10:41:35 +02:00
rappenze
265d04c593 Fix fibaro sensor additional sensor lookup (#115148) 2024-04-08 10:41:32 +02:00
dontinelli
fc9653581a Bump fyta_cli to 0.3.5 (#115143)
bump fyta_cli to 0.3.5
2024-04-08 10:41:29 +02:00
dontinelli
7bea6eface improve handling of incorrect values in fyta integration (#115134)
* improve handling of incorrect values

* Changes based on review comment

* Apply suggestions from code review

Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>

* update value_fn

* ruff

---------

Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2024-04-08 10:41:26 +02:00
J. Nick Koston
19f3ef763d Terminate scripts with until and while conditions that execute more than 10000 times (#115110) 2024-04-08 10:41:23 +02:00
Maikel Punie
fa88975055 Bump velbus-aio to 2024.4.1 (#115109)
bump velbusaio to 2024.4.1
2024-04-08 10:41:19 +02:00
Joakim Sørensen
1322f38911 Downgrade hass-nabucasa from 0.80.0 to 0.78.0 (#115078) 2024-04-08 10:40:42 +02:00
J. Nick Koston
75127105b9 Fix synology_dsm availability (#115073)
* Remove reload on update failure from synology_dsm

fixes #115062

The coordinator will retry on its own later, there is no
reason to reload here.  This was added in #42697

* fix available checks
2024-04-08 10:34:59 +02:00
Matthias Alphart
d6793a756f Update xknxproject to 3.7.1 (#115053) 2024-04-08 10:34:55 +02:00
Matrix
20e88255df Bump yolink-api to 0.4.2 (#115026) 2024-04-08 10:34:52 +02:00
Maciej Bieniek
2f9f1008a5 Bump brother to version 4.1.0 (#115021)
Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com>
2024-04-08 10:34:49 +02:00
J. Nick Koston
90bc21b7f6 Fix dictionary changed size during iteration in prometheus (#115005)
Fixes #104803
2024-04-08 10:34:45 +02:00
Øyvind Matheson Wergeland
b29eb317bd Upgrade to pynobo 1.8.1 (#114982)
pynobo 1.8.1
2024-04-08 10:34:42 +02:00
Joost Lekkerkerker
dcd9d987a7 Filter out fuzzy translations from Lokalise (#114968) 2024-04-08 10:34:38 +02:00
Robert Svensson
e26ea40570 Bump axis to v61 (#114964) 2024-04-08 10:34:35 +02:00
Joost Lekkerkerker
0a2d79f63e Fix Snapcast Config flow (#114952) 2024-04-08 10:34:31 +02:00
Nathan Spencer
d1b1d6388f Bump pylitterbot to 2023.4.11 (#114918) 2024-04-08 10:34:28 +02:00
Benjamin
93569e3827 Fix missing if statement in homematic (#114832)
* homematic fix issue #114807

Update climate.py

* Update homeassistant/components/homematic/climate.py

---------

Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2024-04-08 10:34:24 +02:00
Niccolò Maggioni
61a359e4d2 Fix hang in SNMP device_tracker implementation (#112815)
Co-authored-by: J. Nick Koston <nick@koston.org>
2024-04-08 10:34:20 +02:00
Franck Nijhof
b1fb77cb4d 2024.4.1 (#114934) 2024-04-05 16:18:02 +02:00
Joost Lekkerkerker
95606135a6 Fix ROVA validation (#114938)
* Fix ROVA validation

* Fix ROVA validation
2024-04-05 14:53:21 +02:00
Aidan Timson
47d9879c0c Pin systembridgemodels to 4.0.4 (#114842) 2024-04-05 14:53:17 +02:00
Franck Nijhof
e3c111b1dd Bump version to 2024.4.1 2024-04-05 12:34:07 +02:00
Joost Lekkerkerker
9937743863 Fix cast dashboard in media browser (#114924) 2024-04-05 12:33:49 +02:00
Joost Lekkerkerker
ed3daed869 Create right import issues in Downloader (#114922)
* Create right import issues in Downloader

* Create right import issues in Downloader

* Create right import issues in Downloader

* Create right import issues in Downloader

* Fix

* Fix

* Fix

* Fix

* Apply suggestions from code review

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>

* Fix

---------

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2024-04-05 12:33:46 +02:00
Åke Strandberg
5d5dc24b33 Show correct model string in myuplink (#114921) 2024-04-05 12:33:43 +02:00
J. Nick Koston
c39d6f0730 Reduce august polling frequency (#114904)
Co-authored-by: TheJulianJES <TheJulianJES@users.noreply.github.com>
2024-04-05 12:33:40 +02:00
J. Nick Koston
87ffd5ac56 Ensure all tables have the default table args in the db_schema (#114895) 2024-04-05 12:33:36 +02:00
Bram Kragten
71877fdeda Update frontend to 20240404.1 (#114890) 2024-04-05 12:33:33 +02:00
Robert Svensson
2434a22e4e Fix Axis reconfigure step not providing protocols as alternatives but as string (#114889) 2024-04-05 12:33:30 +02:00
Jeef
618fa08ab2 Bump weatherflow4py to 0.2.20 (#114888) 2024-04-05 12:33:27 +02:00
Robert Svensson
96003e3562 Fix Axis camera platform support HTTPS (#114886) 2024-04-05 12:33:24 +02:00
Bram Kragten
411e55d059 Update frontend to 20240404.0 (#114859) 2024-04-05 12:33:21 +02:00
Joost Lekkerkerker
58533f02af Fix Downloader YAML import (#114844) 2024-04-05 12:33:18 +02:00
Joost Lekkerkerker
aa14793479 Avoid blocking IO in downloader initialization (#114841)
* Avoid blocking IO in downloader initialization

* Avoid blocking IO in downloader initialization
2024-04-05 12:33:15 +02:00
J. Nick Koston
0191d3e41b Refactor ConfigStore to avoid needing to pass config_dir (#114827)
Co-authored-by: Erik <erik@montnemery.com>
2024-04-05 12:33:12 +02:00
tronikos
319f76cdc8 Bump opower to 0.4.3 (#114826)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2024-04-05 12:33:09 +02:00
J. Nick Koston
530725bbfa Handle ambiguous script actions by using action map order (#114825) 2024-04-05 12:33:06 +02:00
Lex Li
d8ae7d6955 Fix type cast in snmp (#114795) 2024-04-05 12:33:03 +02:00
cdheiser
3d0bafbdc9 Fix Lutron light brightness values (#114794)
Fix brightness values in light.py

Bugfix to set the brightness to 0-100 which is what Lutron expects.
2024-04-05 12:33:00 +02:00
Aaron Bach
ef8e54877f Fix unhandled KeyError during Notion setup (#114787) 2024-04-05 12:32:57 +02:00
Manuel Dipolt
a39e1a6428 Update romy to 0.0.10 (#114785) 2024-04-05 12:32:53 +02:00
Marc Mueller
450be67406 Update romy to 0.0.9 (#114360) 2024-04-05 10:14:00 +02:00
Åke Strandberg
25289e0ca1 Bump myuplink dependency to 0.6.0 (#114767) 2024-04-05 10:06:39 +02:00
Álvaro Fernández Rojas
d983fa6da7 Update aioairzone-cloud to v0.4.7 (#114761) 2024-04-05 10:06:35 +02:00
Franck Nijhof
b61397656c 2024.4.0 (#114764) 2024-04-03 20:38:11 +02:00
Jan-Philipp Benecke
590546a9a5 Use setup_test_component_platform helper for sensor entity component tests instead of hass.components (#114316)
* Use `setup_test_component_platform` helper for sensor entity component tests instead of `hass.components`

* Missing file

* Fix import

* Remove invalid device class
2024-04-03 20:00:56 +02:00
IngoK1
9ba4d26abd Fix for Sonos URL encoding problem #102557 (#109518)
* Fix for URL encoding problem #102557

Fixes the problem "Cannot play media with spaces in folder names to Sonos #102557" by removing the encoding of the strings in the music library.

* Fix type casting problem

* Update media_browser.py to fix pr check findings

Added required casting for all unquote statements to avoid further casting findings in the pr checks

* Update media_browser.py

Checked on linting, let's give it another try

* Update media_browser.py

Updated ruff run

* Update media_browser.py - added version run through ruff

* Update media_browser.py - ruff changes

* Apply ruff formatting

* Update homeassistant/components/sonos/media_browser.py

Co-authored-by: jjlawren <jjlawren@users.noreply.github.com>

* Update homeassistant/components/sonos/media_browser.py

Co-authored-by: jjlawren <jjlawren@users.noreply.github.com>

* Update homeassistant/components/sonos/media_browser.py

Co-authored-by: jjlawren <jjlawren@users.noreply.github.com>

* Update homeassistant/components/sonos/media_browser.py

Co-authored-by: jjlawren <jjlawren@users.noreply.github.com>

---------

Co-authored-by: computeq-admin <51021172+computeq-admin@users.noreply.github.com>
Co-authored-by: Jason Lawrence <jjlawren@users.noreply.github.com>
2024-04-03 19:31:02 +02:00
Franck Nijhof
aa33da546d Bump version to 2024.4.0 2024-04-03 19:09:39 +02:00
Franck Nijhof
3845523a27 Bump version to 2024.4.0b9 2024-04-03 17:55:24 +02:00
Michael
6a7fad0228 Fix Synology DSM setup in case no Surveillance Station permission (#114757) 2024-04-03 17:55:12 +02:00
Bram Kragten
33f07ce035 Update frontend to 20240403.1 (#114756) 2024-04-03 17:55:09 +02:00
Michael Hansen
4302c5c273 Bump intents (#114755) 2024-04-03 17:55:05 +02:00
Robert Resch
b2df1b1c03 Allow passing area/device/entity IDs to floor_id and floor_name (#114748) 2024-04-03 17:55:01 +02:00
Franck Nijhof
0aa134459b Bump version to 2024.4.0b8 2024-04-03 15:35:53 +02:00
Bram Kragten
0ca3700c16 Update frontend to 20240403.0 (#114747) 2024-04-03 15:35:40 +02:00
Joost Lekkerkerker
35ff633d99 Avoid blocking IO in downloader config flow (#114741) 2024-04-03 15:35:36 +02:00
Joost Lekkerkerker
7a2f6ce430 Fix Downloader config flow (#114718) 2024-04-03 15:35:32 +02:00
David F. Mulcahey
7cb603a226 Import zha quirks in the executor (#114685) 2024-04-03 15:35:29 +02:00
Jonas Fors Lellky
43562289e4 Bump flexit_bacnet to 2.2.1 (#114641) 2024-04-03 15:35:26 +02:00
Lenn
79fa7caa41 Rename Motionblinds BLE integration to Motionblinds Bluetooth (#114584) 2024-04-03 15:35:20 +02:00
Franck Nijhof
8bdb27c88b Bump version to 2024.4.0b7 2024-04-03 00:14:07 +02:00
Bram Kragten
f676448f27 Update frontend to 20240402.2 (#114683) 2024-04-03 00:13:57 +02:00
J. Nick Koston
639c4a843b Avoid trying to load platform that are known to not exist in async_prepare_setup_platform (#114659) 2024-04-03 00:13:53 +02:00
G Johansson
02dee34338 Bump holidays to 0.46 (#114657) 2024-04-03 00:13:49 +02:00
Maciej Bieniek
4e0290ce0e Add missing state to the Tractive tracker state sensor (#114654)
Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com>
2024-04-03 00:13:45 +02:00
Robert Svensson
fa2f49693c Bump aiounifi to v74 (#114649) 2024-04-03 00:13:39 +02:00
Pete Sage
2ce784105d Fix Sonos play imported playlists (#113934) 2024-04-03 00:13:35 +02:00
Franck Nijhof
85fb4a27a3 Bump version to 2024.4.0b6 2024-04-02 17:35:01 +02:00
Bram Kragten
8cbedbe26b Update frontend to 20240402.1 (#114646) 2024-04-02 17:34:29 +02:00
Steven B
5bd52da13a Bump ring_doorbell integration to 0.8.9 (#114631) 2024-04-02 17:33:24 +02:00
dotvav
d53848aae4 Fix Overkiz Hitachi OVP air-to-air heat pump (#114611) 2024-04-02 17:23:51 +02:00
puddly
4e0d6f287e Reduce ZHA OTA logbook entries and extraneous updates (#114591) 2024-04-02 17:23:45 +02:00
Franck Nijhof
5af5f3694e Bump version to 2024.4.0b5 2024-04-02 12:28:20 +02:00
Bram Kragten
b539b25682 Update frontend to 20240402.0 (#114627) 2024-04-02 12:28:07 +02:00
Fexiven
ca31479d29 Fix Starlink integration startup issue (#114615) 2024-04-02 12:28:04 +02:00
Franck Nijhof
92dfec3c98 Add floor selector (#114614) 2024-04-02 12:28:00 +02:00
max2697
230c29edbe Bump opower to 0.4.2 (#114608) 2024-04-02 12:27:57 +02:00
Jack Boswell
559fe65471 Catch potential ValueError when getting or setting Starlink sleep values (#114607) 2024-04-02 12:27:54 +02:00
mkmer
384d10a51d Add diagnostic platform to Whirlpool (#114578)
* Add diagnostic platform and tests

* lowercase variable

* Correct doc string
2024-04-02 12:27:50 +02:00
Brett Adams
e5a620545c Fix battery heater in Tessie (#114568) 2024-04-02 12:27:47 +02:00
Maciej Bieniek
7b84e86f89 Improve Shelly RPC device update progress (#114566)
Co-authored-by: Shay Levy <levyshay1@gmail.com>
Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com>
2024-04-02 12:27:44 +02:00
Joost Lekkerkerker
18b6de567d Bump roombapy to 1.8.1 (#114478)
* Bump roombapy to 1.7.0

* Bump

* Bump

* Fix
2024-04-02 12:27:40 +02:00
Pete Sage
a6076a0d33 Display sonos album title with URL encoding (#113693)
* unescape the title

When extracting the title from the item_id, it needs to be unescaped.

* sort imports
2024-04-02 12:27:36 +02:00
Paulus Schoutsen
7164993562 Bump version to 2024.4.0b4 2024-04-02 01:51:42 +00:00
mkmer
bc21836e7e Bump whirlpool-sixth-sense to 0.18.7 (#114606)
Bump sixth-sense to 0.18.7
2024-04-02 01:51:35 +00:00
J. Nick Koston
52612b10fd Avoid storing raw extracted traceback in system_log (#114603)
This is never actually used and takes up quite a bit of ram
2024-04-02 01:51:35 +00:00
J. Nick Koston
623d85ecaa Fix memory leak when importing a platform fails (#114602)
* Fix memory leak when importing a platform fails

re-raising ImportError would trigger a memory leak

* fixes, coverage

* Apply suggestions from code review
2024-04-02 01:51:33 +00:00
J. Nick Koston
43631d5944 Add missing platforms_exist guard to check_config (#114600)
* Add missing platforms_exist guard to check_config

related issue #112811

When the exception hits, the config will end up being saved in the traceback
so the memory is never released.

This matches the check_config code to homeassistant.config to avoid having
the exception thrown.

* patch

* merge branch
2024-04-02 01:51:33 +00:00
J. Nick Koston
112aab47fb Bump zeroconf to 0.132.0 (#114596)
changelog: https://github.com/python-zeroconf/python-zeroconf/compare/0.131.0...0.132.0
2024-04-02 01:51:32 +00:00
Martin Hjelmare
ea13f102e0 Fix reolink media source data access (#114593)
* Add test

* Fix reolink media source data access
2024-04-02 01:51:31 +00:00
jjlawren
bb33725e7f Bump plexapi to 4.15.11 (#114581) 2024-04-02 01:51:31 +00:00
Michael
bd6890ab83 Filter out ignored entries in ssdp step of AVM Fritz!SmartHome (#114574)
filter out ignored entries in ssdp step
2024-04-02 01:51:30 +00:00
Michael
25c611ffc4 Reduce usage of executor threads in AVM Fritz!Tools (#114570)
* call entity state update calls in one executor task

* remove not needed wrapping

* mark as "non-public" method

* add guard against changes on _entity_update_functions
2024-04-02 01:51:29 +00:00
Maikel Punie
fc24b61859 Bump velbusaio to 2024.4.0 (#114569)
Bump velbusaio to 2024.4.0
2024-04-02 01:51:28 +00:00
Joost Lekkerkerker
71588b5c22 Fix wrong icons (#114567)
* Fix wrong icons

* Fix wrong icons
2024-04-02 01:51:27 +00:00
Robert Svensson
14dfb6a255 Bump axis to v60 (#114544)
* Improve Axis MQTT support

* Bump axis to v60
2024-04-02 01:51:27 +00:00
G Johansson
ef97255d9c Fix server update from breaking setup in Speedtest.NET (#114524) 2024-04-02 01:51:26 +00:00
J. Nick Koston
e8afdd67d0 Fix workday doing blocking I/O in the event loop (#114492) 2024-04-02 01:51:25 +00:00
J. Nick Koston
008e4413b5 Fix late load of anyio doing blocking I/O in the event loop (#114491)
* Fix late load of anyio doing blocking I/O in the event loop

httpx loads anyio which loads the asyncio backend in the event loop
as soon as httpx makes the first request

* tweak
2024-04-02 01:51:24 +00:00
dotvav
c373d40e34 Fix Overkiz Hitachi OVP air-to-air heat pump (#114487)
Unpack command parameters instead of passing a list
2024-04-02 01:51:24 +00:00
J. Nick Koston
bdf51553ef Improve sonos test synchronization (#114468) 2024-04-02 01:51:23 +00:00
Michael Hansen
f2edc15687 Add initial support for floors to intents (#114456)
* Add initial support for floors to intents

* Fix climate intent

* More tests

* No return value

* Add requested changes

* Reuse event handler
2024-04-02 01:51:22 +00:00
J. Nick Koston
286a09d737 Mark executor jobs as background unless created from a tracked task (#114450)
* Mark executor jobs as background unless created from a tracked task

If the current task is not tracked the executor job should not
be a background task to avoid delaying startup and shutdown.

Currently any executor job created in an untracked task or
background task would end up being tracked and delaying
startup/shutdown

* import exec has the same issue

* Avoid tracking import executor jobs

There is no reason to track these jobs as they are always awaited
and we do not want to support fire and forget import executor jobs

* fix xiaomi_miio

* lots of fire time changed without background await

* revert changes moved to other PR

* more

* more

* more

* m

* m

* p

* fix fire and forget tests

* scrape

* sonos

* system

* more

* capture callback before block

* coverage

* more

* more races

* more races

* more

* missed some

* more fixes

* missed some more

* fix

* remove unneeded

* one more race

* two
2024-04-02 01:51:21 +00:00
Shay Levy
e8ee2fd25c Cleanup Shelly RGBW light entities (#114410) 2024-04-02 01:51:21 +00:00
244 changed files with 3321 additions and 1061 deletions


@@ -1249,6 +1249,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/sms/ @ocalvo
/homeassistant/components/snapcast/ @luar123
/tests/components/snapcast/ @luar123
/homeassistant/components/snmp/ @nmaggioni
/tests/components/snmp/ @nmaggioni
/homeassistant/components/snooz/ @AustinBrunkhorst
/tests/components/snooz/ @AustinBrunkhorst
/homeassistant/components/solaredge/ @frenck


@@ -93,6 +93,11 @@ from .util.async_ import create_eager_task
from .util.logging import async_activate_log_queue_handler
from .util.package import async_get_user_site, is_virtual_env
with contextlib.suppress(ImportError):
# Ensure anyio backend is imported to avoid it being imported in the event loop
from anyio._backends import _asyncio # noqa: F401
if TYPE_CHECKING:
from .runner import RuntimeConfig


@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"iot_class": "cloud_push",
"loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.4.6"]
"requirements": ["aioairzone-cloud==0.4.7"]
}


@@ -2,10 +2,10 @@
from __future__ import annotations
import logging
from typing import Any
from aranet4.client import Aranet4Advertisement, Version as AranetVersion
from bluetooth_data_tools import human_readable_name
import voluptuous as vol
from homeassistant.components.bluetooth import (
@@ -18,11 +18,15 @@ from homeassistant.data_entry_flow import AbortFlow
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
MIN_VERSION = AranetVersion(1, 2, 0)
def _title(discovery_info: BluetoothServiceInfoBleak) -> str:
return discovery_info.device.name or human_readable_name(
None, "Aranet", discovery_info.address
)
class AranetConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Aranet."""
@@ -61,11 +65,8 @@ class AranetConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm discovery."""
assert self._discovered_device is not None
adv = self._discovered_device
assert self._discovery_info is not None
discovery_info = self._discovery_info
title = adv.readings.name if adv.readings else discovery_info.name
title = _title(self._discovery_info)
if user_input is not None:
return self.async_create_entry(title=title, data={})
@@ -101,10 +102,7 @@ class AranetConfigFlow(ConfigFlow, domain=DOMAIN):
discovery_info.device, discovery_info.advertisement
)
if adv.manufacturer_data:
self._discovered_devices[address] = (
adv.readings.name if adv.readings else discovery_info.name,
adv,
)
self._discovered_devices[address] = (_title(discovery_info), adv)
if not self._discovered_devices:
return self.async_abort(reason="no_devices_found")


@@ -19,5 +19,5 @@
"documentation": "https://www.home-assistant.io/integrations/aranet",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["aranet4==2.2.2"]
"requirements": ["aranet4==2.3.3"]
}


@@ -5,6 +5,7 @@ from __future__ import annotations
from datetime import datetime
from functools import partial
import logging
from time import monotonic
from aiohttp import ClientError
from yalexs.activity import Activity, ActivityType
@@ -26,9 +27,11 @@ _LOGGER = logging.getLogger(__name__)
ACTIVITY_STREAM_FETCH_LIMIT = 10
ACTIVITY_CATCH_UP_FETCH_LIMIT = 2500
INITIAL_LOCK_RESYNC_TIME = 60
# If there is a storm of activity (ie lock, unlock, door open, door close, etc)
# we want to debounce the updates so we don't hammer the activity api too much.
ACTIVITY_DEBOUNCE_COOLDOWN = 3
ACTIVITY_DEBOUNCE_COOLDOWN = 4
@callback
@@ -62,6 +65,7 @@ class ActivityStream(AugustSubscriberMixin):
self.pubnub = pubnub
self._update_debounce: dict[str, Debouncer] = {}
self._update_debounce_jobs: dict[str, HassJob] = {}
self._start_time: float | None = None
@callback
def _async_update_house_id_later(self, debouncer: Debouncer, _: datetime) -> None:
@@ -70,6 +74,7 @@ class ActivityStream(AugustSubscriberMixin):
async def async_setup(self) -> None:
"""Token refresh check and catch up the activity stream."""
self._start_time = monotonic()
update_debounce = self._update_debounce
update_debounce_jobs = self._update_debounce_jobs
for house_id in self._house_ids:
@@ -140,11 +145,25 @@ class ActivityStream(AugustSubscriberMixin):
debouncer = self._update_debounce[house_id]
debouncer.async_schedule_call()
# Schedule two updates past the debounce time
# to ensure we catch the case where the activity
# api does not update right away and we need to poll
# it again. Sometimes the lock operator or a doorbell
# will not show up in the activity stream right away.
# Only do additional polls if we are past
# the initial lock resync time to avoid a storm
# of activity at setup.
if (
not self._start_time
or monotonic() - self._start_time < INITIAL_LOCK_RESYNC_TIME
):
_LOGGER.debug(
"Skipping additional updates due to ongoing initial lock resync time"
)
return
_LOGGER.debug("Scheduling additional updates for house id %s", house_id)
job = self._update_debounce_jobs[house_id]
for step in (1, 2):
future_updates.append(


@@ -40,7 +40,7 @@ ATTR_OPERATION_TAG = "tag"
# Limit battery, online, and hardware updates to hourly
# in order to reduce the number of api requests and
# avoid hitting rate limits
MIN_TIME_BETWEEN_DETAIL_UPDATES = timedelta(hours=1)
MIN_TIME_BETWEEN_DETAIL_UPDATES = timedelta(hours=24)
# Activity needs to be checked more frequently as the
# doorbell motion and rings are included here


@@ -49,9 +49,17 @@ class AugustSubscriberMixin:
"""Call the refresh method."""
self._hass.async_create_task(self._async_refresh(now), eager_start=True)
@callback
def _async_cancel_update_interval(self, _: Event | None = None) -> None:
"""Cancel the scheduled update."""
if self._unsub_interval:
self._unsub_interval()
self._unsub_interval = None
@callback
def _async_setup_listeners(self) -> None:
"""Create interval and stop listeners."""
self._async_cancel_update_interval()
self._unsub_interval = async_track_time_interval(
self._hass,
self._async_scheduled_refresh,
@@ -59,17 +67,12 @@ class AugustSubscriberMixin:
name="august refresh",
)
@callback
def _async_cancel_update_interval(_: Event) -> None:
self._stop_interval = None
if self._unsub_interval:
self._unsub_interval()
self._stop_interval = self._hass.bus.async_listen(
EVENT_HOMEASSISTANT_STOP,
_async_cancel_update_interval,
run_immediately=True,
)
if not self._stop_interval:
self._stop_interval = self._hass.bus.async_listen(
EVENT_HOMEASSISTANT_STOP,
self._async_cancel_update_interval,
run_immediately=True,
)
@callback
def async_unsubscribe_device_id(
@@ -82,13 +85,7 @@ class AugustSubscriberMixin:
if self._subscriptions:
return
if self._unsub_interval:
self._unsub_interval()
self._unsub_interval = None
if self._stop_interval:
self._stop_interval()
self._stop_interval = None
self._async_cancel_update_interval()
@callback
def async_signal_device_id_update(self, device_id: str) -> None:


@@ -812,6 +812,22 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
"""Log helper callback."""
self._logger.log(level, "%s %s", msg, self.name, **kwargs)
async def _async_trigger_if_enabled(
self,
run_variables: dict[str, Any],
context: Context | None = None,
skip_condition: bool = False,
) -> ScriptRunResult | None:
"""Trigger automation if enabled.
If the trigger starts but has a delay, the automation will be triggered
when the delay has passed so we need to make sure its still enabled before
executing the action.
"""
if not self._is_enabled:
return None
return await self.async_trigger(run_variables, context, skip_condition)
async def _async_attach_triggers(
self, home_assistant_start: bool
) -> Callable[[], None] | None:
@@ -835,7 +851,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
return await async_initialize_triggers(
self.hass,
self._trigger_config,
self.async_trigger,
self._async_trigger_if_enabled,
DOMAIN,
str(self.name),
self._log_callback,


@@ -56,6 +56,7 @@ class AxisCamera(AxisEntity, MjpegCamera):
mjpeg_url=self.mjpeg_source,
still_image_url=self.image_source,
authentication=HTTP_DIGEST_AUTHENTICATION,
verify_ssl=False,
unique_id=f"{hub.unique_id}-camera",
)
@@ -74,16 +75,18 @@ class AxisCamera(AxisEntity, MjpegCamera):
Additionally used when device change IP address.
"""
proto = self.hub.config.protocol
host = self.hub.config.host
port = self.hub.config.port
image_options = self.generate_options(skip_stream_profile=True)
self._still_image_url = (
f"http://{self.hub.config.host}:{self.hub.config.port}/axis-cgi"
f"/jpg/image.cgi{image_options}"
f"{proto}://{host}:{port}/axis-cgi/jpg/image.cgi{image_options}"
)
mjpeg_options = self.generate_options()
self._mjpeg_url = (
f"http://{self.hub.config.host}:{self.hub.config.port}/axis-cgi"
f"/mjpg/video.cgi{mjpeg_options}"
f"{proto}://{host}:{port}/axis-cgi/mjpg/video.cgi{mjpeg_options}"
)
stream_options = self.generate_options(add_video_codec_h264=True)
@@ -95,10 +98,7 @@ class AxisCamera(AxisEntity, MjpegCamera):
self.hub.additional_diagnostics["camera_sources"] = {
"Image": self._still_image_url,
"MJPEG": self._mjpeg_url,
"Stream": (
f"rtsp://user:pass@{self.hub.config.host}/axis-media"
f"/media.amp{stream_options}"
),
"Stream": (f"rtsp://user:pass@{host}/axis-media/media.amp{stream_options}"),
}
@property


@@ -168,16 +168,13 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
self, entry_data: Mapping[str, Any], keep_password: bool
) -> ConfigFlowResult:
"""Re-run configuration step."""
protocol = entry_data.get(CONF_PROTOCOL, "http")
password = entry_data[CONF_PASSWORD] if keep_password else ""
self.discovery_schema = {
vol.Required(
CONF_PROTOCOL, default=entry_data.get(CONF_PROTOCOL, "http")
): str,
vol.Required(CONF_PROTOCOL, default=protocol): vol.In(PROTOCOL_CHOICES),
vol.Required(CONF_HOST, default=entry_data[CONF_HOST]): str,
vol.Required(CONF_USERNAME, default=entry_data[CONF_USERNAME]): str,
vol.Required(
CONF_PASSWORD,
default=entry_data[CONF_PASSWORD] if keep_password else "",
): str,
vol.Required(CONF_PASSWORD, default=password): str,
vol.Required(CONF_PORT, default=entry_data[CONF_PORT]): int,
}


@@ -12,6 +12,7 @@ from homeassistant.const import (
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_PROTOCOL,
CONF_TRIGGER_TIME,
CONF_USERNAME,
)
@@ -31,6 +32,7 @@ class AxisConfig:
entry: ConfigEntry
protocol: str
host: str
port: int
username: str
@@ -54,6 +56,7 @@ class AxisConfig:
options = config_entry.options
return cls(
entry=config_entry,
protocol=config.get(CONF_PROTOCOL, "http"),
host=config[CONF_HOST],
username=config[CONF_USERNAME],
password=config[CONF_PASSWORD],


@@ -116,7 +116,7 @@ class AxisHub:
if status.status.state == ClientState.ACTIVE:
self.config.entry.async_on_unload(
await mqtt.async_subscribe(
hass, f"{self.api.vapix.serial_number}/#", self.mqtt_message
hass, f"{status.config.device_topic_prefix}/#", self.mqtt_message
)
)
@@ -124,7 +124,8 @@ class AxisHub:
def mqtt_message(self, message: ReceiveMessage) -> None:
"""Receive Axis MQTT message."""
self.disconnect_from_stream()
if message.topic.endswith("event/connection"):
return
event = mqtt_json_to_event(message.payload)
self.api.event.handler(event)


@@ -26,7 +26,7 @@
"iot_class": "local_push",
"loggers": ["axis"],
"quality_scale": "platinum",
"requirements": ["axis==59"],
"requirements": ["axis==61"],
"ssdp": [
{
"manufacturer": "AXIS"


@@ -15,7 +15,7 @@
"quality_scale": "internal",
"requirements": [
"bleak==0.21.1",
"bleak-retry-connector==3.4.0",
"bleak-retry-connector==3.5.0",
"bluetooth-adapters==0.18.0",
"bluetooth-auto-recovery==1.4.0",
"bluetooth-data-tools==1.19.0",


@@ -8,7 +8,7 @@
"iot_class": "local_polling",
"loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
"quality_scale": "platinum",
"requirements": ["brother==4.0.2"],
"requirements": ["brother==4.1.0"],
"zeroconf": [
{
"type": "_printer._tcp.local.",


@@ -34,6 +34,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
username=entry.data[CONF_USERNAME],
password=entry.data[CONF_PASSWORD],
ssl_verify_cert=entry.data[CONF_VERIFY_SSL],
timeout=10,
)
try:
await hass.async_add_executor_job(client.principal)


@@ -58,6 +58,7 @@ class GetTemperatureIntent(intent.IntentHandler):
raise intent.NoStatesMatchedError(
name=entity_text or entity_name,
area=area_name or area_id,
floor=None,
domains={DOMAIN},
device_classes=None,
)
@@ -75,6 +76,7 @@ class GetTemperatureIntent(intent.IntentHandler):
raise intent.NoStatesMatchedError(
name=entity_name,
area=None,
floor=None,
domains={DOMAIN},
device_classes=None,
)


@@ -8,5 +8,5 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["hass_nabucasa"],
"requirements": ["hass-nabucasa==0.79.0"]
"requirements": ["hass-nabucasa==0.78.0"]
}


@@ -34,6 +34,7 @@ from homeassistant.helpers import (
area_registry as ar,
device_registry as dr,
entity_registry as er,
floor_registry as fr,
intent,
start,
template,
@@ -163,7 +164,12 @@ class DefaultAgent(AbstractConversationAgent):
self.hass.bus.async_listen(
ar.EVENT_AREA_REGISTRY_UPDATED,
self._async_handle_area_registry_changed,
self._async_handle_area_floor_registry_changed,
run_immediately=True,
)
self.hass.bus.async_listen(
fr.EVENT_FLOOR_REGISTRY_UPDATED,
self._async_handle_area_floor_registry_changed,
run_immediately=True,
)
self.hass.bus.async_listen(
@@ -696,10 +702,13 @@ class DefaultAgent(AbstractConversationAgent):
return lang_intents
@core.callback
def _async_handle_area_registry_changed(
self, event: core.Event[ar.EventAreaRegistryUpdatedData]
def _async_handle_area_floor_registry_changed(
self,
event: core.Event[
ar.EventAreaRegistryUpdatedData | fr.EventFloorRegistryUpdatedData
],
) -> None:
"""Clear area area cache when the area registry has changed."""
"""Clear area/floor list cache when the area registry has changed."""
self._slot_lists = None
@core.callback
@@ -773,6 +782,8 @@ class DefaultAgent(AbstractConversationAgent):
# Default name
entity_names.append((state.name, state.name, context))
_LOGGER.debug("Exposed entities: %s", entity_names)
# Expose all areas.
#
# We pass in area id here with the expectation that no two areas will
@@ -788,11 +799,25 @@ class DefaultAgent(AbstractConversationAgent):
area_names.append((alias, area.id))
_LOGGER.debug("Exposed entities: %s", entity_names)
# Expose all floors.
#
# We pass in floor id here with the expectation that no two floors will
# share the same name or alias.
floors = fr.async_get(self.hass)
floor_names = []
for floor in floors.async_list_floors():
floor_names.append((floor.name, floor.floor_id))
if floor.aliases:
for alias in floor.aliases:
if not alias.strip():
continue
floor_names.append((alias, floor.floor_id))
self._slot_lists = {
"area": TextSlotList.from_tuples(area_names, allow_template=False),
"name": TextSlotList.from_tuples(entity_names, allow_template=False),
"floor": TextSlotList.from_tuples(floor_names, allow_template=False),
}
return self._slot_lists
@@ -953,6 +978,10 @@ def _get_unmatched_response(result: RecognizeResult) -> tuple[ErrorKey, dict[str
# area only
return ErrorKey.NO_AREA, {"area": unmatched_area}
if unmatched_floor := unmatched_text.get("floor"):
# floor only
return ErrorKey.NO_FLOOR, {"floor": unmatched_floor}
# Area may still have matched
matched_area: str | None = None
if matched_area_entity := result.entities.get("area"):
@@ -1000,6 +1029,13 @@ def _get_no_states_matched_response(
"area": no_states_error.area,
}
if no_states_error.floor:
# domain in floor
return ErrorKey.NO_DOMAIN_IN_FLOOR, {
"domain": domain,
"floor": no_states_error.floor,
}
# domain only
return ErrorKey.NO_DOMAIN, {"domain": domain}


@@ -7,5 +7,5 @@
"integration_type": "system",
"iot_class": "local_push",
"quality_scale": "internal",
"requirements": ["hassil==1.6.1", "home-assistant-intents==2024.3.27"]
"requirements": ["hassil==1.6.1", "home-assistant-intents==2024.4.3"]
}


@@ -11,7 +11,11 @@ import requests
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import (
DOMAIN as HOMEASSISTANT_DOMAIN,
HomeAssistant,
ServiceCall,
)
from homeassistant.data_entry_flow import FlowResultType
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
@@ -43,6 +47,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
if DOMAIN not in config:
return True
hass.async_create_task(_async_import_config(hass, config))
return True
async def _async_import_config(hass: HomeAssistant, config: ConfigType) -> None:
"""Import the Downloader component from the YAML file."""
import_result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
@@ -51,28 +62,40 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
},
)
translation_key = "deprecated_yaml"
if (
import_result["type"] == FlowResultType.ABORT
and import_result["reason"] == "import_failed"
and import_result["reason"] != "single_instance_allowed"
):
translation_key = "import_failed"
async_create_issue(
hass,
DOMAIN,
f"deprecated_yaml_{DOMAIN}",
breaks_in_ha_version="2024.9.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key=translation_key,
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Downloader",
},
)
return True
async_create_issue(
hass,
DOMAIN,
f"deprecated_yaml_{DOMAIN}",
breaks_in_ha_version="2024.10.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="directory_does_not_exist",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Downloader",
"url": "/config/integrations/dashboard/add?domain=downloader",
},
)
else:
async_create_issue(
hass,
HOMEASSISTANT_DOMAIN,
f"deprecated_yaml_{DOMAIN}",
breaks_in_ha_version="2024.10.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Downloader",
},
)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@@ -83,7 +106,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
if not os.path.isabs(download_path):
download_path = hass.config.path(download_path)
if not os.path.isdir(download_path):
if not await hass.async_add_executor_job(os.path.isdir, download_path):
_LOGGER.error(
"Download path %s does not exist. File Downloader not active", download_path
)


@@ -46,19 +46,24 @@ class DownloaderConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)
async def async_step_import(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult:
"""Handle a flow initiated by configuration file."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
return await self.async_step_user(user_input)
try:
await self._validate_input(user_input)
except DirectoryDoesNotExist:
return self.async_abort(reason="directory_does_not_exist")
return self.async_create_entry(title=DEFAULT_NAME, data=user_input)
async def _validate_input(self, user_input: dict[str, Any]) -> None:
"""Validate the user input if the directory exists."""
if not os.path.isabs(user_input[CONF_DOWNLOAD_DIR]):
download_path = self.hass.config.path(user_input[CONF_DOWNLOAD_DIR])
download_path = user_input[CONF_DOWNLOAD_DIR]
if not os.path.isabs(download_path):
download_path = self.hass.config.path(download_path)
if not os.path.isdir(download_path):
if not await self.hass.async_add_executor_job(os.path.isdir, download_path):
_LOGGER.error(
"Download path %s does not exist. File Downloader not active",
download_path,


@@ -37,13 +37,9 @@
}
},
"issues": {
"deprecated_yaml": {
"title": "The {integration_title} YAML configuration is being removed",
"description": "Configuring {integration_title} using YAML is being removed.\n\nYour configuration is already imported.\n\nRemove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue."
},
"import_failed": {
"directory_does_not_exist": {
"title": "The {integration_title} failed to import",
"description": "The {integration_title} integration failed to import.\n\nPlease check the logs for more details."
"description": "The {integration_title} integration failed to import because the configured directory does not exist.\n\nEnsure the directory exists and restart Home Assistant to try again or remove the {integration_title} configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
}
}
}


@@ -1,7 +1,7 @@
{
"services": {
"restart": "mdi:restart",
"start": "mdi:start",
"start": "mdi:play",
"stop": "mdi:stop"
}
}


@@ -121,6 +121,7 @@ async def async_setup_entry(
Platform.COVER,
Platform.LIGHT,
Platform.LOCK,
Platform.SENSOR,
Platform.SWITCH,
)
for device in controller.fibaro_devices[platform]


@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/flexit_bacnet",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["flexit_bacnet==2.1.0"]
"requirements": ["flexit_bacnet==2.2.1"]
}


@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"quality_scale": "platinum",
"requirements": ["forecast-solar==3.0.0"]
"requirements": ["forecast-solar==3.1.0"]
}


@@ -311,6 +311,17 @@ class FritzBoxTools(
)
return unregister_entity_updates
def _entity_states_update(self) -> dict:
"""Run registered entity update calls."""
entity_states = {}
for key in list(self._entity_update_functions):
if (update_fn := self._entity_update_functions.get(key)) is not None:
_LOGGER.debug("update entity %s", key)
entity_states[key] = update_fn(
self.fritz_status, self.data["entity_states"].get(key)
)
return entity_states
async def _async_update_data(self) -> UpdateCoordinatorDataType:
"""Update FritzboxTools data."""
entity_data: UpdateCoordinatorDataType = {
@@ -319,15 +330,9 @@ class FritzBoxTools(
}
try:
await self.async_scan_devices()
for key in list(self._entity_update_functions):
_LOGGER.debug("update entity %s", key)
entity_data["entity_states"][
key
] = await self.hass.async_add_executor_job(
self._entity_update_functions[key],
self.fritz_status,
self.data["entity_states"].get(key),
)
entity_data["entity_states"] = await self.hass.async_add_executor_job(
self._entity_states_update
)
if self.has_call_deflections:
entity_data[
"call_deflections"


@@ -141,7 +141,7 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_abort(reason="already_in_progress")
# update old and user-configured config entries
for entry in self._async_current_entries():
for entry in self._async_current_entries(include_ignore=False):
if entry.data[CONF_HOST] == host:
if uuid and not entry.unique_id:
self.hass.config_entries.async_update_entry(entry, unique_id=uuid)


@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20240329.1"]
"requirements": ["home-assistant-frontend==20240404.2"]
}


@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/fyta",
"integration_type": "hub",
"iot_class": "cloud_polling",
"requirements": ["fyta_cli==0.3.3"]
"requirements": ["fyta_cli==0.3.5"]
}


@@ -46,35 +46,35 @@ SENSORS: Final[list[FytaSensorEntityDescription]] = [
translation_key="plant_status",
device_class=SensorDeviceClass.ENUM,
options=PLANT_STATUS_LIST,
value_fn=lambda value: PLANT_STATUS[value],
value_fn=PLANT_STATUS.get,
),
FytaSensorEntityDescription(
key="temperature_status",
translation_key="temperature_status",
device_class=SensorDeviceClass.ENUM,
options=PLANT_STATUS_LIST,
value_fn=lambda value: PLANT_STATUS[value],
value_fn=PLANT_STATUS.get,
),
FytaSensorEntityDescription(
key="light_status",
translation_key="light_status",
device_class=SensorDeviceClass.ENUM,
options=PLANT_STATUS_LIST,
value_fn=lambda value: PLANT_STATUS[value],
value_fn=PLANT_STATUS.get,
),
FytaSensorEntityDescription(
key="moisture_status",
translation_key="moisture_status",
device_class=SensorDeviceClass.ENUM,
options=PLANT_STATUS_LIST,
value_fn=lambda value: PLANT_STATUS[value],
value_fn=PLANT_STATUS.get,
),
FytaSensorEntityDescription(
key="salinity_status",
translation_key="salinity_status",
device_class=SensorDeviceClass.ENUM,
options=PLANT_STATUS_LIST,
value_fn=lambda value: PLANT_STATUS[value],
value_fn=PLANT_STATUS.get,
),
FytaSensorEntityDescription(
key="temperature",
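
The hunk above replaces lambda value: PLANT_STATUS[value] with PLANT_STATUS.get, so an unexpected status value from the API yields None instead of raising KeyError. A minimal sketch of that difference, using a hypothetical stand-in for PLANT_STATUS (its real contents are not shown in this diff):

PLANT_STATUS = {0: "no_data", 1: "too_low", 2: "perfect"}  # hypothetical contents

strict_fn = lambda value: PLANT_STATUS[value]  # old style: raises on unknown values
lenient_fn = PLANT_STATUS.get                  # new style: returns None on unknown values

print(lenient_fn(2))    # perfect
print(lenient_fn(99))   # None, so the sensor reports unknown instead of crashing
try:
    strict_fn(99)
except KeyError:
    print("old lambda raised KeyError for an unexpected value")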


@@ -112,8 +112,9 @@ class AsyncConfigEntryAuth:
raise GoogleTasksApiError(
f"Google Tasks API responded with error ({exception.status_code})"
) from exception
data = json.loads(response)
_raise_if_error(data)
if response:
data = json.loads(response)
_raise_if_error(data)
for task_id in task_ids:
batch.add(
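
The added guard matters because json.loads rejects an empty payload, and per the commit title the remove responses can come back empty. A standalone illustration of why the body is only parsed when it is non-empty (plain Python, not the Google Tasks client itself):

import json

def parse_batch_item(response: str) -> dict | None:
    # An empty body would make json.loads raise JSONDecodeError,
    # so only parse (and only check for errors) when there is content.
    if not response:
        return None
    return json.loads(response)

print(parse_batch_item(""))              # None instead of an exception
print(parse_batch_item('{"ok": true}'))  # {'ok': True}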


@@ -196,7 +196,7 @@ SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
{
vol.Optional(ATTR_HOMEASSISTANT): cv.boolean,
vol.Optional(ATTR_FOLDERS): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [cv.slug]),
vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [VALID_ADDON_SLUG]),
}
)
@@ -211,7 +211,7 @@ SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
{
vol.Optional(ATTR_HOMEASSISTANT): cv.boolean,
vol.Optional(ATTR_FOLDERS): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [cv.slug]),
vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [VALID_ADDON_SLUG]),
}
)


@@ -22,7 +22,7 @@ from .const import (
from .handler import async_apply_suggestion
from .issues import Issue, Suggestion
SUGGESTION_CONFIRMATION_REQUIRED = {"system_execute_reboot"}
SUGGESTION_CONFIRMATION_REQUIRED = {"system_adopt_data_disk", "system_execute_reboot"}
EXTRA_PLACEHOLDERS = {
"issue_mount_mount_failed": {


@@ -51,8 +51,15 @@
"title": "Multiple data disks detected",
"fix_flow": {
"step": {
"system_rename_data_disk": {
"description": "`{reference}` is a filesystem with the name hassos-data and is not the active data disk. This can cause Home Assistant to choose the wrong data disk at system reboot.\n\nUse the fix option to rename the filesystem to prevent this. Alternatively you can move the data disk to the drive (overwriting its contents) or remove the drive from the system."
"fix_menu": {
"description": "`{reference}` is a filesystem with the name hassos-data and is not the active data disk. This can cause Home Assistant to choose the wrong data disk at system reboot.\n\nUse the 'Rename' option to rename the filesystem to prevent this. Use the 'Adopt' option to make that your data disk and rename the existing one. Alternatively you can move the data disk to the drive (overwriting its contents) or remove the drive from the system.",
"menu_options": {
"system_rename_data_disk": "Rename",
"system_adopt_data_disk": "Adopt"
}
},
"system_adopt_data_disk": {
"description": "This fix will initiate a system reboot which will make Home Assistant and all the Add-ons inaccessible for a brief period. After the reboot `{reference}` will be the data disk of Home Assistant and your existing data disk will be renamed and ignored."
}
},
"abort": {


@@ -2,15 +2,36 @@
from __future__ import annotations
from functools import partial
from holidays import country_holidays
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.const import CONF_COUNTRY, Platform
from homeassistant.core import HomeAssistant
from homeassistant.setup import SetupPhases, async_pause_setup
from .const import CONF_PROVINCE
PLATFORMS: list[Platform] = [Platform.CALENDAR]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Holiday from a config entry."""
country: str = entry.data[CONF_COUNTRY]
province: str | None = entry.data.get(CONF_PROVINCE)
# We only import here to ensure that that its not imported later
# in the event loop since the platforms will call country_holidays
# which loads python code from disk.
with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES):
# import executor job is used here because multiple integrations use
# the holidays library and it is not thread safe to import it in parallel
# https://github.com/python/cpython/issues/83065
await hass.async_add_import_executor_job(
partial(country_holidays, country, subdiv=province)
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True


@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
"requirements": ["holidays==0.45", "babel==2.13.1"]
"requirements": ["holidays==0.46", "babel==2.13.1"]
}


@@ -113,7 +113,11 @@ class HMThermostat(HMDevice, ClimateEntity):
@property
def preset_modes(self):
"""Return a list of available preset modes."""
return [HM_PRESET_MAP[mode] for mode in self._hmdevice.ACTIONNODE]
return [
HM_PRESET_MAP[mode]
for mode in self._hmdevice.ACTIONNODE
if mode in HM_PRESET_MAP
]
@property
def current_humidity(self):


@@ -1,6 +1,6 @@
{
"services": {
"select_next": "mdi:skip",
"select_next": "mdi:skip-next",
"select_option": "mdi:check",
"select_previous": "mdi:skip-previous",
"select_first": "mdi:skip-backward",


@@ -12,7 +12,7 @@
"quality_scale": "platinum",
"requirements": [
"xknx==2.12.2",
"xknxproject==3.7.0",
"xknxproject==3.7.1",
"knx-frontend==2024.1.20.105944"
],
"single_config_entry": true


@@ -12,5 +12,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["pylitterbot"],
"requirements": ["pylitterbot==2023.4.9"]
"requirements": ["pylitterbot==2023.4.11"]
}


@@ -35,6 +35,7 @@ LITTER_BOX_STATUS_STATE_MAP = {
LitterBoxStatus.CLEAN_CYCLE: STATE_CLEANING,
LitterBoxStatus.EMPTY_CYCLE: STATE_CLEANING,
LitterBoxStatus.CLEAN_CYCLE_COMPLETE: STATE_DOCKED,
LitterBoxStatus.CAT_DETECTED: STATE_DOCKED,
LitterBoxStatus.CAT_SENSOR_TIMING: STATE_DOCKED,
LitterBoxStatus.DRAWER_FULL_1: STATE_DOCKED,
LitterBoxStatus.DRAWER_FULL_2: STATE_DOCKED,


@@ -179,7 +179,7 @@ async def _get_dashboard_info(hass, url_path):
"views": views,
}
if config is None:
if config is None or "views" not in config:
return data
for idx, view in enumerate(config["views"]):


@@ -141,7 +141,7 @@ class LutronLight(LutronDevice, LightEntity):
else:
brightness = self._prev_brightness
self._prev_brightness = brightness
args = {"new_level": brightness}
args = {"new_level": to_lutron_level(brightness)}
if ATTR_TRANSITION in kwargs:
args["fade_time_seconds"] = kwargs[ATTR_TRANSITION]
self._lutron_device.set_level(**args)
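
The fix passes the Home Assistant brightness (0-255) through to_lutron_level before calling set_level, since Lutron dimmers expect a 0-100 level. The helper itself is outside this hunk; a plausible sketch of the conversion, offered as an assumption rather than the integration's exact code:

def to_lutron_level(brightness: int) -> float:
    """Convert Home Assistant brightness (0-255) to a Lutron level (0-100)."""
    return round(brightness / 255 * 100, 2)

def to_hass_level(level: float) -> int:
    """Convert a Lutron level (0-100) back to Home Assistant brightness (0-255)."""
    return round(level / 100 * 255)

print(to_lutron_level(255))  # 100.0
print(to_lutron_level(128))  # 50.2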


@@ -7,5 +7,5 @@
"iot_class": "calculated",
"loggers": ["yt_dlp"],
"quality_scale": "internal",
"requirements": ["yt-dlp==2024.03.10"]
"requirements": ["yt-dlp==2024.04.09"]
}


@@ -52,7 +52,7 @@
"unjoin": "mdi:ungroup",
"volume_down": "mdi:volume-minus",
"volume_mute": "mdi:volume-mute",
"volume_set": "mdi:volume",
"volume_set": "mdi:volume-medium",
"volume_up": "mdi:volume-plus"
}
}


@@ -440,6 +440,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_reset_platform(hass: HomeAssistant, integration_name: str) -> None:
"""Release modbus resources."""
if DOMAIN not in hass.data:
_LOGGER.error("Modbus cannot reload, because it was never loaded")
return
_LOGGER.info("Modbus reloading")
hubs = hass.data[DOMAIN]
for name in hubs:


@@ -6,5 +6,5 @@
"iot_class": "local_polling",
"loggers": ["pymodbus"],
"quality_scale": "platinum",
"requirements": ["pymodbus==3.6.6"]
"requirements": ["pymodbus==3.6.7"]
}


@@ -1,4 +1,4 @@
"""Motionblinds BLE integration."""
"""Motionblinds Bluetooth integration."""
from __future__ import annotations
@@ -34,9 +34,9 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up Motionblinds BLE integration."""
"""Set up Motionblinds Bluetooth integration."""
_LOGGER.debug("Setting up Motionblinds BLE integration")
_LOGGER.debug("Setting up Motionblinds Bluetooth integration")
# The correct time is needed for encryption
_LOGGER.debug("Setting timezone for encryption: %s", hass.config.time_zone)
@@ -46,7 +46,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Motionblinds BLE device from a config entry."""
"""Set up Motionblinds Bluetooth device from a config entry."""
_LOGGER.debug("(%s) Setting up device", entry.data[CONF_MAC_CODE])
@@ -94,7 +94,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload Motionblinds BLE device from a config entry."""
"""Unload Motionblinds Bluetooth device from a config entry."""
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
hass.data[DOMAIN].pop(entry.entry_id)


@@ -1,4 +1,4 @@
"""Button entities for the Motionblinds BLE integration."""
"""Button entities for the Motionblinds Bluetooth integration."""
from __future__ import annotations


@@ -1,4 +1,4 @@
"""Config flow for Motionblinds BLE integration."""
"""Config flow for Motionblinds Bluetooth integration."""
from __future__ import annotations
@@ -38,7 +38,7 @@ CONFIG_SCHEMA = vol.Schema({vol.Required(CONF_MAC_CODE): str})
class FlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Motionblinds BLE."""
"""Handle a config flow for Motionblinds Bluetooth."""
def __init__(self) -> None:
"""Initialize a ConfigFlow."""


@@ -1,4 +1,4 @@
"""Constants for the Motionblinds BLE integration."""
"""Constants for the Motionblinds Bluetooth integration."""
ATTR_CONNECT = "connect"
ATTR_DISCONNECT = "disconnect"


@@ -1,4 +1,4 @@
"""Cover entities for the Motionblinds BLE integration."""
"""Cover entities for the Motionblinds Bluetooth integration."""
from __future__ import annotations


@@ -1,4 +1,4 @@
"""Base entities for the Motionblinds BLE integration."""
"""Base entities for the Motionblinds Bluetooth integration."""
import logging
@@ -16,7 +16,7 @@ _LOGGER = logging.getLogger(__name__)
class MotionblindsBLEEntity(Entity):
"""Base class for Motionblinds BLE entities."""
"""Base class for Motionblinds Bluetooth entities."""
_attr_has_entity_name = True
_attr_should_poll = False


@@ -1,6 +1,6 @@
{
"domain": "motionblinds_ble",
"name": "Motionblinds BLE",
"name": "Motionblinds Bluetooth",
"bluetooth": [
{
"local_name": "MOTION_*",


@@ -1,4 +1,4 @@
"""Select entities for the Motionblinds BLE integration."""
"""Select entities for the Motionblinds Bluetooth integration."""
from __future__ import annotations


@@ -5,7 +5,7 @@ from __future__ import annotations
from http import HTTPStatus
from aiohttp import ClientError, ClientResponseError
from myuplink import MyUplinkAPI, get_manufacturer, get_system_name
from myuplink import MyUplinkAPI, get_manufacturer, get_model, get_system_name
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
@@ -92,7 +92,7 @@ def create_devices(
identifiers={(DOMAIN, device_id)},
name=get_system_name(system),
manufacturer=get_manufacturer(device),
model=device.productName,
model=get_model(device),
sw_version=device.firmwareCurrent,
serial_number=device.product_serial_number,
)


@@ -6,5 +6,5 @@
"dependencies": ["application_credentials"],
"documentation": "https://www.home-assistant.io/integrations/myuplink",
"iot_class": "cloud_polling",
"requirements": ["myuplink==0.5.0"]
"requirements": ["myuplink==0.6.0"]
}


@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/neato",
"iot_class": "cloud_polling",
"loggers": ["pybotvac"],
"requirements": ["pybotvac==0.0.24"]
"requirements": ["pybotvac==0.0.25"]
}


@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/nobo_hub",
"integration_type": "hub",
"iot_class": "local_push",
"requirements": ["pynobo==1.8.0"]
"requirements": ["pynobo==1.8.1"]
}


@@ -108,7 +108,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
(CONF_REFRESH_TOKEN, client.refresh_token),
(CONF_USER_UUID, client.user_uuid),
):
if entry.data[key] == value:
if entry.data.get(key) == value:
continue
entry_updates["data"][key] = value


@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/opower",
"iot_class": "cloud_polling",
"loggers": ["opower"],
"requirements": ["opower==0.4.1"]
"requirements": ["opower==0.4.3"]
}

View File

@@ -298,6 +298,11 @@ class HitachiAirToAirHeatPumpOVP(OverkizEntity, ClimateEntity):
OverkizState.OVP_FAN_SPEED,
OverkizCommandParam.AUTO,
)
# Sanitize fan mode: Overkiz sometimes provides a state that cannot
# be used as a command. Convert it to the HA space and back to Overkiz.
if fan_mode not in FAN_MODES_TO_OVERKIZ.values():
fan_mode = FAN_MODES_TO_OVERKIZ[OVERKIZ_TO_FAN_MODES[fan_mode]]
hvac_mode = self._control_backfill(
hvac_mode,
OverkizState.OVP_MODE_CHANGE,
@@ -357,5 +362,5 @@ class HitachiAirToAirHeatPumpOVP(OverkizEntity, ClimateEntity):
]
await self.executor.async_execute_command(
OverkizCommand.GLOBAL_CONTROL, command_data
OverkizCommand.GLOBAL_CONTROL, *command_data
)
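
Two fixes land in this Overkiz hunk: a fan-mode state the device reports but cannot accept as a command is round-tripped through the HA mapping, and `command_data` is now unpacked with `*` so each element becomes its own positional argument. A standalone sketch of the unpacking difference (the function name and values are illustrative):

```python
# Sketch: passing a list as one argument vs. unpacking it into separate
# positional arguments, which is the fix applied above. Values are made up.
def execute_command(command: str, *params) -> None:
    print(command, params)


command_data = ["auto", 22.0, "fan"]

execute_command("global_control", command_data)   # params == (['auto', 22.0, 'fan'],)
execute_command("global_control", *command_data)  # params == ('auto', 22.0, 'fan')
```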

View File

@@ -8,7 +8,7 @@
"iot_class": "local_push",
"loggers": ["plexapi", "plexwebsocket"],
"requirements": [
"PlexAPI==4.15.10",
"PlexAPI==4.15.11",
"plexauth==0.0.6",
"plexwebsocket==0.0.14"
],

View File

@@ -258,7 +258,7 @@ class PrometheusMetrics:
self, entity_id: str, friendly_name: str | None = None
) -> None:
"""Remove labelsets matching the given entity id from all metrics."""
for metric in self._metrics.values():
for metric in list(self._metrics.values()):
for sample in cast(list[prometheus_client.Metric], metric.collect())[
0
].samples:
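
Wrapping `self._metrics.values()` in `list()` takes a snapshot of the mapping so entries can be removed while the loop runs. A minimal sketch of why the copy matters (the metric names are illustrative):

```python
# Sketch: mutating a dict while iterating its live view raises RuntimeError;
# iterating a list() snapshot allows safe removal. Names are illustrative.
metrics = {"sensor.temp": 1, "sensor.humidity": 2}
try:
    for _ in metrics.values():  # live view
        metrics.pop("sensor.temp", None)
except RuntimeError as err:
    print("live view:", err)  # dictionary changed size during iteration

metrics = {"sensor.temp": 1, "sensor.humidity": 2}
for _ in list(metrics.values()):  # snapshot
    metrics.pop("sensor.temp", None)
print("snapshot:", metrics)  # {'sensor.humidity': 2}
```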

View File

@@ -123,7 +123,8 @@ class RainBirdSwitch(CoordinatorEntity[RainbirdUpdateCoordinator], SwitchEntity)
# The device reflects the old state for a few moments. Update the
# state manually and trigger a refresh after a short debounced delay.
self.coordinator.data.active_zones.remove(self._zone)
if self.is_on:
self.coordinator.data.active_zones.remove(self._zone)
self.async_write_ha_state()
await self.coordinator.async_request_refresh()
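
The guard means turning off a zone that is already off no longer tries to drop a member that is not in `active_zones`. A standalone sketch of the pattern, assuming `active_zones` behaves like a set (zone numbers are illustrative):

```python
# Sketch: guarding remove() so an already-off zone does not raise.
# set.remove() raises KeyError (list.remove() would raise ValueError)
# when the element is missing. Zone numbers are illustrative.
active_zones = {1, 3}
zone = 2  # this zone is already off

if zone in active_zones:  # mirrors the `if self.is_on:` guard above
    active_zones.remove(zone)

print(active_zones)  # {1, 3}
```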

View File

@@ -715,6 +715,7 @@ class Statistics(Base, StatisticsBase):
"start_ts",
unique=True,
),
_DEFAULT_TABLE_ARGS,
)
__tablename__ = TABLE_STATISTICS
@@ -732,6 +733,7 @@ class StatisticsShortTerm(Base, StatisticsBase):
"start_ts",
unique=True,
),
_DEFAULT_TABLE_ARGS,
)
__tablename__ = TABLE_STATISTICS_SHORT_TERM
@@ -760,7 +762,10 @@ class StatisticsMeta(Base):
class RecorderRuns(Base):
"""Representation of recorder run."""
__table_args__ = (Index("ix_recorder_runs_start_end", "start", "end"),)
__table_args__ = (
Index("ix_recorder_runs_start_end", "start", "end"),
_DEFAULT_TABLE_ARGS,
)
__tablename__ = TABLE_RECORDER_RUNS
run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
start: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)
@@ -789,6 +794,7 @@ class MigrationChanges(Base):
"""Representation of migration changes."""
__tablename__ = TABLE_MIGRATION_CHANGES
__table_args__ = (_DEFAULT_TABLE_ARGS,)
migration_id: Mapped[str] = mapped_column(String(255), primary_key=True)
version: Mapped[int] = mapped_column(SmallInteger)
@@ -798,6 +804,8 @@ class SchemaChanges(Base):
"""Representation of schema version changes."""
__tablename__ = TABLE_SCHEMA_CHANGES
__table_args__ = (_DEFAULT_TABLE_ARGS,)
change_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
schema_version: Mapped[int | None] = mapped_column(Integer)
changed: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)
@@ -816,6 +824,8 @@ class StatisticsRuns(Base):
"""Representation of statistics run."""
__tablename__ = TABLE_STATISTICS_RUNS
__table_args__ = (_DEFAULT_TABLE_ARGS,)
run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
start: Mapped[datetime] = mapped_column(DATETIME_TYPE, index=True)
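
`_DEFAULT_TABLE_ARGS` is now appended to every `__table_args__` tuple. SQLAlchemy treats a trailing dict in `__table_args__` as keyword arguments for the underlying `Table`, which is presumably how the shared dialect options are applied. A minimal sketch, assuming `_DEFAULT_TABLE_ARGS` is such a dict; the option value, index name, and column types are illustrative:

```python
# Sketch: combining an Index with shared dialect options in __table_args__.
# The trailing dict must be the LAST element of the tuple. The option value,
# index name and column types below are illustrative, not the recorder's.
from sqlalchemy import Index, Integer
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

_DEFAULT_TABLE_ARGS = {"mysql_engine": "InnoDB"}  # assumed shape: dialect options


class Base(DeclarativeBase):
    pass


class Statistics(Base):
    __tablename__ = "statistics"
    __table_args__ = (
        Index("ix_statistics_metadata_id_start_ts", "metadata_id", "start_ts", unique=True),
        _DEFAULT_TABLE_ARGS,  # treated as Table keyword arguments
    )

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    metadata_id: Mapped[int] = mapped_column(Integer)
    start_ts: Mapped[int] = mapped_column(Integer)


print(dict(Statistics.__table__.kwargs))  # {'mysql_engine': 'InnoDB'}
```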

View File

@@ -46,7 +46,6 @@ class ReolinkVODMediaSource(MediaSource):
"""Initialize ReolinkVODMediaSource."""
super().__init__(DOMAIN)
self.hass = hass
self.data: dict[str, ReolinkData] = hass.data[DOMAIN]
async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
"""Resolve media to a url."""
@@ -57,7 +56,8 @@ class ReolinkVODMediaSource(MediaSource):
_, config_entry_id, channel_str, stream_res, filename = identifier
channel = int(channel_str)
host = self.data[config_entry_id].host
data: dict[str, ReolinkData] = self.hass.data[DOMAIN]
host = data[config_entry_id].host
vod_type = VodRequestType.RTMP
if host.api.is_nvr:
@@ -130,7 +130,8 @@ class ReolinkVODMediaSource(MediaSource):
if config_entry.state != ConfigEntryState.LOADED:
continue
channels: list[str] = []
host = self.data[config_entry.entry_id].host
data: dict[str, ReolinkData] = self.hass.data[DOMAIN]
host = data[config_entry.entry_id].host
entities = er.async_entries_for_config_entry(
entity_reg, config_entry.entry_id
)
@@ -187,7 +188,8 @@ class ReolinkVODMediaSource(MediaSource):
self, config_entry_id: str, channel: int
) -> BrowseMediaSource:
"""Allow the user to select the high or low playback resolution, (low loads faster)."""
host = self.data[config_entry_id].host
data: dict[str, ReolinkData] = self.hass.data[DOMAIN]
host = data[config_entry_id].host
main_enc = await host.api.get_encoding(channel, "main")
if main_enc == "h265":
@@ -236,7 +238,8 @@ class ReolinkVODMediaSource(MediaSource):
self, config_entry_id: str, channel: int, stream: str
) -> BrowseMediaSource:
"""Return all days on which recordings are available for a reolink camera."""
host = self.data[config_entry_id].host
data: dict[str, ReolinkData] = self.hass.data[DOMAIN]
host = data[config_entry_id].host
# We want the camera's today, not necessarily the server's today
now = host.api.time() or await host.api.async_get_time()
@@ -288,7 +291,8 @@ class ReolinkVODMediaSource(MediaSource):
day: int,
) -> BrowseMediaSource:
"""Return all recording files on a specific day of a Reolink camera."""
host = self.data[config_entry_id].host
data: dict[str, ReolinkData] = self.hass.data[DOMAIN]
host = data[config_entry_id].host
start = dt.datetime(year, month, day, hour=0, minute=0, second=0)
end = dt.datetime(year, month, day, hour=23, minute=59, second=59)

View File

@@ -13,5 +13,5 @@
"documentation": "https://www.home-assistant.io/integrations/ring",
"iot_class": "cloud_polling",
"loggers": ["ring_doorbell"],
"requirements": ["ring-doorbell[listen]==0.8.8"]
"requirements": ["ring-doorbell[listen]==0.8.9"]
}

View File

@@ -38,7 +38,9 @@ from homeassistant.helpers.storage import Store
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
CONF_CONCURRENCY,
DATA_COORDINATOR,
DEFAULT_CONCURRENCY,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
EVENTS_COORDINATOR,
@@ -85,7 +87,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def _async_setup_local_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
data = entry.data
risco = RiscoLocal(data[CONF_HOST], data[CONF_PORT], data[CONF_PIN])
concurrency = entry.options.get(CONF_CONCURRENCY, DEFAULT_CONCURRENCY)
risco = RiscoLocal(
data[CONF_HOST], data[CONF_PORT], data[CONF_PIN], concurrency=concurrency
)
try:
await risco.connect()
@@ -96,7 +101,7 @@ async def _async_setup_local_entry(hass: HomeAssistant, entry: ConfigEntry) -> b
return False
async def _error(error: Exception) -> None:
_LOGGER.error("Error in Risco library: %s", error)
_LOGGER.error("Error in Risco library", exc_info=error)
entry.async_on_unload(risco.add_error_handler(_error))

View File

@@ -35,8 +35,10 @@ from .const import (
CONF_CODE_ARM_REQUIRED,
CONF_CODE_DISARM_REQUIRED,
CONF_COMMUNICATION_DELAY,
CONF_CONCURRENCY,
CONF_HA_STATES_TO_RISCO,
CONF_RISCO_STATES_TO_HA,
DEFAULT_ADVANCED_OPTIONS,
DEFAULT_OPTIONS,
DOMAIN,
MAX_COMMUNICATION_DELAY,
@@ -225,11 +227,8 @@ class RiscoOptionsFlowHandler(OptionsFlow):
self._data = {**DEFAULT_OPTIONS, **config_entry.options}
def _options_schema(self) -> vol.Schema:
return vol.Schema(
schema = vol.Schema(
{
vol.Required(
CONF_SCAN_INTERVAL, default=self._data[CONF_SCAN_INTERVAL]
): int,
vol.Required(
CONF_CODE_ARM_REQUIRED, default=self._data[CONF_CODE_ARM_REQUIRED]
): bool,
@@ -239,6 +238,19 @@ class RiscoOptionsFlowHandler(OptionsFlow):
): bool,
}
)
if self.show_advanced_options:
self._data = {**DEFAULT_ADVANCED_OPTIONS, **self._data}
schema = schema.extend(
{
vol.Required(
CONF_SCAN_INTERVAL, default=self._data[CONF_SCAN_INTERVAL]
): int,
vol.Required(
CONF_CONCURRENCY, default=self._data[CONF_CONCURRENCY]
): int,
}
)
return schema
async def async_step_init(
self, user_input: dict[str, Any] | None = None
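
The options schema is now built in two steps: a base schema shown to everyone, extended with the scan-interval and concurrency fields only when the user has advanced mode enabled. A condensed voluptuous sketch (keys and defaults are illustrative):

```python
# Sketch: extending a base options schema only in advanced mode, as the
# options flow above now does. Keys and defaults are illustrative.
import voluptuous as vol

show_advanced_options = True
data = {"code_arm_required": False, "scan_interval": 30, "concurrency": 4}

schema = vol.Schema(
    {vol.Required("code_arm_required", default=data["code_arm_required"]): bool}
)
if show_advanced_options:
    schema = schema.extend(
        {
            vol.Required("scan_interval", default=data["scan_interval"]): int,
            vol.Required("concurrency", default=data["concurrency"]): int,
        }
    )

# Missing keys are filled in from the defaults above.
print(schema({"code_arm_required": True}))
```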

View File

@@ -14,6 +14,7 @@ DATA_COORDINATOR = "risco"
EVENTS_COORDINATOR = "risco_events"
DEFAULT_SCAN_INTERVAL = 30
DEFAULT_CONCURRENCY = 4
TYPE_LOCAL = "local"
@@ -25,6 +26,7 @@ CONF_CODE_DISARM_REQUIRED = "code_disarm_required"
CONF_RISCO_STATES_TO_HA = "risco_states_to_ha"
CONF_HA_STATES_TO_RISCO = "ha_states_to_risco"
CONF_COMMUNICATION_DELAY = "communication_delay"
CONF_CONCURRENCY = "concurrency"
RISCO_GROUPS = ["A", "B", "C", "D"]
RISCO_ARM = "arm"
@@ -44,9 +46,13 @@ DEFAULT_HA_STATES_TO_RISCO = {
}
DEFAULT_OPTIONS = {
CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
CONF_CODE_ARM_REQUIRED: False,
CONF_CODE_DISARM_REQUIRED: False,
CONF_RISCO_STATES_TO_HA: DEFAULT_RISCO_STATES_TO_HA,
CONF_HA_STATES_TO_RISCO: DEFAULT_HA_STATES_TO_RISCO,
}
DEFAULT_ADVANCED_OPTIONS = {
CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
CONF_CONCURRENCY: DEFAULT_CONCURRENCY,
}

View File

@@ -36,7 +36,8 @@
"init": {
"title": "Configure options",
"data": {
"scan_interval": "How often to poll Risco (in seconds)",
"scan_interval": "How often to poll Risco Cloud (in seconds)",
"concurrency": "Maximum concurrent requests in Risco local",
"code_arm_required": "Require PIN to arm",
"code_disarm_required": "Require PIN to disarm"
}

View File

@@ -5,6 +5,6 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/romy",
"iot_class": "local_polling",
"requirements": ["romy==0.0.7"],
"requirements": ["romy==0.0.10"],
"zeroconf": ["_aicu-http._tcp.local."]
}

View File

@@ -24,7 +24,7 @@
"documentation": "https://www.home-assistant.io/integrations/roomba",
"iot_class": "local_push",
"loggers": ["paho_mqtt", "roombapy"],
"requirements": ["roombapy==1.6.13"],
"requirements": ["roombapy==1.8.1"],
"zeroconf": [
{
"type": "_amzn-alexa._tcp.local.",

View File

@@ -54,7 +54,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
vol.Optional(CONF_HOUSE_NUMBER_SUFFIX, default=""): cv.string,
vol.Optional(CONF_NAME, default="Rova"): cv.string,
vol.Optional(CONF_MONITORED_CONDITIONS, default=["bio"]): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
cv.ensure_list, [vol.In(["bio", "paper", "plastic", "residual"])]
),
}
)

View File

@@ -234,3 +234,5 @@ DEVICES_WITHOUT_FIRMWARE_CHANGELOG = (
)
CONF_GEN = "gen"
SHELLY_PLUS_RGBW_CHANNELS = 4

View File

@@ -14,6 +14,7 @@ from homeassistant.components.light import (
ATTR_RGB_COLOR,
ATTR_RGBW_COLOR,
ATTR_TRANSITION,
DOMAIN as LIGHT_DOMAIN,
ColorMode,
LightEntity,
LightEntityFeature,
@@ -34,12 +35,14 @@ from .const import (
RGBW_MODELS,
RPC_MIN_TRANSITION_TIME_SEC,
SHBLB_1_RGB_EFFECTS,
SHELLY_PLUS_RGBW_CHANNELS,
STANDARD_RGB_EFFECTS,
)
from .coordinator import ShellyBlockCoordinator, ShellyRpcCoordinator, get_entry_data
from .entity import ShellyBlockEntity, ShellyRpcEntity
from .utils import (
async_remove_shelly_entity,
async_remove_shelly_rpc_entities,
brightness_to_percentage,
get_device_entry_gen,
get_rpc_key_ids,
@@ -118,14 +121,28 @@ def async_setup_rpc_entry(
return
if light_key_ids := get_rpc_key_ids(coordinator.device.status, "light"):
# Light mode: remove RGB & RGBW entities, add light entities
async_remove_shelly_rpc_entities(
hass, LIGHT_DOMAIN, coordinator.mac, ["rgb:0", "rgbw:0"]
)
async_add_entities(RpcShellyLight(coordinator, id_) for id_ in light_key_ids)
return
light_keys = [f"light:{i}" for i in range(SHELLY_PLUS_RGBW_CHANNELS)]
if rgb_key_ids := get_rpc_key_ids(coordinator.device.status, "rgb"):
# RGB mode: remove light & RGBW entities, add RGB entity
async_remove_shelly_rpc_entities(
hass, LIGHT_DOMAIN, coordinator.mac, [*light_keys, "rgbw:0"]
)
async_add_entities(RpcShellyRgbLight(coordinator, id_) for id_ in rgb_key_ids)
return
if rgbw_key_ids := get_rpc_key_ids(coordinator.device.status, "rgbw"):
# RGBW mode: remove light & RGB entities, add RGBW entity
async_remove_shelly_rpc_entities(
hass, LIGHT_DOMAIN, coordinator.mac, [*light_keys, "rgb:0"]
)
async_add_entities(RpcShellyRgbwLight(coordinator, id_) for id_ in rgbw_key_ids)

View File

@@ -222,7 +222,7 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity):
) -> None:
"""Initialize update entity."""
super().__init__(coordinator, key, attribute, description)
self._ota_in_progress: bool = False
self._ota_in_progress: bool | int = False
self._attr_release_url = get_release_url(
coordinator.device.gen, coordinator.model, description.beta
)
@@ -237,14 +237,13 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity):
@callback
def _ota_progress_callback(self, event: dict[str, Any]) -> None:
"""Handle device OTA progress."""
if self._ota_in_progress:
if self.in_progress is not False:
event_type = event["event"]
if event_type == OTA_BEGIN:
self._attr_in_progress = 0
self._ota_in_progress = 0
elif event_type == OTA_PROGRESS:
self._attr_in_progress = event["progress_percent"]
self._ota_in_progress = event["progress_percent"]
elif event_type in (OTA_ERROR, OTA_SUCCESS):
self._attr_in_progress = False
self._ota_in_progress = False
self.async_write_ha_state()
@@ -262,6 +261,11 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity):
return self.installed_version
@property
def in_progress(self) -> bool | int:
"""Update installation in progress."""
return self._ota_in_progress
async def async_install(
self, version: str | None, backup: bool, **kwargs: Any
) -> None:
@@ -292,7 +296,7 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity):
await self.coordinator.async_shutdown_device_and_start_reauth()
else:
self._ota_in_progress = True
LOGGER.debug("OTA update call successful")
LOGGER.info("OTA update call for %s successful", self.coordinator.name)
class RpcSleepingUpdateEntity(

View File

@@ -488,3 +488,15 @@ async def async_shutdown_device(device: BlockDevice | RpcDevice) -> None:
await device.shutdown()
if isinstance(device, BlockDevice):
device.shutdown()
@callback
def async_remove_shelly_rpc_entities(
hass: HomeAssistant, domain: str, mac: str, keys: list[str]
) -> None:
"""Remove RPC based Shelly entity."""
entity_reg = er_async_get(hass)
for key in keys:
if entity_id := entity_reg.async_get_entity_id(domain, DOMAIN, f"{mac}-{key}"):
LOGGER.debug("Removing entity: %s", entity_id)
entity_reg.async_remove(entity_id)

View File

@@ -45,7 +45,7 @@ class SnapcastConfigFlow(ConfigFlow, domain=DOMAIN):
except OSError:
errors["base"] = "cannot_connect"
else:
await client.stop()
client.stop()
return self.async_create_entry(title=DEFAULT_TITLE, data=user_input)
return self.async_show_form(
step_id="user", data_schema=SNAPCAST_SCHEMA, errors=errors

View File

@@ -5,8 +5,19 @@ from __future__ import annotations
import binascii
import logging
from pysnmp.entity import config as cfg
from pysnmp.entity.rfc3413.oneliner import cmdgen
from pysnmp.error import PySnmpError
from pysnmp.hlapi.asyncio import (
CommunityData,
ContextData,
ObjectIdentity,
ObjectType,
SnmpEngine,
Udp6TransportTarget,
UdpTransportTarget,
UsmUserData,
bulkWalkCmd,
isEndOfMib,
)
import voluptuous as vol
from homeassistant.components.device_tracker import (
@@ -24,7 +35,13 @@ from .const import (
CONF_BASEOID,
CONF_COMMUNITY,
CONF_PRIV_KEY,
DEFAULT_AUTH_PROTOCOL,
DEFAULT_COMMUNITY,
DEFAULT_PORT,
DEFAULT_PRIV_PROTOCOL,
DEFAULT_TIMEOUT,
DEFAULT_VERSION,
SNMP_VERSIONS,
)
_LOGGER = logging.getLogger(__name__)
@@ -40,9 +57,12 @@ PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend(
)
def get_scanner(hass: HomeAssistant, config: ConfigType) -> SnmpScanner | None:
async def async_get_scanner(
hass: HomeAssistant, config: ConfigType
) -> SnmpScanner | None:
"""Validate the configuration and return an SNMP scanner."""
scanner = SnmpScanner(config[DOMAIN])
await scanner.async_init()
return scanner if scanner.success_init else None
@@ -51,39 +71,75 @@ class SnmpScanner(DeviceScanner):
"""Queries any SNMP capable Access Point for connected devices."""
def __init__(self, config):
"""Initialize the scanner."""
"""Initialize the scanner and test the target device."""
host = config[CONF_HOST]
community = config[CONF_COMMUNITY]
baseoid = config[CONF_BASEOID]
authkey = config.get(CONF_AUTH_KEY)
authproto = DEFAULT_AUTH_PROTOCOL
privkey = config.get(CONF_PRIV_KEY)
privproto = DEFAULT_PRIV_PROTOCOL
self.snmp = cmdgen.CommandGenerator()
try:
# Try IPv4 first.
target = UdpTransportTarget((host, DEFAULT_PORT), timeout=DEFAULT_TIMEOUT)
except PySnmpError:
# Then try IPv6.
try:
target = Udp6TransportTarget(
(host, DEFAULT_PORT), timeout=DEFAULT_TIMEOUT
)
except PySnmpError as err:
_LOGGER.error("Invalid SNMP host: %s", err)
return
self.host = cmdgen.UdpTransportTarget((config[CONF_HOST], 161))
if CONF_AUTH_KEY not in config or CONF_PRIV_KEY not in config:
self.auth = cmdgen.CommunityData(config[CONF_COMMUNITY])
if authkey is not None or privkey is not None:
if not authkey:
authproto = "none"
if not privkey:
privproto = "none"
request_args = [
SnmpEngine(),
UsmUserData(
community,
authKey=authkey or None,
privKey=privkey or None,
authProtocol=authproto,
privProtocol=privproto,
),
target,
ContextData(),
]
else:
self.auth = cmdgen.UsmUserData(
config[CONF_COMMUNITY],
config[CONF_AUTH_KEY],
config[CONF_PRIV_KEY],
authProtocol=cfg.usmHMACSHAAuthProtocol,
privProtocol=cfg.usmAesCfb128Protocol,
)
self.baseoid = cmdgen.MibVariable(config[CONF_BASEOID])
self.last_results = []
request_args = [
SnmpEngine(),
CommunityData(community, mpModel=SNMP_VERSIONS[DEFAULT_VERSION]),
target,
ContextData(),
]
# Test the router is accessible
data = self.get_snmp_data()
self.request_args = request_args
self.baseoid = baseoid
self.last_results = []
self.success_init = False
async def async_init(self):
"""Make a one-off read to check if the target device is reachable and readable."""
data = await self.async_get_snmp_data()
self.success_init = data is not None
def scan_devices(self):
async def async_scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self._update_info()
await self._async_update_info()
return [client["mac"] for client in self.last_results if client.get("mac")]
def get_device_name(self, device):
async def async_get_device_name(self, device):
"""Return the name of the given device or None if we don't know."""
# We have no names
return None
def _update_info(self):
async def _async_update_info(self):
"""Ensure the information from the device is up to date.
Return boolean if scanning successful.
@@ -91,38 +147,42 @@ class SnmpScanner(DeviceScanner):
if not self.success_init:
return False
if not (data := self.get_snmp_data()):
if not (data := await self.async_get_snmp_data()):
return False
self.last_results = data
return True
def get_snmp_data(self):
async def async_get_snmp_data(self):
"""Fetch MAC addresses from access point via SNMP."""
devices = []
errindication, errstatus, errindex, restable = self.snmp.nextCmd(
self.auth, self.host, self.baseoid
walker = bulkWalkCmd(
*self.request_args,
0,
50,
ObjectType(ObjectIdentity(self.baseoid)),
lexicographicMode=False,
)
async for errindication, errstatus, errindex, res in walker:
if errindication:
_LOGGER.error("SNMPLIB error: %s", errindication)
return
if errstatus:
_LOGGER.error(
"SNMP error: %s at %s",
errstatus.prettyPrint(),
errindex and res[int(errindex) - 1][0] or "?",
)
return
if errindication:
_LOGGER.error("SNMPLIB error: %s", errindication)
return
if errstatus:
_LOGGER.error(
"SNMP error: %s at %s",
errstatus.prettyPrint(),
errindex and restable[int(errindex) - 1][0] or "?",
)
return
for resrow in restable:
for _, val in resrow:
try:
mac = binascii.hexlify(val.asOctets()).decode("utf-8")
except AttributeError:
continue
_LOGGER.debug("Found MAC address: %s", mac)
mac = ":".join([mac[i : i + 2] for i in range(0, len(mac), 2)])
devices.append({"mac": mac})
for _oid, value in res:
if not isEndOfMib(res):
try:
mac = binascii.hexlify(value.asOctets()).decode("utf-8")
except AttributeError:
continue
_LOGGER.debug("Found MAC address: %s", mac)
mac = ":".join([mac[i : i + 2] for i in range(0, len(mac), 2)])
devices.append({"mac": mac})
return devices
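
The scanner drops the synchronous `cmdgen` one-liner API in favour of `pysnmp.hlapi.asyncio`, walking the base OID with `bulkWalkCmd` and decoding MAC addresses from the returned octets. A condensed, standalone version of that walk built from the calls shown above; the host, community string, and OID are placeholders:

```python
# Sketch: async SNMP walk collecting MAC addresses, condensed from the
# code above. Host, community string and base OID are placeholders.
import asyncio
import binascii

from pysnmp.hlapi.asyncio import (
    CommunityData,
    ContextData,
    ObjectIdentity,
    ObjectType,
    SnmpEngine,
    UdpTransportTarget,
    bulkWalkCmd,
    isEndOfMib,
)

BASEOID = "1.3.6.1.2.1.17.4.3.1.1"  # placeholder


async def walk_macs() -> list[str]:
    request_args = [
        SnmpEngine(),
        CommunityData("public", mpModel=1),  # SNMP v2c
        UdpTransportTarget(("192.0.2.1", 161), timeout=5),
        ContextData(),
    ]
    macs: list[str] = []
    walker = bulkWalkCmd(
        *request_args,
        0,
        50,
        ObjectType(ObjectIdentity(BASEOID)),
        lexicographicMode=False,
    )
    async for errindication, errstatus, _errindex, res in walker:
        if errindication or errstatus:
            break  # the integration logs these instead
        for _oid, value in res:
            if isEndOfMib(res):
                continue
            mac = binascii.hexlify(value.asOctets()).decode("utf-8")
            macs.append(":".join(mac[i : i + 2] for i in range(0, len(mac), 2)))
    return macs


if __name__ == "__main__":
    print(asyncio.run(walk_macs()))
```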

View File

@@ -1,7 +1,7 @@
{
"domain": "snmp",
"name": "SNMP",
"codeowners": [],
"codeowners": ["@nmaggioni"],
"documentation": "https://www.home-assistant.io/integrations/snmp",
"iot_class": "local_polling",
"loggers": ["pyasn1", "pysmi", "pysnmp"],

View File

@@ -270,7 +270,7 @@ class SnmpData:
"SNMP OID %s received type=%s and data %s",
self._baseoid,
type(value),
bytes(value),
value,
)
if isinstance(value, NoSuchObject):
_LOGGER.error(

View File

@@ -7,6 +7,7 @@ from contextlib import suppress
from functools import partial
import logging
from typing import cast
import urllib.parse
from soco.data_structures import DidlObject
from soco.ms_data_structures import MusicServiceItem
@@ -60,12 +61,14 @@ def get_thumbnail_url_full(
media_content_id,
media_content_type,
)
return getattr(item, "album_art_uri", None)
return urllib.parse.unquote(getattr(item, "album_art_uri", ""))
return get_browse_image_url(
media_content_type,
media_content_id,
media_image_id,
return urllib.parse.unquote(
get_browse_image_url(
media_content_type,
media_content_id,
media_image_id,
)
)
@@ -166,6 +169,7 @@ def build_item_response(
payload["idstring"] = "A:ALBUMARTIST/" + "/".join(
payload["idstring"].split("/")[2:]
)
payload["idstring"] = urllib.parse.unquote(payload["idstring"])
try:
search_type = MEDIA_TYPES_TO_SONOS[payload["search_type"]]
@@ -201,7 +205,7 @@ def build_item_response(
if not title:
try:
title = payload["idstring"].split("/")[1]
title = urllib.parse.unquote(payload["idstring"].split("/")[1])
except IndexError:
title = LIBRARY_TITLES_MAPPING[payload["idstring"]]
@@ -493,10 +497,24 @@ def get_media(
"""Fetch media/album."""
search_type = MEDIA_TYPES_TO_SONOS.get(search_type, search_type)
if search_type == "playlists":
# Format is S:TITLE or S:ITEM_ID
splits = item_id.split(":")
title = splits[1] if len(splits) > 1 else None
playlist = next(
(
p
for p in media_library.get_playlists()
if (item_id == p.item_id or title == p.title)
),
None,
)
return playlist
if not item_id.startswith("A:ALBUM") and search_type == SONOS_ALBUM:
item_id = "A:ALBUMARTIST/" + "/".join(item_id.split("/")[2:])
search_term = item_id.split("/")[-1]
search_term = urllib.parse.unquote(item_id.split("/")[-1])
matches = media_library.get_music_library_information(
search_type, search_term=search_term, full_album_art_uri=True
)
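
Browse identifiers arrive percent-encoded, so idstrings, search terms, and artwork URLs are now passed through `urllib.parse.unquote` before being handed to the local music library. A minimal sketch (the identifier is illustrative):

```python
# Sketch: decoding a percent-encoded browse identifier before using it
# as a library search term. The identifier below is illustrative.
import urllib.parse

idstring = "A:ALBUMARTIST/Some%20Artist/Some%20Album"

search_term = urllib.parse.unquote(idstring.split("/")[-1])
print(search_term)  # Some Album
```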

View File

@@ -626,13 +626,13 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
soco.play_uri(media_id, force_radio=is_radio)
elif media_type == MediaType.PLAYLIST:
if media_id.startswith("S:"):
item = media_browser.get_media(self.media.library, media_id, media_type)
soco.play_uri(item.get_uri())
return
try:
playlist = media_browser.get_media(
self.media.library, media_id, media_type
)
else:
playlists = soco.get_sonos_playlists(complete_result=True)
playlist = next(p for p in playlists if p.title == media_id)
except StopIteration:
playlist = next((p for p in playlists if p.title == media_id), None)
if not playlist:
_LOGGER.error('Could not find a Sonos playlist named "%s"', media_id)
else:
soco.clear_queue()

View File

@@ -25,10 +25,11 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
partial(speedtest.Speedtest, secure=True)
)
coordinator = SpeedTestDataCoordinator(hass, config_entry, api)
await hass.async_add_executor_job(coordinator.update_servers)
except speedtest.SpeedtestException as err:
raise ConfigEntryNotReady from err
hass.data[DOMAIN] = coordinator
async def _async_finish_startup(hass: HomeAssistant) -> None:
"""Run this only when HA has finished its startup."""
await coordinator.async_config_entry_first_refresh()
@@ -36,8 +37,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
# Don't start a speedtest during startup
async_at_started(hass, _async_finish_startup)
hass.data[DOMAIN] = coordinator
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
config_entry.async_on_unload(config_entry.add_update_listener(update_listener))

View File

@@ -58,14 +58,14 @@ class StarlinkUpdateCoordinator(DataUpdateCoordinator[StarlinkData]):
async def _async_update_data(self) -> StarlinkData:
async with asyncio.timeout(4):
try:
status, location, sleep = await asyncio.gather(
self.hass.async_add_executor_job(status_data, self.channel_context),
self.hass.async_add_executor_job(
location_data, self.channel_context
),
self.hass.async_add_executor_job(
get_sleep_config, self.channel_context
),
status = await self.hass.async_add_executor_job(
status_data, self.channel_context
)
location = await self.hass.async_add_executor_job(
location_data, self.channel_context
)
sleep = await self.hass.async_add_executor_job(
get_sleep_config, self.channel_context
)
return StarlinkData(location, sleep, *status)
except GrpcError as exc:
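
The `asyncio.gather` of three executor jobs is replaced by three sequential awaits, so the blocking calls against the shared channel context run one after another instead of in parallel threads. A sketch of the two patterns; the blocking functions are stand-ins, not the Starlink API:

```python
# Sketch: concurrent executor jobs vs. awaiting them one at a time (the
# pattern adopted above). The blocking functions are stand-ins.
import asyncio
import time


def status_data(ctx: str) -> str:
    time.sleep(0.1)  # simulate a blocking RPC
    return f"status({ctx})"


def location_data(ctx: str) -> str:
    time.sleep(0.1)
    return f"location({ctx})"


async def main() -> None:
    loop = asyncio.get_running_loop()
    ctx = "channel"

    # Concurrent: both blocking calls run in executor threads at once.
    status, location = await asyncio.gather(
        loop.run_in_executor(None, status_data, ctx),
        loop.run_in_executor(None, location_data, ctx),
    )
    print("gather:", status, location)

    # Sequential: the second call starts only after the first returns.
    status = await loop.run_in_executor(None, status_data, ctx)
    location = await loop.run_in_executor(None, location_data, ctx)
    print("sequential:", status, location)


asyncio.run(main())
```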

View File

@@ -10,6 +10,7 @@ import math
from homeassistant.components.time import TimeEntity, TimeEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import DOMAIN
@@ -62,14 +63,22 @@ class StarlinkTimeEntity(StarlinkEntity, TimeEntity):
def _utc_minutes_to_time(utc_minutes: int, timezone: tzinfo) -> time:
hour = math.floor(utc_minutes / 60)
minute = utc_minutes % 60
utc = datetime.now(UTC).replace(hour=hour, minute=minute, second=0, microsecond=0)
try:
utc = datetime.now(UTC).replace(
hour=hour, minute=minute, second=0, microsecond=0
)
except ValueError as exc:
raise HomeAssistantError from exc
return utc.astimezone(timezone).time()
def _time_to_utc_minutes(t: time, timezone: tzinfo) -> int:
zoned_time = datetime.now(timezone).replace(
hour=t.hour, minute=t.minute, second=0, microsecond=0
)
try:
zoned_time = datetime.now(timezone).replace(
hour=t.hour, minute=t.minute, second=0, microsecond=0
)
except ValueError as exc:
raise HomeAssistantError from exc
utc_time = zoned_time.astimezone(UTC).time()
return (utc_time.hour * 60) + utc_time.minute
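
`datetime.replace` raises `ValueError` when the computed hour or minute falls outside its range (1440 minutes, for instance, floor-divides to hour 24); wrapping both conversions turns that into a `HomeAssistantError` instead of an unhandled exception. A minimal reproduction of the underlying error, with an illustrative out-of-range value:

```python
# Sketch: the ValueError that is now re-raised as HomeAssistantError.
# 1440 is an illustrative out-of-range value.
import math
from datetime import UTC, datetime

utc_minutes = 1440
hour = math.floor(utc_minutes / 60)  # 24
minute = utc_minutes % 60            # 0

try:
    datetime.now(UTC).replace(hour=hour, minute=minute, second=0, microsecond=0)
except ValueError as exc:
    print(f"would surface as HomeAssistantError: {exc}")  # hour must be in 0..23
```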

View File

@@ -105,6 +105,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
if (
SynoSurveillanceStation.INFO_API_KEY in available_apis
and SynoSurveillanceStation.HOME_MODE_API_KEY in available_apis
and api.surveillance_station is not None
):
coordinator_switches = SynologyDSMSwitchUpdateCoordinator(hass, entry, api)
await coordinator_switches.async_config_entry_first_refresh()

View File

@@ -116,7 +116,7 @@ class SynoDSMSecurityBinarySensor(SynoDSMBinarySensor):
@property
def available(self) -> bool:
"""Return True if entity is available."""
return bool(self._api.security)
return bool(self._api.security) and super().available
@property
def extra_state_attributes(self) -> dict[str, str]:
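
This and the following Synology DSM hunks change `available` from a bare API-object check to one that also ANDs in `super().available`, so a failed coordinator update marks the entity unavailable as well. A minimal sketch of the cooperative-property pattern; class names and data are illustrative:

```python
# Sketch: AND-ing an entity-specific check with the base availability,
# as the Synology DSM entities now do. Class names/data are illustrative.
class CoordinatorEntity:
    last_update_success = False  # e.g. the last coordinator refresh failed

    @property
    def available(self) -> bool:
        return self.last_update_success


class SecurityBinarySensor(CoordinatorEntity):
    api_security = {"status": "safe"}  # entity-specific data source

    @property
    def available(self) -> bool:
        # Previously only bool(self.api_security); now the coordinator's
        # success is taken into account through super().available.
        return bool(self.api_security) and super().available


print(SecurityBinarySensor().available)  # False: last refresh failed
```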

View File

@@ -108,7 +108,7 @@ class SynoDSMCamera(SynologyDSMBaseEntity[SynologyDSMCameraUpdateCoordinator], C
@property
def available(self) -> bool:
"""Return the availability of the camera."""
return self.camera_data.is_enabled and self.coordinator.last_update_success
return self.camera_data.is_enabled and super().available
@property
def is_recording(self) -> bool:

View File

@@ -286,18 +286,7 @@ class SynoApi:
async def async_update(self) -> None:
"""Update function for updating API information."""
try:
await self._update()
except SYNOLOGY_CONNECTION_EXCEPTIONS as err:
LOGGER.debug(
"Connection error during update of '%s' with exception: %s",
self._entry.unique_id,
err,
)
LOGGER.warning(
"Connection error during update, fallback by reloading the entry"
)
await self._hass.config_entries.async_reload(self._entry.entry_id)
await self._update()
async def _update(self) -> None:
"""Update function for updating API information."""

View File

@@ -75,7 +75,7 @@
}
},
"services": {
"reboot": "mdi:reboot",
"reboot": "mdi:restart",
"shutdown": "mdi:power"
}
}

View File

@@ -366,7 +366,7 @@ class SynoDSMUtilSensor(SynoDSMSensor):
@property
def available(self) -> bool:
"""Return True if entity is available."""
return bool(self._api.utilisation)
return bool(self._api.utilisation) and super().available
class SynoDSMStorageSensor(SynologyDSMDeviceEntity, SynoDSMSensor):

View File

@@ -98,7 +98,7 @@ class SynoDSMSurveillanceHomeModeToggle(
@property
def available(self) -> bool:
"""Return True if entity is available."""
return bool(self._api.surveillance_station)
return bool(self._api.surveillance_station) and super().available
@property
def device_info(self) -> DeviceInfo:

View File

@@ -59,7 +59,7 @@ class SynoDSMUpdateEntity(
@property
def available(self) -> bool:
"""Return True if entity is available."""
return bool(self._api.upgrade)
return bool(self._api.upgrade) and super().available
@property
def installed_version(self) -> str | None:

View File

@@ -10,6 +10,6 @@
"iot_class": "local_push",
"loggers": ["systembridgeconnector"],
"quality_scale": "silver",
"requirements": ["systembridgeconnector==4.0.3"],
"requirements": ["systembridgeconnector==4.0.3", "systembridgemodels==4.0.4"],
"zeroconf": ["_system-bridge._tcp.local."]
}

Some files were not shown because too many files have changed in this diff.