Compare commits

...

149 Commits

Author SHA1 Message Date
Paulus Schoutsen
516f3295bf 2024.8.3 (#124569) 2024-08-25 16:06:09 +02:00
J. Nick Koston
2d5289e7dd Revert "Exclude aiohappyeyeballs from license check" (#124116) 2024-08-25 13:41:47 +00:00
Paulus Schoutsen
18efd84a35 Bump version to 2024.8.3 2024-08-25 13:26:00 +00:00
Jan Bouwhuis
b34c90b189 Only support remote activity on Alexa if feature is set and at least one feature is in the activity_list (#124567)
Only support remote activity on Alexa if feature is set and at least one feature is in the activity_list
2024-08-25 13:24:42 +00:00
Mr. Bubbles
a45c1a3914 Fix missing id in Habitica completed todos API response (#124565)
* Fix missing id in completed todos API response

* Copy id only if none

* Update homeassistant/components/habitica/coordinator.py

Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>

---------

Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>
2024-08-25 13:24:42 +00:00
J. Nick Koston
1bdf9d657e Bump yalexs to 8.4.1 (#124553)
changelog: https://github.com/bdraco/yalexs/compare/v8.4.0...v8.4.1
2024-08-25 13:24:41 +00:00
J. Nick Koston
b294a92ad2 Bump yalexs to 8.4.0 (#124520) 2024-08-25 13:24:40 +00:00
J. Nick Koston
2db362ab3d Bump yalexs to 8.3.3 (#124492)
* Bump yalexs to 8.2.0

changelog: https://github.com/bdraco/yalexs/compare/v8.1.4...v8.2.0

* bump to 8.3.1

* bump

* one more bump to ensure we do not hit the ratelimit/shutdown cleanly

* empty commit to restart ci since close/open did not work in flight
2024-08-25 13:24:39 +00:00
Joost Lekkerkerker
5f275a6b9c Don't raise WLED user flow unique_id check (#124481) 2024-08-25 13:22:59 +00:00
Ino Dekker
fa914b2811 Bump aiohue to version 4.7.3 (#124436) 2024-08-25 13:22:58 +00:00
J. Nick Koston
a128e2e4fc Bump yalexs to 8.1.4 (#124425)
changelog: https://github.com/bdraco/yalexs/compare/v8.1.2...v8.1.4
2024-08-25 13:22:57 +00:00
Penny Wood
03c7f2cf5b Add supported features for iZone (#124416)
* Fix for #123462

* Set outside of constructor
2024-08-25 13:22:57 +00:00
Angel Nunez Mencias
102528e5d3 update ttn_client - fix crash with SenseCAP devices (#124370)
update ttn_client
2024-08-25 13:22:56 +00:00
Pete Sage
8f4af4f7c2 Fix Spotify Media Browsing fails for new config entries (#124368)
* initial commit

* tests

* tests

* update tests

* update tests

* update tests
2024-08-25 13:22:55 +00:00
karwosts
667af10017 Add missing strings for riemann options flow (#124317) 2024-08-25 13:22:54 +00:00
G Johansson
e5a64a1e0a Bump python-holidays to 0.55 (#124314) 2024-08-25 13:22:54 +00:00
G Johansson
236fa8e238 Bump python-holidays to 0.54 (#124170) 2024-08-25 13:22:53 +00:00
J. Nick Koston
70a58a0bb0 Bump yalexs to 8.1.2 (#124303) 2024-08-25 13:20:54 +00:00
Joost Lekkerkerker
769c7f1ea3 Don't abort airgradient user flow if flow in progress (#124300) 2024-08-25 13:20:53 +00:00
Marcel van der Veldt
5a8045d1fb Prevent KeyError when Matter device sends invalid value for StartUpOnOff (#124280) 2024-08-25 13:20:52 +00:00
Allen Porter
5a73b636e3 Bump python-roborock to 2.6.0 (#124268) 2024-08-25 13:20:51 +00:00
Matthias Alphart
524e09b45e Update xknx to 3.1.1 (#124257) 2024-08-25 13:20:51 +00:00
J. Nick Koston
1f46670266 Bump aiohttp to 3.10.5 (#124254) 2024-08-25 13:20:50 +00:00
Erik Montnemery
a857f603c8 Bump pyhomeworks to 1.1.2 (#124199) 2024-08-25 13:20:49 +00:00
J. Nick Koston
b7d8f3d005 Fix shelly available check when device is not initialized (#124182)
* Fix shelly available check when device is not initialized

available needs to check for device.initialized or if the device
is sleepy as calls to status will raise NotInitialized which results
in many unretrieved exceptions while writing state

fixes
```
2024-08-18 09:33:03.757 ERROR (MainThread) [homeassistant] Error doing job: Task exception was never retrieved (None)
Traceback (most recent call last):
  File "/usr/src/homeassistant/homeassistant/helpers/update_coordinator.py", line 258, in _handle_refresh_interval
    await self._async_refresh(log_failures=True, scheduled=True)
  File "/usr/src/homeassistant/homeassistant/helpers/update_coordinator.py", line 453, in _async_refresh
    self.async_update_listeners()
  File "/usr/src/homeassistant/homeassistant/helpers/update_coordinator.py", line 168, in async_update_listeners
    update_callback()
  File "/config/custom_components/shelly/entity.py", line 374, in _update_callback
    self.async_write_ha_state()
  File "/usr/src/homeassistant/homeassistant/helpers/entity.py", line 1005, in async_write_ha_state
    self._async_write_ha_state()
  File "/usr/src/homeassistant/homeassistant/helpers/entity.py", line 1130, in _async_write_ha_state
    self.__async_calculate_state()
  File "/usr/src/homeassistant/homeassistant/helpers/entity.py", line 1067, in __async_calculate_state
    state = self._stringify_state(available)
            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/usr/src/homeassistant/homeassistant/helpers/entity.py", line 1011, in _stringify_state
    if (state := self.state) is None:
                 ^^^^^^^^^^
  File "/usr/src/homeassistant/homeassistant/components/binary_sensor/__init__.py", line 293, in state
    if (is_on := self.is_on) is None:
                 ^^^^^^^^^^
  File "/config/custom_components/shelly/binary_sensor.py", line 331, in is_on
    return bool(self.attribute_value)
                ^^^^^^^^^^^^^^^^^^^^
  File "/config/custom_components/shelly/entity.py", line 545, in attribute_value
    self._last_value = self.sub_status
                       ^^^^^^^^^^^^^^^
  File "/config/custom_components/shelly/entity.py", line 534, in sub_status
    return self.status[self.entity_description.sub_key]
           ^^^^^^^^^^^
  File "/config/custom_components/shelly/entity.py", line 364, in status
    return cast(dict, self.coordinator.device.status[self.key])
                      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/usr/local/lib/python3.12/site-packages/aioshelly/rpc_device/device.py", line 390, in status
    raise NotInitialized
aioshelly.exceptions.NotInitialized
```

* tweak

* cover

* fix

* cover

* fixes
2024-08-25 13:19:57 +00:00
Shay Levy
129035967b Shelly RPC - do not stop BLE scanner if a sleeping device (#124147) 2024-08-25 13:17:46 +00:00
Christopher Maio
45b44f8a59 Update Matter light transition blocklist to include GE Cync Undercabinet Lights (#124138) 2024-08-25 13:17:45 +00:00
J. Nick Koston
e80dc52175 Bump aiohttp to 3.10.4 (#124137)
changelog: https://github.com/aio-libs/aiohttp/compare/v3.10.3...v3.10.4
2024-08-25 13:17:45 +00:00
MarkGodwin
22bb3e5477 Bump tplink-omada-api to 1.4.2 (#124136)
Fix for bad pre-registered clients
2024-08-25 13:17:44 +00:00
Allen Porter
f89e8e6ceb Bump nest to 4.0.7 to increase subscriber deadline (#124131)
Bump nest to 4.0.7
2024-08-25 13:17:43 +00:00
J. Nick Koston
157a61845b Bump aiohomekit to 3.2.3 (#124115) 2024-08-25 13:17:43 +00:00
Artem Draft
0fcdc3c200 Bump pybravia to 0.3.4 (#124113) 2024-08-25 13:17:42 +00:00
cdnninja
d1f09ecd0c Add Alt Core300s model to vesync integration (#124091) 2024-08-25 13:17:41 +00:00
J. Nick Koston
3484ab3c0c Bump aioshelly to 11.2.4 (#124080) 2024-08-25 13:17:40 +00:00
J. Nick Koston
80df582ebd Bump yalexs to 8.0.2 (#123817) 2024-08-25 13:17:39 +00:00
J. Nick Koston
dc967e2ef2 Bump yalexs to 6.5.0 (#123739) 2024-08-25 13:17:39 +00:00
Daniel Rozycki
e2c1a38d87 Skip NextBus update if integration is still loading (#123564)
* Skip NextBus update if integration is still loading

Fixes a race between the loading thread and
update thread leading to an unrecoverable error

* Use async_at_started

* Use local copy of _route_stops to avoid NextBus race condition

* Update homeassistant/components/nextbus/coordinator.py

---------

Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2024-08-25 13:17:38 +00:00
Franck Nijhof
94516de724 2024.8.2 (#124069) 2024-08-16 18:43:41 +02:00
Joost Lekkerkerker
a2027fc78c Exclude aiohappyeyeballs from license check (#124041) 2024-08-16 18:13:33 +02:00
Franck Nijhof
be5577c2f9 Bump version to 2024.8.2 2024-08-16 18:02:52 +02:00
Joost Lekkerkerker
93dc08a05f Bump aiomealie to 0.8.1 (#124047) 2024-08-16 18:02:41 +02:00
Matthias Alphart
def2ace4ec Fix loading KNX integration actions when not using YAML (#124027)
* Fix loading KNX integration services when not using YAML

* remove unnecessary comment

* Remove unreachable test
2024-08-16 18:02:38 +02:00
J. Nick Koston
4f0261d739 Bump bluetooth-adapters to 0.19.4 (#124018)
Fixes a call to enumerate USB devices that did blocking
I/O
2024-08-16 18:02:35 +02:00
Brett Adams
6103811de8 Fix rear trunk logic in Tessie (#124011)
Allow open to be anything not zero
2024-08-16 18:02:32 +02:00
Robert Svensson
fd904c65a7 Bump aiounifi to v80 (#124004) 2024-08-16 18:02:29 +02:00
Joost Lekkerkerker
04bf8482b2 Re-enable concord232 (#124000) 2024-08-16 18:02:26 +02:00
Sid
f5fd5e0457 Bump openwebifpy to 4.2.7 (#123995)
* Bump openwebifpy to 4.2.6

* Bump openwebifpy to 4.2.7

---------

Co-authored-by: J. Nick Koston <nick@koston.org>
2024-08-16 18:02:23 +02:00
J. Nick Koston
0de89b42aa Ensure event entities are allowed for linked homekit config via YAML (#123994) 2024-08-16 18:02:20 +02:00
Erik Montnemery
e8914552b1 Bump pyhomeworks to 1.1.1 (#123981) 2024-08-16 18:02:17 +02:00
Glenn Waters
bfd302109e Environment Canada weather format fix (#123960)
* Add missing isoformat.

* Move fixture loading to common conftest.py

* Add deepcopy.
2024-08-16 18:02:14 +02:00
Andre Lengwenus
796ad47dd0 Bump pypck to 0.7.20 (#123948) 2024-08-16 18:02:11 +02:00
IceBotYT
e9915463a9 Bump LaCrosse View to 1.0.2, fixes blocking call (#123935) 2024-08-16 18:02:07 +02:00
Michael
59aecda8cf Fix PI-Hole update entity when no update available (#123930)
show installed version when no update available
2024-08-16 17:58:24 +02:00
J. Nick Koston
7d00ccbbbc Bump pylutron_caseta to 0.21.1 (#123924) 2024-08-16 17:58:21 +02:00
Álvaro Fernández Rojas
55a911120c Handle timeouts on Airzone DHCP config flow (#123869)
airzone: config_flow: dhcp: catch timeout exception

Signed-off-by: Álvaro Fernández Rojas <noltari@gmail.com>
2024-08-16 17:58:18 +02:00
Michael
80abf90c87 Fix translation for integration not found repair issue (#123868)
* correct step id in strings

* add issue_ignored string
2024-08-16 17:58:15 +02:00
Robert Resch
8539591307 Fix blocking I/O of SSLContext.load_default_certs in Ecovacs (#123856) 2024-08-16 17:58:12 +02:00
Michael
6234deeee1 Bump py-synologydsm-api to 2.4.5 (#123815)
bump py-synologydsm-api to 2.4.5
2024-08-16 17:57:59 +02:00
Louis Christ
81fabb1bfa Fix status update loop in bluesound integration (#123790)
* Fix retry loop for status update

* Use 'available' instead of _is_online

* Fix tests
2024-08-16 17:56:23 +02:00
Matthias Alphart
ff4e5859cf Fix KNX UI Light color temperature DPT (#123778) 2024-08-16 17:13:31 +02:00
Matthias Alphart
f2e42eafc7 Update xknx to 3.1.0 and fix climate read only mode (#123776) 2024-08-16 17:13:28 +02:00
Allen Porter
63f28ae2fe Bump python-nest-sdm to 4.0.6 (#123762) 2024-08-16 17:13:25 +02:00
Ian
5b6c6141c5 Bump py-nextbusnext to 2.0.4 (#123750) 2024-08-16 17:13:22 +02:00
Michael
396ef7a642 Fix error message in html5 (#123749) 2024-08-16 17:13:19 +02:00
Franck Nijhof
17f59a5665 Update wled to 0.20.2 (#123746) 2024-08-16 17:13:16 +02:00
David F. Mulcahey
10846dc97b Bump ZHA lib to 0.0.31 (#123743) 2024-08-16 17:13:13 +02:00
Álvaro Fernández Rojas
17bb00727d Update aioqsw to v0.4.1 (#123721) 2024-08-16 17:13:10 +02:00
Álvaro Fernández Rojas
bc021dbbc6 Update aioairzone-cloud to v0.6.2 (#123719) 2024-08-16 17:13:06 +02:00
Álvaro Fernández Rojas
e3cb9c0844 Update AEMET-OpenData to v0.5.4 (#123716) 2024-08-16 17:13:03 +02:00
David Knowles
050e2c9404 Bump pyschlage to 2024.8.0 (#123714) 2024-08-16 17:13:00 +02:00
Cyrill Raccaud
5ea447ba48 Fix startup block from Swiss public transport (#123704) 2024-08-16 17:12:57 +02:00
J. Nick Koston
a23b063922 Bump aiohomekit to 3.2.2 (#123669) 2024-08-16 17:12:53 +02:00
Aidan Timson
c269d57259 System Bridge package updates (#123657) 2024-08-16 17:12:50 +02:00
kingy444
d512f327c5 Bump pydaikin to 2.13.4 (#123623)
* bump pydaikin to 2.13.3

* bump pydaikin to 2.13.4
2024-08-16 17:12:46 +02:00
Maciej Bieniek
9bf8c5a54b Bump aioshelly to version 11.2.0 (#123602)
Bump aioshelly to version 11.2.0
2024-08-16 17:12:43 +02:00
J. Nick Koston
725e2f16f5 Ensure HomeKit connection is kept alive for devices that timeout too quickly (#123601) 2024-08-16 17:12:21 +02:00
G Johansson
d98d0cdad0 Change WoL to be secondary on device info (#123591) 2024-08-16 17:07:24 +02:00
Noah Husby
e2f4aa893f Fix secondary russound controller discovery failure (#123590) 2024-08-16 17:07:21 +02:00
Matthias Alphart
6b81fa89d3 Update knx-frontend to 2024.8.9.225351 (#123557) 2024-08-16 17:07:18 +02:00
J. Nick Koston
c886587915 Bump aiohttp to 3.10.3 (#123549) 2024-08-16 17:07:15 +02:00
Phill (pssc)
059d3eed98 Handle Yamaha ValueError (#123547)
* fix yamaha remove info logging

* ruff

* fix yamaha suppress rxv.find UnicodeDecodeError

* fix formatting

* make more realistic

* make more realistic and use parms

* add value error after more feedback

* ruff format

* Update homeassistant/components/yamaha/media_player.py

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>

* remove unused method

* add more debugging

* Increase discovery timeout, add more debug, allow config to override discovery for name

---------

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2024-08-16 17:07:12 +02:00
Erik Montnemery
f9ae2b4453 Drop violating rows before adding foreign constraints in DB schema 44 migration (#123454)
* Drop violating rows before adding foreign constraints

* Don't delete rows with null-references

* Only delete rows when integrityerror is caught

* Move restore of dropped foreign key constraints to a separate migration step

* Use aliases for tables

* Update homeassistant/components/recorder/migration.py

* Update test

* Don't use alias for table we're deleting from, improve test

* Fix MySQL

* Update instead of deleting in case of self references

* Improve log messages

* Batch updates

* Add workaround for unsupported LIMIT in PostgreSQL

* Simplify

---------

Co-authored-by: J. Nick Koston <nick@koston.org>
2024-08-16 17:07:09 +02:00
ilan
742c7ba23f Fix Madvr sensor values on startup (#122479)
* fix: add startup values

* fix: update snap

* fix: use native value to show None
2024-08-16 17:07:06 +02:00
wittypluck
e7ae5c5c24 Avoid Exception on Glances missing key (#114628)
* Handle case of sensors removed server side

* Update available state on value update

* Set uptime to None if key is missing

* Replace _attr_available by _data_valid
2024-08-16 17:07:02 +02:00
Franck Nijhof
ae4fc9504a 2024.8.1 (#123544) 2024-08-10 19:32:02 +02:00
Franck Nijhof
2ef337ec2e Bump version to 2024.8.1 2024-08-10 18:41:57 +02:00
cnico
723b7bd532 Upgrade chacon_dio_api to version 1.2.0 (#123528)
Upgrade api version 1.2.0 with the first user feedback improvement
2024-08-10 18:41:39 +02:00
Joost Lekkerkerker
4fdb11b0d8 Bump AirGradient to 0.8.0 (#123527) 2024-08-10 18:41:36 +02:00
Matt Way
fe2e6c37f4 Bump pydaikin to 2.13.2 (#123519) 2024-08-10 18:41:32 +02:00
Michael
4a75c55a8f Fix cleanup of old orphan device entries in AVM Fritz!Tools (#123516)
fix cleanup of old orphan device entries
2024-08-10 18:41:29 +02:00
Duco Sebel
dfb59469cf Bump python-homewizard-energy to 6.2.0 (#123514) 2024-08-10 18:41:26 +02:00
David F. Mulcahey
bdb2e1e2e9 Bump zha lib to 0.0.30 (#123499) 2024-08-10 18:41:22 +02:00
Franck Nijhof
c4f6f1e3d8 Update frontend to 20240809.0 (#123485) 2024-08-10 18:41:19 +02:00
Louis Christ
fb3eae54ea Fix startup blocked by bluesound integration (#123483) 2024-08-10 18:41:16 +02:00
Jake Martin
d3f8fce788 Bump monzopy to 1.3.2 (#123480) 2024-08-10 18:41:13 +02:00
Steve Easley
44e58a8c87 Bump pyjvcprojector to 1.0.12 to fix blocking call (#123473) 2024-08-10 18:41:09 +02:00
puddly
3d3879b0db Bump ZHA library to 0.0.29 (#123464)
* Bump zha to 0.0.29

* Pass the Core timezone to ZHA

* Add a unit test
2024-08-10 18:41:06 +02:00
Franck Nijhof
a8b1eb34f3 Support action YAML syntax in old-style notify groups (#123457) 2024-08-10 18:41:03 +02:00
Matrix
fd77058def Bump YoLink API to 0.4.7 (#123441) 2024-08-10 18:41:00 +02:00
Brett Adams
b147ca6c5b Add missing logger to Tessie (#123413) 2024-08-10 18:40:57 +02:00
dupondje
670c4cacfa Also migrate dsmr entries for devices with correct serial (#123407)
dsmr: also migrate entries for devices with correct serial

When the dsmr code could not find the serial_nr for the gas meter,
it creates the gas meter device with the entry_id as identifier.

But when there is a correct serial_nr, it will use that as identifier
for the dsmr gas device.

Now the migration code did not take this into account, so migration to
the new name failed since it didn't look for the device with correct
serial_nr.

This commit fixes this and adds a test for this.
2024-08-10 18:40:53 +02:00
J. Nick Koston
1ed0a89303 Bump aiohttp to 3.10.2 (#123394) 2024-08-10 18:40:50 +02:00
J. Nick Koston
ab0597da7b Ensure legacy event foreign key is removed from the states table when a previous rebuild failed (#123388)
* Ensure legacy event foreign key is removed from the states table

If the system ran out of disk space removing the FK, it would
fail. #121938 fixed that to try again, however that PR was made
ineffective by #122069 since it will never reach the check.

To solve this, the migration version is incremented to 2, and
the migration is no longer marked as done unless the rebuild
/fk removal is successful.

* fix logic for mysql

* fix test

* asserts

* coverage

* coverage

* narrow test

* fixes

* split tests

* should have skipped

* fixture must be used
2024-08-10 18:40:47 +02:00
Erik Montnemery
a3db6bc8fa Revert "Fix blocking I/O while validating config schema" (#123377) 2024-08-10 18:40:44 +02:00
Noah Husby
9bfc8f6e27 Bump aiorussound to 2.2.2 (#123319) 2024-08-10 18:40:41 +02:00
J. Nick Koston
6fddef2dc5 Fix doorbird with externally added events (#123313) 2024-08-10 18:40:38 +02:00
fustom
ec08a85aa0 Fix limit and order property for transmission integration (#123305) 2024-08-10 18:40:35 +02:00
Evgeny
de7af575c5 Bump OpenWeatherMap to 0.1.1 (#120178)
* add owm modes

* fix tests

* fix modes

* remove sensors

* Update homeassistant/components/openweathermap/sensor.py

Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>

---------

Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2024-08-10 18:40:32 +02:00
Tom Brien
d3831bae4e Add support for v3 Coinbase API (#116345)
* Add support for v3 Coinbase API

* Add deps

* Move tests
2024-08-10 18:40:28 +02:00
Franck Nijhof
86722ba05e 2024.8.0 (#123276) 2024-08-07 20:20:43 +02:00
Franck Nijhof
be4810731a Bump version to 2024.8.0 2024-08-07 19:04:33 +02:00
Franck Nijhof
ac6abb363c Bump version to 2024.8.0b9 2024-08-07 18:24:15 +02:00
Michael Hansen
5367886732 Bump intents to 2024.8.7 (#123295) 2024-08-07 18:24:08 +02:00
Stefan Agner
7a51d4ff62 Drop Matter Microwave Oven Mode select entity (#123294) 2024-08-07 18:24:05 +02:00
ashalita
ef564c537d Revert "Upgrade pycoolmasternet-async to 0.2.0" (#123286) 2024-08-07 18:24:02 +02:00
Franck Nijhof
082290b092 Bump version to 2024.8.0b8 2024-08-07 13:15:23 +02:00
Franck Nijhof
4a212791a2 Update wled to 0.20.1 (#123283) 2024-08-07 13:15:12 +02:00
Brett Adams
6bb55ce79e Add missing application credential to Tesla Fleet (#123271)
Co-authored-by: Franck Nijhof <frenck@frenck.nl>
2024-08-07 13:15:04 +02:00
Franck Nijhof
782ff12e6e Bump version to 2024.8.0b7 2024-08-07 11:26:03 +02:00
lunmay
af6f78a784 Fix typo on one of islamic_prayer_times calculation_method option (#123281) 2024-08-07 11:25:55 +02:00
Paulus Schoutsen
db32460f3b Reload conversation entries on update (#123279) 2024-08-07 11:25:52 +02:00
Erwin Douna
270990fe39 Tado change repair issue (#123256) 2024-08-07 11:25:48 +02:00
Franck Nijhof
a10fed9d72 Bump version to 2024.8.0b6 2024-08-07 10:22:39 +02:00
tronikos
cc5699bf08 Fix Google Cloud TTS not respecting config values (#123275) 2024-08-07 10:22:30 +02:00
Jesse Hills
ad674a1c2b Update ESPHome voice assistant pipeline log warning (#123269) 2024-08-07 10:22:27 +02:00
J. Nick Koston
b0269faae4 Allow non-admins to subscribe to newer registry update events (#123267) 2024-08-07 10:22:24 +02:00
starkillerOG
1143efedc5 Bump reolink-aio to 0.9.7 (#123263) 2024-08-07 10:22:21 +02:00
Matthias Alphart
9e75b63925 Update knx-frontend to 2024.8.6.211307 (#123261) 2024-08-07 10:22:18 +02:00
puddly
940327dccf Bump ZHA to 0.0.28 (#123259)
* Bump ZHA to 0.0.28

* Drop redundant radio schema conversion
2024-08-07 10:22:14 +02:00
Steve Repsher
0270026f7c Adapt static resource handler to aiohttp 3.10 (#123166) 2024-08-07 10:22:11 +02:00
Franck Nijhof
b636096ac3 Bump version to 2024.8.0b5 2024-08-06 18:08:19 +02:00
Franck Nijhof
a243ed5b23 Update frontend to 20240806.1 (#123252) 2024-08-06 18:07:49 +02:00
Joost Lekkerkerker
3cf3780587 Bump mficlient to 0.5.0 (#123250) 2024-08-06 18:06:50 +02:00
Robert Resch
3d0a0cf376 Bump deebot-client to 8.3.0 (#123249) 2024-08-06 18:05:00 +02:00
J. Nick Koston
7aae9d9ad3 Fix sense doing blocking I/O in the event loop (#123247) 2024-08-06 18:04:57 +02:00
Franck Nijhof
870bb7efd4 Mark FFmpeg integration as system type (#123241) 2024-08-06 18:04:53 +02:00
Robert Resch
35a6679ae9 Delete mobile_app cloudhook if not logged into the cloud (#123234) 2024-08-06 18:04:49 +02:00
Yehazkel
a09d0117b1 Fix Tami4 device name is None (#123156)
Co-authored-by: Robert Resch <robert@resch.dev>
2024-08-06 18:04:44 +02:00
Franck Nijhof
e9fe98f7f9 Bump version to 2024.8.0b4 2024-08-06 13:22:46 +02:00
Franck Nijhof
5b2e188b52 Mark Google Assistant integration as system type (#123233) 2024-08-06 13:22:03 +02:00
Franck Nijhof
c1953e938d Mark Alexa integration as system type (#123232) 2024-08-06 13:21:59 +02:00
Franck Nijhof
77bcbbcf53 Update frontend to 20240806.0 (#123230) 2024-08-06 12:51:24 +02:00
Joost Lekkerkerker
97587fae08 Bump yt-dlp to 2023.08.06 (#123229) 2024-08-06 12:51:21 +02:00
Matthias Alphart
01b54fe1a9 Update knx-frontend to 2024.8.6.85349 (#123226) 2024-08-06 12:51:17 +02:00
Clifford Roche
f796950493 Update greeclimate to 2.1.0 (#123210) 2024-08-06 12:51:14 +02:00
flopp999
495fd946bc Fix growatt server tlx battery api key (#123191) 2024-08-06 12:51:10 +02:00
Jesse Hills
6af1e25d7e Show project version as sw_version in ESPHome (#123183) 2024-08-06 12:51:07 +02:00
Jesse Hills
6d47a4d7e4 Add support for ESPHome update entities to be checked on demand (#123161) 2024-08-06 12:51:04 +02:00
Petro31
fd5533d719 Fix yamaha legacy receivers (#122985) 2024-08-06 12:50:59 +02:00
197 changed files with 3099 additions and 882 deletions

View File

@@ -18,9 +18,12 @@ from homeassistant.const import (
EVENT_THEMES_UPDATED,
)
from homeassistant.helpers.area_registry import EVENT_AREA_REGISTRY_UPDATED
from homeassistant.helpers.category_registry import EVENT_CATEGORY_REGISTRY_UPDATED
from homeassistant.helpers.device_registry import EVENT_DEVICE_REGISTRY_UPDATED
from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED
from homeassistant.helpers.floor_registry import EVENT_FLOOR_REGISTRY_UPDATED
from homeassistant.helpers.issue_registry import EVENT_REPAIRS_ISSUE_REGISTRY_UPDATED
from homeassistant.helpers.label_registry import EVENT_LABEL_REGISTRY_UPDATED
from homeassistant.util.event_type import EventType
# These are events that do not contain any sensitive data
@@ -41,4 +44,7 @@ SUBSCRIBE_ALLOWLIST: Final[set[EventType[Any] | str]] = {
EVENT_SHOPPING_LIST_UPDATED,
EVENT_STATE_CHANGED,
EVENT_THEMES_UPDATED,
EVENT_LABEL_REGISTRY_UPDATED,
EVENT_CATEGORY_REGISTRY_UPDATED,
EVENT_FLOOR_REGISTRY_UPDATED,
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/aemet",
"iot_class": "cloud_polling",
"loggers": ["aemet_opendata"],
"requirements": ["AEMET-OpenData==0.5.3"]
"requirements": ["AEMET-OpenData==0.5.4"]
}

View File

@@ -92,7 +92,9 @@ class AirGradientConfigFlow(ConfigFlow, domain=DOMAIN):
except AirGradientError:
errors["base"] = "cannot_connect"
else:
await self.async_set_unique_id(current_measures.serial_number)
await self.async_set_unique_id(
current_measures.serial_number, raise_on_progress=False
)
self._abort_if_unique_id_configured()
await self.set_configuration_source()
return self.async_create_entry(

View File

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/airgradient",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["airgradient==0.7.1"],
"requirements": ["airgradient==0.8.0"],
"zeroconf": ["_airgradient._tcp.local."]
}

View File

@@ -114,7 +114,7 @@ class AirZoneConfigFlow(ConfigFlow, domain=DOMAIN):
)
try:
await airzone.get_version()
except AirzoneError as err:
except (AirzoneError, TimeoutError) as err:
raise AbortFlow("cannot_connect") from err
return await self.async_step_discovered_connection()

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"iot_class": "cloud_push",
"loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.6.1"]
"requirements": ["aioairzone-cloud==0.6.2"]
}

View File

@@ -661,9 +661,12 @@ class RemoteCapabilities(AlexaEntity):
def interfaces(self) -> Generator[AlexaCapability]:
"""Yield the supported interfaces."""
yield AlexaPowerController(self.entity)
yield AlexaModeController(
self.entity, instance=f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}"
)
supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
activities = self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST) or []
if activities and supported & remote.RemoteEntityFeature.ACTIVITY:
yield AlexaModeController(
self.entity, instance=f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}"
)
yield AlexaEndpointHealth(self.hass, self.entity)
yield Alexa(self.entity)

View File

@@ -5,5 +5,6 @@
"codeowners": ["@home-assistant/cloud", "@ochlocracy", "@jbouwh"],
"dependencies": ["http"],
"documentation": "https://www.home-assistant.io/integrations/alexa",
"integration_type": "system",
"iot_class": "cloud_push"
}

View File

@@ -8,8 +8,8 @@ from typing import Any
import aiohttp
import voluptuous as vol
from yalexs.authenticator import ValidationResult
from yalexs.const import BRANDS, DEFAULT_BRAND
from yalexs.authenticator_common import ValidationResult
from yalexs.const import BRANDS_WITHOUT_OAUTH, DEFAULT_BRAND
from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
@@ -118,7 +118,7 @@ class AugustConfigFlow(ConfigFlow, domain=DOMAIN):
vol.Required(
CONF_BRAND,
default=self._user_auth_details.get(CONF_BRAND, DEFAULT_BRAND),
): vol.In(BRANDS),
): vol.In(BRANDS_WITHOUT_OAUTH),
vol.Required(
CONF_LOGIN_METHOD,
default=self._user_auth_details.get(
@@ -208,7 +208,7 @@ class AugustConfigFlow(ConfigFlow, domain=DOMAIN):
vol.Required(
CONF_BRAND,
default=self._user_auth_details.get(CONF_BRAND, DEFAULT_BRAND),
): vol.In(BRANDS),
): vol.In(BRANDS_WITHOUT_OAUTH),
vol.Required(CONF_PASSWORD): str,
}
),

View File

@@ -28,5 +28,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==6.4.3", "yalexs-ble==2.4.3"]
"requirements": ["yalexs==8.4.1", "yalexs-ble==2.4.3"]
}

View File

@@ -244,7 +244,6 @@ class BluesoundPlayer(MediaPlayerEntity):
self._status: Status | None = None
self._inputs: list[Input] = []
self._presets: list[Preset] = []
self._is_online = False
self._muted = False
self._master: BluesoundPlayer | None = None
self._is_master = False
@@ -312,26 +311,33 @@ class BluesoundPlayer(MediaPlayerEntity):
async def _start_poll_command(self):
"""Loop which polls the status of the player."""
try:
while True:
while True:
try:
await self.async_update_status()
except (TimeoutError, ClientError):
_LOGGER.error("Node %s:%s is offline, retrying later", self.name, self.port)
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
self.start_polling()
except CancelledError:
_LOGGER.debug("Stopping the polling of node %s:%s", self.name, self.port)
except Exception:
_LOGGER.exception("Unexpected error in %s:%s", self.name, self.port)
raise
except (TimeoutError, ClientError):
_LOGGER.error(
"Node %s:%s is offline, retrying later", self.host, self.port
)
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
except CancelledError:
_LOGGER.debug(
"Stopping the polling of node %s:%s", self.host, self.port
)
return
except Exception:
_LOGGER.exception(
"Unexpected error in %s:%s, retrying later", self.host, self.port
)
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
async def async_added_to_hass(self) -> None:
"""Start the polling task."""
await super().async_added_to_hass()
self._polling_task = self.hass.async_create_task(self._start_poll_command())
self._polling_task = self.hass.async_create_background_task(
self._start_poll_command(),
name=f"bluesound.polling_{self.host}:{self.port}",
)
async def async_will_remove_from_hass(self) -> None:
"""Stop the polling task."""
@@ -345,7 +351,7 @@ class BluesoundPlayer(MediaPlayerEntity):
async def async_update(self) -> None:
"""Update internal status of the entity."""
if not self._is_online:
if not self.available:
return
with suppress(TimeoutError):
@@ -362,7 +368,7 @@ class BluesoundPlayer(MediaPlayerEntity):
try:
status = await self._player.status(etag=etag, poll_timeout=120, timeout=125)
self._is_online = True
self._attr_available = True
self._last_status_update = dt_util.utcnow()
self._status = status
@@ -391,7 +397,7 @@ class BluesoundPlayer(MediaPlayerEntity):
self.async_write_ha_state()
except (TimeoutError, ClientError):
self._is_online = False
self._attr_available = False
self._last_status_update = None
self._status = None
self.async_write_ha_state()

View File

@@ -16,7 +16,7 @@
"requirements": [
"bleak==0.22.2",
"bleak-retry-connector==3.5.0",
"bluetooth-adapters==0.19.3",
"bluetooth-adapters==0.19.4",
"bluetooth-auto-recovery==1.4.2",
"bluetooth-data-tools==1.19.4",
"dbus-fast==2.22.1",

View File

@@ -7,7 +7,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pybravia"],
"requirements": ["pybravia==0.3.3"],
"requirements": ["pybravia==0.3.4"],
"ssdp": [
{
"st": "urn:schemas-sony-com:service:ScalarWebAPI:1",

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/chacon_dio",
"iot_class": "cloud_push",
"loggers": ["dio_chacon_api"],
"requirements": ["dio-chacon-wifi-api==1.1.0"]
"requirements": ["dio-chacon-wifi-api==1.2.0"]
}

View File

@@ -5,7 +5,9 @@ from __future__ import annotations
from datetime import timedelta
import logging
from coinbase.wallet.client import Client
from coinbase.rest import RESTClient
from coinbase.rest.rest_base import HTTPError
from coinbase.wallet.client import Client as LegacyClient
from coinbase.wallet.error import AuthenticationError
from homeassistant.config_entries import ConfigEntry
@@ -15,8 +17,23 @@ from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.util import Throttle
from .const import (
ACCOUNT_IS_VAULT,
API_ACCOUNT_AMOUNT,
API_ACCOUNT_AVALIABLE,
API_ACCOUNT_BALANCE,
API_ACCOUNT_CURRENCY,
API_ACCOUNT_CURRENCY_CODE,
API_ACCOUNT_HOLD,
API_ACCOUNT_ID,
API_ACCOUNTS_DATA,
API_ACCOUNT_NAME,
API_ACCOUNT_VALUE,
API_ACCOUNTS,
API_DATA,
API_RATES_CURRENCY,
API_RESOURCE_TYPE,
API_TYPE_VAULT,
API_V3_ACCOUNT_ID,
API_V3_TYPE_VAULT,
CONF_CURRENCIES,
CONF_EXCHANGE_BASE,
CONF_EXCHANGE_RATES,
@@ -59,9 +76,16 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
def create_and_update_instance(entry: ConfigEntry) -> CoinbaseData:
"""Create and update a Coinbase Data instance."""
client = Client(entry.data[CONF_API_KEY], entry.data[CONF_API_TOKEN])
if "organizations" not in entry.data[CONF_API_KEY]:
client = LegacyClient(entry.data[CONF_API_KEY], entry.data[CONF_API_TOKEN])
version = "v2"
else:
client = RESTClient(
api_key=entry.data[CONF_API_KEY], api_secret=entry.data[CONF_API_TOKEN]
)
version = "v3"
base_rate = entry.options.get(CONF_EXCHANGE_BASE, "USD")
instance = CoinbaseData(client, base_rate)
instance = CoinbaseData(client, base_rate, version)
instance.update()
return instance
@@ -86,42 +110,83 @@ async def update_listener(hass: HomeAssistant, config_entry: ConfigEntry) -> Non
registry.async_remove(entity.entity_id)
def get_accounts(client):
def get_accounts(client, version):
"""Handle paginated accounts."""
response = client.get_accounts()
accounts = response[API_ACCOUNTS_DATA]
next_starting_after = response.pagination.next_starting_after
while next_starting_after:
response = client.get_accounts(starting_after=next_starting_after)
accounts += response[API_ACCOUNTS_DATA]
if version == "v2":
accounts = response[API_DATA]
next_starting_after = response.pagination.next_starting_after
return accounts
while next_starting_after:
response = client.get_accounts(starting_after=next_starting_after)
accounts += response[API_DATA]
next_starting_after = response.pagination.next_starting_after
return [
{
API_ACCOUNT_ID: account[API_ACCOUNT_ID],
API_ACCOUNT_NAME: account[API_ACCOUNT_NAME],
API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY][
API_ACCOUNT_CURRENCY_CODE
],
API_ACCOUNT_AMOUNT: account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT],
ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_TYPE_VAULT,
}
for account in accounts
]
accounts = response[API_ACCOUNTS]
while response["has_next"]:
response = client.get_accounts(cursor=response["cursor"])
accounts += response["accounts"]
return [
{
API_ACCOUNT_ID: account[API_V3_ACCOUNT_ID],
API_ACCOUNT_NAME: account[API_ACCOUNT_NAME],
API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY],
API_ACCOUNT_AMOUNT: account[API_ACCOUNT_AVALIABLE][API_ACCOUNT_VALUE]
+ account[API_ACCOUNT_HOLD][API_ACCOUNT_VALUE],
ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_V3_TYPE_VAULT,
}
for account in accounts
]
class CoinbaseData:
"""Get the latest data and update the states."""
def __init__(self, client, exchange_base):
def __init__(self, client, exchange_base, version):
"""Init the coinbase data object."""
self.client = client
self.accounts = None
self.exchange_base = exchange_base
self.exchange_rates = None
self.user_id = self.client.get_current_user()[API_ACCOUNT_ID]
if version == "v2":
self.user_id = self.client.get_current_user()[API_ACCOUNT_ID]
else:
self.user_id = (
"v3_" + client.get_portfolios()["portfolios"][0][API_V3_ACCOUNT_ID]
)
self.api_version = version
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from coinbase."""
try:
self.accounts = get_accounts(self.client)
self.exchange_rates = self.client.get_exchange_rates(
currency=self.exchange_base
)
except AuthenticationError as coinbase_error:
self.accounts = get_accounts(self.client, self.api_version)
if self.api_version == "v2":
self.exchange_rates = self.client.get_exchange_rates(
currency=self.exchange_base
)
else:
self.exchange_rates = self.client.get(
"/v2/exchange-rates",
params={API_RATES_CURRENCY: self.exchange_base},
)[API_DATA]
except (AuthenticationError, HTTPError) as coinbase_error:
_LOGGER.error(
"Authentication error connecting to coinbase: %s", coinbase_error
)

View File

@@ -5,7 +5,9 @@ from __future__ import annotations
import logging
from typing import Any
from coinbase.wallet.client import Client
from coinbase.rest import RESTClient
from coinbase.rest.rest_base import HTTPError
from coinbase.wallet.client import Client as LegacyClient
from coinbase.wallet.error import AuthenticationError
import voluptuous as vol
@@ -15,18 +17,17 @@ from homeassistant.config_entries import (
ConfigFlowResult,
OptionsFlow,
)
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from . import get_accounts
from .const import (
ACCOUNT_IS_VAULT,
API_ACCOUNT_CURRENCY,
API_ACCOUNT_CURRENCY_CODE,
API_DATA,
API_RATES,
API_RESOURCE_TYPE,
API_TYPE_VAULT,
CONF_CURRENCIES,
CONF_EXCHANGE_BASE,
CONF_EXCHANGE_PRECISION,
@@ -49,8 +50,11 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
def get_user_from_client(api_key, api_token):
"""Get the user name from Coinbase API credentials."""
client = Client(api_key, api_token)
return client.get_current_user()
if "organizations" not in api_key:
client = LegacyClient(api_key, api_token)
return client.get_current_user()["name"]
client = RESTClient(api_key=api_key, api_secret=api_token)
return client.get_portfolios()["portfolios"][0]["name"]
async def validate_api(hass: HomeAssistant, data):
@@ -60,11 +64,13 @@ async def validate_api(hass: HomeAssistant, data):
user = await hass.async_add_executor_job(
get_user_from_client, data[CONF_API_KEY], data[CONF_API_TOKEN]
)
except AuthenticationError as error:
if "api key" in str(error):
except (AuthenticationError, HTTPError) as error:
if "api key" in str(error) or " 401 Client Error" in str(error):
_LOGGER.debug("Coinbase rejected API credentials due to an invalid API key")
raise InvalidKey from error
if "invalid signature" in str(error):
if "invalid signature" in str(
error
) or "'Could not deserialize key data" in str(error):
_LOGGER.debug(
"Coinbase rejected API credentials due to an invalid API secret"
)
@@ -73,8 +79,8 @@ async def validate_api(hass: HomeAssistant, data):
raise InvalidAuth from error
except ConnectionError as error:
raise CannotConnect from error
return {"title": user["name"]}
api_version = "v3" if "organizations" in data[CONF_API_KEY] else "v2"
return {"title": user, "api_version": api_version}
async def validate_options(hass: HomeAssistant, config_entry: ConfigEntry, options):
@@ -82,14 +88,20 @@ async def validate_options(hass: HomeAssistant, config_entry: ConfigEntry, optio
client = hass.data[DOMAIN][config_entry.entry_id].client
accounts = await hass.async_add_executor_job(get_accounts, client)
accounts = await hass.async_add_executor_job(
get_accounts, client, config_entry.data.get("api_version", "v2")
)
accounts_currencies = [
account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]
account[API_ACCOUNT_CURRENCY]
for account in accounts
if account[API_RESOURCE_TYPE] != API_TYPE_VAULT
if not account[ACCOUNT_IS_VAULT]
]
available_rates = await hass.async_add_executor_job(client.get_exchange_rates)
if config_entry.data.get("api_version", "v2") == "v2":
available_rates = await hass.async_add_executor_job(client.get_exchange_rates)
else:
resp = await hass.async_add_executor_job(client.get, "/v2/exchange-rates")
available_rates = resp[API_DATA]
if CONF_CURRENCIES in options:
for currency in options[CONF_CURRENCIES]:
if currency not in accounts_currencies:
@@ -134,6 +146,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
user_input[CONF_API_VERSION] = info["api_version"]
return self.async_create_entry(title=info["title"], data=user_input)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors

View File

@@ -1,5 +1,7 @@
"""Constants used for Coinbase."""
ACCOUNT_IS_VAULT = "is_vault"
CONF_CURRENCIES = "account_balance_currencies"
CONF_EXCHANGE_BASE = "exchange_base"
CONF_EXCHANGE_RATES = "exchange_rate_currencies"
@@ -10,18 +12,25 @@ DOMAIN = "coinbase"
# Constants for data returned by Coinbase API
API_ACCOUNT_AMOUNT = "amount"
API_ACCOUNT_AVALIABLE = "available_balance"
API_ACCOUNT_BALANCE = "balance"
API_ACCOUNT_CURRENCY = "currency"
API_ACCOUNT_CURRENCY_CODE = "code"
API_ACCOUNT_HOLD = "hold"
API_ACCOUNT_ID = "id"
API_ACCOUNT_NATIVE_BALANCE = "balance"
API_ACCOUNT_NAME = "name"
API_ACCOUNTS_DATA = "data"
API_ACCOUNT_VALUE = "value"
API_ACCOUNTS = "accounts"
API_DATA = "data"
API_RATES = "rates"
API_RATES_CURRENCY = "currency"
API_RESOURCE_PATH = "resource_path"
API_RESOURCE_TYPE = "type"
API_TYPE_VAULT = "vault"
API_USD = "USD"
API_V3_ACCOUNT_ID = "uuid"
API_V3_TYPE_VAULT = "ACCOUNT_TYPE_VAULT"
WALLETS = {
"1INCH": "1INCH",

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/coinbase",
"iot_class": "cloud_polling",
"loggers": ["coinbase"],
"requirements": ["coinbase==2.1.0"]
"requirements": ["coinbase==2.1.0", "coinbase-advanced-py==1.2.2"]
}

View File

@@ -12,15 +12,12 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import CoinbaseData
from .const import (
ACCOUNT_IS_VAULT,
API_ACCOUNT_AMOUNT,
API_ACCOUNT_BALANCE,
API_ACCOUNT_CURRENCY,
API_ACCOUNT_CURRENCY_CODE,
API_ACCOUNT_ID,
API_ACCOUNT_NAME,
API_RATES,
API_RESOURCE_TYPE,
API_TYPE_VAULT,
CONF_CURRENCIES,
CONF_EXCHANGE_PRECISION,
CONF_EXCHANGE_PRECISION_DEFAULT,
@@ -31,6 +28,7 @@ from .const import (
_LOGGER = logging.getLogger(__name__)
ATTR_NATIVE_BALANCE = "Balance in native currency"
ATTR_API_VERSION = "API Version"
CURRENCY_ICONS = {
"BTC": "mdi:currency-btc",
@@ -56,9 +54,9 @@ async def async_setup_entry(
entities: list[SensorEntity] = []
provided_currencies: list[str] = [
account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]
account[API_ACCOUNT_CURRENCY]
for account in instance.accounts
if account[API_RESOURCE_TYPE] != API_TYPE_VAULT
if not account[ACCOUNT_IS_VAULT]
]
desired_currencies: list[str] = []
@@ -73,6 +71,11 @@ async def async_setup_entry(
)
for currency in desired_currencies:
_LOGGER.debug(
"Attempting to set up %s account sensor with %s API",
currency,
instance.api_version,
)
if currency not in provided_currencies:
_LOGGER.warning(
(
@@ -85,12 +88,17 @@ async def async_setup_entry(
entities.append(AccountSensor(instance, currency))
if CONF_EXCHANGE_RATES in config_entry.options:
entities.extend(
ExchangeRateSensor(
instance, rate, exchange_base_currency, exchange_precision
for rate in config_entry.options[CONF_EXCHANGE_RATES]:
_LOGGER.debug(
"Attempting to set up %s account sensor with %s API",
rate,
instance.api_version,
)
entities.append(
ExchangeRateSensor(
instance, rate, exchange_base_currency, exchange_precision
)
)
for rate in config_entry.options[CONF_EXCHANGE_RATES]
)
async_add_entities(entities)
@@ -105,26 +113,21 @@ class AccountSensor(SensorEntity):
self._coinbase_data = coinbase_data
self._currency = currency
for account in coinbase_data.accounts:
if (
account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE] != currency
or account[API_RESOURCE_TYPE] == API_TYPE_VAULT
):
if account[API_ACCOUNT_CURRENCY] != currency or account[ACCOUNT_IS_VAULT]:
continue
self._attr_name = f"Coinbase {account[API_ACCOUNT_NAME]}"
self._attr_unique_id = (
f"coinbase-{account[API_ACCOUNT_ID]}-wallet-"
f"{account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]}"
f"{account[API_ACCOUNT_CURRENCY]}"
)
self._attr_native_value = account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT]
self._attr_native_unit_of_measurement = account[API_ACCOUNT_CURRENCY][
API_ACCOUNT_CURRENCY_CODE
]
self._attr_native_value = account[API_ACCOUNT_AMOUNT]
self._attr_native_unit_of_measurement = account[API_ACCOUNT_CURRENCY]
self._attr_icon = CURRENCY_ICONS.get(
account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE],
account[API_ACCOUNT_CURRENCY],
DEFAULT_COIN_ICON,
)
self._native_balance = round(
float(account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT])
float(account[API_ACCOUNT_AMOUNT])
/ float(coinbase_data.exchange_rates[API_RATES][currency]),
2,
)
@@ -144,21 +147,26 @@ class AccountSensor(SensorEntity):
"""Return the state attributes of the sensor."""
return {
ATTR_NATIVE_BALANCE: f"{self._native_balance} {self._coinbase_data.exchange_base}",
ATTR_API_VERSION: self._coinbase_data.api_version,
}
def update(self) -> None:
"""Get the latest state of the sensor."""
_LOGGER.debug(
"Updating %s account sensor with %s API",
self._currency,
self._coinbase_data.api_version,
)
self._coinbase_data.update()
for account in self._coinbase_data.accounts:
if (
account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]
!= self._currency
or account[API_RESOURCE_TYPE] == API_TYPE_VAULT
account[API_ACCOUNT_CURRENCY] != self._currency
or account[ACCOUNT_IS_VAULT]
):
continue
self._attr_native_value = account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT]
self._attr_native_value = account[API_ACCOUNT_AMOUNT]
self._native_balance = round(
float(account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT])
float(account[API_ACCOUNT_AMOUNT])
/ float(self._coinbase_data.exchange_rates[API_RATES][self._currency]),
2,
)
@@ -202,8 +210,13 @@ class ExchangeRateSensor(SensorEntity):
def update(self) -> None:
"""Get the latest state of the sensor."""
_LOGGER.debug(
"Updating %s rate sensor with %s API",
self._currency,
self._coinbase_data.api_version,
)
self._coinbase_data.update()
self._attr_native_value = round(
1 / float(self._coinbase_data.exchange_rates.rates[self._currency]),
1 / float(self._coinbase_data.exchange_rates[API_RATES][self._currency]),
self._precision,
)

View File

@@ -1,12 +1,11 @@
"""Support for Concord232 alarm control panels."""
# mypy: ignore-errors
from __future__ import annotations
import datetime
import logging
# from concord232 import client as concord232_client
from concord232 import client as concord232_client
import requests
import voluptuous as vol

View File

@@ -1,12 +1,11 @@
"""Support for exposing Concord232 elements as sensors."""
# mypy: ignore-errors
from __future__ import annotations
import datetime
import logging
# from concord232 import client as concord232_client
from concord232 import client as concord232_client
import requests
import voluptuous as vol

View File

@@ -2,9 +2,8 @@
"domain": "concord232",
"name": "Concord232",
"codeowners": [],
"disabled": "This integration is disabled because it uses non-open source code to operate.",
"documentation": "https://www.home-assistant.io/integrations/concord232",
"iot_class": "local_polling",
"loggers": ["concord232", "stevedore"],
"requirements": ["concord232==0.15"]
"requirements": ["concord232==0.15.1"]
}

View File

@@ -1,5 +0,0 @@
extend = "../../../pyproject.toml"
lint.extend-ignore = [
"F821"
]

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==1.7.4", "home-assistant-intents==2024.7.29"]
"requirements": ["hassil==1.7.4", "home-assistant-intents==2024.8.7"]
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/coolmaster",
"iot_class": "local_polling",
"loggers": ["pycoolmasternet_async"],
"requirements": ["pycoolmasternet-async==0.2.0"]
"requirements": ["pycoolmasternet-async==0.1.5"]
}

View File

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/daikin",
"iot_class": "local_polling",
"loggers": ["pydaikin"],
"requirements": ["pydaikin==2.13.1"],
"requirements": ["pydaikin==2.13.4"],
"zeroconf": ["_dkapi._tcp.local."]
}

View File

@@ -195,7 +195,7 @@ class ConfiguredDoorBird:
title: str | None = data.get("title")
if not title or not title.startswith("Home Assistant"):
continue
event = title.split("(")[1].strip(")")
event = title.partition("(")[2].strip(")")
if input_type := favorite_input_type.get(identifier):
events.append(DoorbirdEvent(event, input_type))
elif input_type := default_event_types.get(event):

View File

@@ -431,41 +431,42 @@ def rename_old_gas_to_mbus(
) -> None:
"""Rename old gas sensor to mbus variant."""
dev_reg = dr.async_get(hass)
device_entry_v1 = dev_reg.async_get_device(identifiers={(DOMAIN, entry.entry_id)})
if device_entry_v1 is not None:
device_id = device_entry_v1.id
for dev_id in (mbus_device_id, entry.entry_id):
device_entry_v1 = dev_reg.async_get_device(identifiers={(DOMAIN, dev_id)})
if device_entry_v1 is not None:
device_id = device_entry_v1.id
ent_reg = er.async_get(hass)
entries = er.async_entries_for_device(ent_reg, device_id)
ent_reg = er.async_get(hass)
entries = er.async_entries_for_device(ent_reg, device_id)
for entity in entries:
if entity.unique_id.endswith(
"belgium_5min_gas_meter_reading"
) or entity.unique_id.endswith("hourly_gas_meter_reading"):
try:
ent_reg.async_update_entity(
entity.entity_id,
new_unique_id=mbus_device_id,
device_id=mbus_device_id,
)
except ValueError:
LOGGER.debug(
"Skip migration of %s because it already exists",
entity.entity_id,
)
else:
LOGGER.debug(
"Migrated entity %s from unique id %s to %s",
entity.entity_id,
entity.unique_id,
mbus_device_id,
)
# Cleanup old device
dev_entities = er.async_entries_for_device(
ent_reg, device_id, include_disabled_entities=True
)
if not dev_entities:
dev_reg.async_remove_device(device_id)
for entity in entries:
if entity.unique_id.endswith(
"belgium_5min_gas_meter_reading"
) or entity.unique_id.endswith("hourly_gas_meter_reading"):
try:
ent_reg.async_update_entity(
entity.entity_id,
new_unique_id=mbus_device_id,
device_id=mbus_device_id,
)
except ValueError:
LOGGER.debug(
"Skip migration of %s because it already exists",
entity.entity_id,
)
else:
LOGGER.debug(
"Migrated entity %s from unique id %s to %s",
entity.entity_id,
entity.unique_id,
mbus_device_id,
)
# Cleanup old device
dev_entities = er.async_entries_for_device(
ent_reg, device_id, include_disabled_entities=True
)
if not dev_entities:
dev_reg.async_remove_device(device_id)
def is_supported_description(

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
from functools import partial
import logging
import ssl
from typing import Any, cast
@@ -105,11 +106,14 @@ async def _validate_input(
if not user_input.get(CONF_VERIFY_MQTT_CERTIFICATE, True) and mqtt_url:
ssl_context = get_default_no_verify_context()
mqtt_config = create_mqtt_config(
device_id=device_id,
country=country,
override_mqtt_url=mqtt_url,
ssl_context=ssl_context,
mqtt_config = await hass.async_add_executor_job(
partial(
create_mqtt_config,
device_id=device_id,
country=country,
override_mqtt_url=mqtt_url,
ssl_context=ssl_context,
)
)
client = MqttClient(mqtt_config, authenticator)

View File

@@ -3,6 +3,7 @@
from __future__ import annotations
from collections.abc import Mapping
from functools import partial
import logging
import ssl
from typing import Any
@@ -64,32 +65,28 @@ class EcovacsController:
if not config.get(CONF_VERIFY_MQTT_CERTIFICATE, True) and mqtt_url:
ssl_context = get_default_no_verify_context()
self._mqtt = MqttClient(
create_mqtt_config(
device_id=self._device_id,
country=country,
override_mqtt_url=mqtt_url,
ssl_context=ssl_context,
),
self._authenticator,
self._mqtt_config_fn = partial(
create_mqtt_config,
device_id=self._device_id,
country=country,
override_mqtt_url=mqtt_url,
ssl_context=ssl_context,
)
self._mqtt_client: MqttClient | None = None
self._added_legacy_entities: set[str] = set()
async def initialize(self) -> None:
"""Init controller."""
mqtt_config_verfied = False
try:
devices = await self._api_client.get_devices()
credentials = await self._authenticator.authenticate()
for device_config in devices:
if isinstance(device_config, DeviceInfo):
# MQTT device
if not mqtt_config_verfied:
await self._mqtt.verify_config()
mqtt_config_verfied = True
device = Device(device_config, self._authenticator)
await device.initialize(self._mqtt)
mqtt = await self._get_mqtt_client()
await device.initialize(mqtt)
self._devices.append(device)
else:
# Legacy device
@@ -116,7 +113,8 @@ class EcovacsController:
await device.teardown()
for legacy_device in self._legacy_devices:
await self._hass.async_add_executor_job(legacy_device.disconnect)
await self._mqtt.disconnect()
if self._mqtt_client is not None:
await self._mqtt_client.disconnect()
await self._authenticator.teardown()
def add_legacy_entity(self, device: VacBot, component: str) -> None:
@@ -127,6 +125,16 @@ class EcovacsController:
"""Check if legacy entity is added."""
return f"{device.vacuum['did']}_{component}" in self._added_legacy_entities
async def _get_mqtt_client(self) -> MqttClient:
"""Return validated MQTT client."""
if self._mqtt_client is None:
config = await self._hass.async_add_executor_job(self._mqtt_config_fn)
mqtt = MqttClient(config, self._authenticator)
await mqtt.verify_config()
self._mqtt_client = mqtt
return self._mqtt_client
@property
def devices(self) -> list[Device]:
"""Return devices."""

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.10", "deebot-client==8.2.0"]
"requirements": ["py-sucks==0.9.10", "deebot-client==8.3.0"]
}

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["openwebif"],
"requirements": ["openwebifpy==4.2.5"]
"requirements": ["openwebifpy==4.2.7"]
}

View File

@@ -2,6 +2,8 @@
from __future__ import annotations
from typing import Any
from homeassistant.components.weather import (
ATTR_CONDITION_CLEAR_NIGHT,
ATTR_CONDITION_CLOUDY,
@@ -190,10 +192,12 @@ def get_forecast(ec_data, hourly) -> list[Forecast] | None:
if not (half_days := ec_data.daily_forecasts):
return None
def get_day_forecast(fcst: list[dict[str, str]]) -> Forecast:
def get_day_forecast(
fcst: list[dict[str, Any]],
) -> Forecast:
high_temp = int(fcst[0]["temperature"]) if len(fcst) == 2 else None
return {
ATTR_FORECAST_TIME: fcst[0]["timestamp"],
ATTR_FORECAST_TIME: fcst[0]["timestamp"].isoformat(),
ATTR_FORECAST_NATIVE_TEMP: high_temp,
ATTR_FORECAST_NATIVE_TEMP_LOW: int(fcst[-1]["temperature"]),
ATTR_FORECAST_PRECIPITATION_PROBABILITY: int(

View File

@@ -346,7 +346,7 @@ class ESPHomeManager:
) -> int | None:
"""Start a voice assistant pipeline."""
if self.voice_assistant_pipeline is not None:
_LOGGER.warning("Voice assistant UDP server was not stopped")
_LOGGER.warning("Previous Voice assistant pipeline was not stopped")
self.voice_assistant_pipeline.stop()
self.voice_assistant_pipeline = None
@@ -654,12 +654,13 @@ def _async_setup_device_registry(
if device_info.manufacturer:
manufacturer = device_info.manufacturer
model = device_info.model
hw_version = None
if device_info.project_name:
project_name = device_info.project_name.split(".")
manufacturer = project_name[0]
model = project_name[1]
hw_version = device_info.project_version
sw_version = (
f"{device_info.project_version} (ESPHome {device_info.esphome_version})"
)
suggested_area = None
if device_info.suggested_area:
@@ -674,7 +675,6 @@ def _async_setup_device_registry(
manufacturer=manufacturer,
model=model,
sw_version=sw_version,
hw_version=hw_version,
suggested_area=suggested_area,
)
return device_entry.id

View File

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==24.6.2",
"aioesphomeapi==25.0.0",
"esphome-dashboard-api==1.2.3",
"bleak-esphome==1.0.0"
],

View File

@@ -8,6 +8,7 @@ from typing import Any
from aioesphomeapi import (
DeviceInfo as ESPHomeDeviceInfo,
EntityInfo,
UpdateCommand,
UpdateInfo,
UpdateState,
)
@@ -259,9 +260,15 @@ class ESPHomeUpdateEntity(EsphomeEntity[UpdateInfo, UpdateState], UpdateEntity):
"""Return the title of the update."""
return self._state.title
@convert_api_error_ha_error
async def async_update(self) -> None:
"""Command device to check for update."""
if self.available:
self._client.update_command(key=self._key, command=UpdateCommand.CHECK)
@convert_api_error_ha_error
async def async_install(
self, version: str | None, backup: bool, **kwargs: Any
) -> None:
"""Update the current value."""
self._client.update_command(key=self._key, install=True)
"""Command device to install update."""
self._client.update_command(key=self._key, command=UpdateCommand.INSTALL)

View File

@@ -3,5 +3,6 @@
"name": "FFmpeg",
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/ffmpeg",
"integration_type": "system",
"requirements": ["ha-ffmpeg==3.2.0"]
}

View File

@@ -653,8 +653,6 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
entities: list[er.RegistryEntry] = er.async_entries_for_config_entry(
entity_reg, config_entry.entry_id
)
orphan_macs: set[str] = set()
for entity in entities:
entry_mac = entity.unique_id.split("_")[0]
if (
@@ -662,17 +660,16 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
or "_internet_access" in entity.unique_id
) and entry_mac not in device_hosts:
_LOGGER.info("Removing orphan entity entry %s", entity.entity_id)
orphan_macs.add(entry_mac)
entity_reg.async_remove(entity.entity_id)
device_reg = dr.async_get(self.hass)
orphan_connections = {
(CONNECTION_NETWORK_MAC, dr.format_mac(mac)) for mac in orphan_macs
valid_connections = {
(CONNECTION_NETWORK_MAC, dr.format_mac(mac)) for mac in device_hosts
}
for device in dr.async_entries_for_config_entry(
device_reg, config_entry.entry_id
):
if any(con in device.connections for con in orphan_connections):
if not any(con in device.connections for con in valid_connections):
_LOGGER.debug("Removing obsolete device entry %s", device.name)
device_reg.async_update_device(
device.id, remove_config_entry_id=config_entry.entry_id

View File

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20240805.1"]
"requirements": ["home-assistant-frontend==20240809.0"]
}

View File

@@ -45,15 +45,13 @@ class GlancesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
except exceptions.GlancesApiError as err:
raise UpdateFailed from err
# Update computed values
uptime: datetime | None = self.data["computed"]["uptime"] if self.data else None
uptime: datetime | None = None
up_duration: timedelta | None = None
if up_duration := parse_duration(data.get("uptime")):
if "uptime" in data and (up_duration := parse_duration(data["uptime"])):
uptime = self.data["computed"]["uptime"] if self.data else None
# Update uptime if previous value is None or previous uptime is bigger than
# new uptime (i.e. server restarted)
if (
self.data is None
or self.data["computed"]["uptime_duration"] > up_duration
):
if uptime is None or self.data["computed"]["uptime_duration"] > up_duration:
uptime = utcnow() - up_duration
data["computed"] = {"uptime_duration": up_duration, "uptime": uptime}
return data or {}

View File

@@ -325,6 +325,7 @@ class GlancesSensor(CoordinatorEntity[GlancesDataUpdateCoordinator], SensorEntit
entity_description: GlancesSensorEntityDescription
_attr_has_entity_name = True
_data_valid: bool = False
def __init__(
self,
@@ -351,14 +352,7 @@ class GlancesSensor(CoordinatorEntity[GlancesDataUpdateCoordinator], SensorEntit
@property
def available(self) -> bool:
"""Set sensor unavailable when native value is invalid."""
if super().available:
return (
not self._numeric_state_expected
or isinstance(value := self.native_value, (int, float))
or isinstance(value, str)
and value.isnumeric()
)
return False
return super().available and self._data_valid
@callback
def _handle_coordinator_update(self) -> None:
@@ -368,10 +362,19 @@ class GlancesSensor(CoordinatorEntity[GlancesDataUpdateCoordinator], SensorEntit
def _update_native_value(self) -> None:
"""Update sensor native value from coordinator data."""
data = self.coordinator.data[self.entity_description.type]
if dict_val := data.get(self._sensor_label):
data = self.coordinator.data.get(self.entity_description.type)
if data and (dict_val := data.get(self._sensor_label)):
self._attr_native_value = dict_val.get(self.entity_description.key)
elif self.entity_description.key in data:
elif data and (self.entity_description.key in data):
self._attr_native_value = data.get(self.entity_description.key)
else:
self._attr_native_value = None
self._update_data_valid()
def _update_data_valid(self) -> None:
self._data_valid = self._attr_native_value is not None and (
not self._numeric_state_expected
or isinstance(self._attr_native_value, (int, float))
or isinstance(self._attr_native_value, str)
and self._attr_native_value.isnumeric()
)

View File

@@ -5,5 +5,6 @@
"codeowners": ["@home-assistant/cloud"],
"dependencies": ["http"],
"documentation": "https://www.home-assistant.io/integrations/google_assistant",
"integration_type": "system",
"iot_class": "cloud_push"
}

View File

@@ -59,7 +59,10 @@ def tts_options_schema(
vol.Optional(
CONF_GENDER,
description={"suggested_value": config_options.get(CONF_GENDER)},
default=texttospeech.SsmlVoiceGender.NEUTRAL.name, # type: ignore[attr-defined]
default=config_options.get(
CONF_GENDER,
texttospeech.SsmlVoiceGender.NEUTRAL.name, # type: ignore[attr-defined]
),
): vol.All(
vol.Upper,
SelectSelector(
@@ -72,7 +75,7 @@ def tts_options_schema(
vol.Optional(
CONF_VOICE,
description={"suggested_value": config_options.get(CONF_VOICE)},
default=DEFAULT_VOICE,
default=config_options.get(CONF_VOICE, DEFAULT_VOICE),
): SelectSelector(
SelectSelectorConfig(
mode=SelectSelectorMode.DROPDOWN,
@@ -82,7 +85,10 @@ def tts_options_schema(
vol.Optional(
CONF_ENCODING,
description={"suggested_value": config_options.get(CONF_ENCODING)},
default=texttospeech.AudioEncoding.MP3.name, # type: ignore[attr-defined]
default=config_options.get(
CONF_ENCODING,
texttospeech.AudioEncoding.MP3.name, # type: ignore[attr-defined]
),
): vol.All(
vol.Upper,
SelectSelector(
@@ -95,22 +101,22 @@ def tts_options_schema(
vol.Optional(
CONF_SPEED,
description={"suggested_value": config_options.get(CONF_SPEED)},
default=1.0,
default=config_options.get(CONF_SPEED, 1.0),
): NumberSelector(NumberSelectorConfig(min=0.25, max=4.0, step=0.01)),
vol.Optional(
CONF_PITCH,
description={"suggested_value": config_options.get(CONF_PITCH)},
default=0,
default=config_options.get(CONF_PITCH, 0),
): NumberSelector(NumberSelectorConfig(min=-20.0, max=20.0, step=0.1)),
vol.Optional(
CONF_GAIN,
description={"suggested_value": config_options.get(CONF_GAIN)},
default=0,
default=config_options.get(CONF_GAIN, 0),
): NumberSelector(NumberSelectorConfig(min=-96.0, max=16.0, step=0.1)),
vol.Optional(
CONF_PROFILES,
description={"suggested_value": config_options.get(CONF_PROFILES)},
default=[],
default=config_options.get(CONF_PROFILES, []),
): SelectSelector(
SelectSelectorConfig(
mode=SelectSelectorMode.DROPDOWN,
@@ -132,7 +138,7 @@ def tts_options_schema(
vol.Optional(
CONF_TEXT_TYPE,
description={"suggested_value": config_options.get(CONF_TEXT_TYPE)},
default="text",
default=config_options.get(CONF_TEXT_TYPE, "text"),
): vol.All(
vol.Lower,
SelectSelector(

View File

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/gree",
"iot_class": "local_polling",
"loggers": ["greeclimate"],
"requirements": ["greeclimate==2.0.0"]
"requirements": ["greeclimate==2.1.0"]
}

View File

@@ -22,8 +22,9 @@ from homeassistant.components.notify import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SERVICE,
CONF_ACTION,
CONF_ENTITIES,
CONF_SERVICE,
STATE_UNAVAILABLE,
)
from homeassistant.core import HomeAssistant, callback
@@ -36,11 +37,37 @@ from .entity import GroupEntity
CONF_SERVICES = "services"
def _backward_compat_schema(value: Any | None) -> Any:
"""Backward compatibility for notify service schemas."""
if not isinstance(value, dict):
return value
# `service` has been renamed to `action`
if CONF_SERVICE in value:
if CONF_ACTION in value:
raise vol.Invalid(
"Cannot specify both 'service' and 'action'. Please use 'action' only."
)
value[CONF_ACTION] = value.pop(CONF_SERVICE)
return value
PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_SERVICES): vol.All(
cv.ensure_list,
[{vol.Required(ATTR_SERVICE): cv.slug, vol.Optional(ATTR_DATA): dict}],
[
vol.All(
_backward_compat_schema,
{
vol.Required(CONF_ACTION): cv.slug,
vol.Optional(ATTR_DATA): dict,
},
)
],
)
}
)
@@ -88,7 +115,7 @@ class GroupNotifyPlatform(BaseNotificationService):
tasks.append(
asyncio.create_task(
self.hass.services.async_call(
DOMAIN, entity[ATTR_SERVICE], sending_payload, blocking=True
DOMAIN, entity[CONF_ACTION], sending_payload, blocking=True
)
)
)

View File

@@ -327,14 +327,14 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
GrowattSensorEntityDescription(
key="tlx_battery_2_discharge_w",
translation_key="tlx_battery_2_discharge_w",
api_key="bdc1DischargePower",
api_key="bdc2DischargePower",
native_unit_of_measurement=UnitOfPower.WATT,
device_class=SensorDeviceClass.POWER,
),
GrowattSensorEntityDescription(
key="tlx_battery_2_discharge_total",
translation_key="tlx_battery_2_discharge_total",
api_key="bdc1DischargeTotal",
api_key="bdc2DischargeTotal",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
@@ -376,14 +376,14 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
GrowattSensorEntityDescription(
key="tlx_battery_2_charge_w",
translation_key="tlx_battery_2_charge_w",
api_key="bdc1ChargePower",
api_key="bdc2ChargePower",
native_unit_of_measurement=UnitOfPower.WATT,
device_class=SensorDeviceClass.POWER,
),
GrowattSensorEntityDescription(
key="tlx_battery_2_charge_total",
translation_key="tlx_battery_2_charge_total",
api_key="bdc1ChargeTotal",
api_key="bdc2ChargeTotal",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,

View File

@@ -49,7 +49,14 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
try:
user_response = await self.api.user.get()
tasks_response = await self.api.tasks.user.get()
tasks_response.extend(await self.api.tasks.user.get(type="completedTodos"))
tasks_response.extend(
[
{"id": task["_id"], **task}
for task in await self.api.tasks.user.get(type="completedTodos")
if task.get("_id")
]
)
except ClientResponseError as error:
raise UpdateFailed(f"Error communicating with API: {error}") from error

View File

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
"requirements": ["holidays==0.53", "babel==2.15.0"]
"requirements": ["holidays==0.55", "babel==2.15.0"]
}

View File

@@ -60,8 +60,11 @@
"integration_not_found": {
"title": "Integration {domain} not found",
"fix_flow": {
"abort": {
"issue_ignored": "Not existing integration {domain} ignored."
},
"step": {
"remove_entries": {
"init": {
"title": "[%key:component::homeassistant::issues::integration_not_found::title%]",
"description": "The integration `{domain}` could not be found. This happens when a (custom) integration was removed from Home Assistant, but there are still configurations for this `integration`. Please use the buttons below to either remove the previous configurations for `{domain}` or ignore this.",
"menu_options": {

View File

@@ -22,6 +22,7 @@ from homeassistant.components import (
sensor,
)
from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN
from homeassistant.components.event import DOMAIN as EVENT_DOMAIN
from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN
from homeassistant.components.media_player import (
DOMAIN as MEDIA_PLAYER_DOMAIN,
@@ -167,9 +168,11 @@ CAMERA_SCHEMA = BASIC_INFO_SCHEMA.extend(
vol.Optional(
CONF_VIDEO_PACKET_SIZE, default=DEFAULT_VIDEO_PACKET_SIZE
): cv.positive_int,
vol.Optional(CONF_LINKED_MOTION_SENSOR): cv.entity_domain(binary_sensor.DOMAIN),
vol.Optional(CONF_LINKED_MOTION_SENSOR): cv.entity_domain(
[binary_sensor.DOMAIN, EVENT_DOMAIN]
),
vol.Optional(CONF_LINKED_DOORBELL_SENSOR): cv.entity_domain(
binary_sensor.DOMAIN
[binary_sensor.DOMAIN, EVENT_DOMAIN]
),
}
)

View File

@@ -845,21 +845,41 @@ class HKDevice:
async def async_update(self, now: datetime | None = None) -> None:
"""Poll state of all entities attached to this bridge/accessory."""
to_poll = self.pollable_characteristics
accessories = self.entity_map.accessories
if (
len(self.entity_map.accessories) == 1
len(accessories) == 1
and self.available
and not (self.pollable_characteristics - self.watchable_characteristics)
and not (to_poll - self.watchable_characteristics)
and self.pairing.is_available
and await self.pairing.controller.async_reachable(
self.unique_id, timeout=5.0
)
):
# If its a single accessory and all chars are watchable,
# we don't need to poll.
_LOGGER.debug("Accessory is reachable, skip polling: %s", self.unique_id)
return
# only poll the firmware version to keep the connection alive
# https://github.com/home-assistant/core/issues/123412
#
# Firmware revision is used here since iOS does this to keep camera
# connections alive, and the goal is to not regress
# https://github.com/home-assistant/core/issues/116143
# by polling characteristics that are not normally polled frequently
# and may not be tested by the device vendor.
#
_LOGGER.debug(
"Accessory is reachable, limiting poll to firmware version: %s",
self.unique_id,
)
first_accessory = accessories[0]
accessory_info = first_accessory.services.first(
service_type=ServicesTypes.ACCESSORY_INFORMATION
)
assert accessory_info is not None
firmware_iid = accessory_info[CharacteristicsTypes.FIRMWARE_REVISION].iid
to_poll = {(first_accessory.aid, firmware_iid)}
if not self.pollable_characteristics:
if not to_poll:
self.async_update_available_state()
_LOGGER.debug(
"HomeKit connection not polling any characteristics: %s", self.unique_id
@@ -892,9 +912,7 @@ class HKDevice:
_LOGGER.debug("Starting HomeKit device update: %s", self.unique_id)
try:
new_values_dict = await self.get_characteristics(
self.pollable_characteristics
)
new_values_dict = await self.get_characteristics(to_poll)
except AccessoryNotFoundError:
# Not only did the connection fail, but also the accessory is not
# visible on the network.

View File

@@ -14,6 +14,6 @@
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
"iot_class": "local_push",
"loggers": ["aiohomekit", "commentjson"],
"requirements": ["aiohomekit==3.2.1"],
"requirements": ["aiohomekit==3.2.3"],
"zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
}

View File

@@ -7,6 +7,6 @@
"iot_class": "local_polling",
"loggers": ["homewizard_energy"],
"quality_scale": "platinum",
"requirements": ["python-homewizard-energy==v6.1.1"],
"requirements": ["python-homewizard-energy==v6.2.0"],
"zeroconf": ["_hwenergy._tcp.local."]
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/homeworks",
"iot_class": "local_push",
"loggers": ["pyhomeworks"],
"requirements": ["pyhomeworks==1.1.0"]
"requirements": ["pyhomeworks==1.1.2"]
}

View File

@@ -533,7 +533,7 @@ class HTML5NotificationService(BaseNotificationService):
elif response.status_code > 399:
_LOGGER.error(
"There was an issue sending the notification %s: %s",
response.status,
response.status_code,
response.text,
)

View File

@@ -3,81 +3,46 @@
from __future__ import annotations
from collections.abc import Mapping
import mimetypes
from pathlib import Path
from typing import Final
from aiohttp import hdrs
from aiohttp.hdrs import CACHE_CONTROL, CONTENT_TYPE
from aiohttp.web import FileResponse, Request, StreamResponse
from aiohttp.web_exceptions import HTTPForbidden, HTTPNotFound
from aiohttp.web_fileresponse import CONTENT_TYPES, FALLBACK_CONTENT_TYPE
from aiohttp.web_urldispatcher import StaticResource
from lru import LRU
from .const import KEY_HASS
CACHE_TIME: Final = 31 * 86400 # = 1 month
CACHE_HEADER = f"public, max-age={CACHE_TIME}"
CACHE_HEADERS: Mapping[str, str] = {hdrs.CACHE_CONTROL: CACHE_HEADER}
PATH_CACHE: LRU[tuple[str, Path], tuple[Path | None, str | None]] = LRU(512)
def _get_file_path(rel_url: str, directory: Path) -> Path | None:
"""Return the path to file on disk or None."""
filename = Path(rel_url)
if filename.anchor:
# rel_url is an absolute name like
# /static/\\machine_name\c$ or /static/D:\path
# where the static dir is totally different
raise HTTPForbidden
filepath: Path = directory.joinpath(filename).resolve()
filepath.relative_to(directory)
# on opening a dir, load its contents if allowed
if filepath.is_dir():
return None
if filepath.is_file():
return filepath
raise FileNotFoundError
CACHE_HEADERS: Mapping[str, str] = {CACHE_CONTROL: CACHE_HEADER}
RESPONSE_CACHE: LRU[tuple[str, Path], tuple[Path, str]] = LRU(512)
class CachingStaticResource(StaticResource):
"""Static Resource handler that will add cache headers."""
async def _handle(self, request: Request) -> StreamResponse:
"""Return requested file from disk as a FileResponse."""
"""Wrap base handler to cache file path resolution and content type guess."""
rel_url = request.match_info["filename"]
key = (rel_url, self._directory)
if (filepath_content_type := PATH_CACHE.get(key)) is None:
hass = request.app[KEY_HASS]
try:
filepath = await hass.async_add_executor_job(_get_file_path, *key)
except (ValueError, FileNotFoundError) as error:
# relatively safe
raise HTTPNotFound from error
except HTTPForbidden:
# forbidden
raise
except Exception as error:
# perm error or other kind!
request.app.logger.exception("Unexpected exception")
raise HTTPNotFound from error
response: StreamResponse
content_type: str | None = None
if filepath is not None:
content_type = (mimetypes.guess_type(rel_url))[
0
] or "application/octet-stream"
PATH_CACHE[key] = (filepath, content_type)
if key in RESPONSE_CACHE:
file_path, content_type = RESPONSE_CACHE[key]
response = FileResponse(file_path, chunk_size=self._chunk_size)
response.headers[CONTENT_TYPE] = content_type
else:
filepath, content_type = filepath_content_type
if filepath and content_type:
return FileResponse(
filepath,
chunk_size=self._chunk_size,
headers={
hdrs.CACHE_CONTROL: CACHE_HEADER,
hdrs.CONTENT_TYPE: content_type,
},
response = await super()._handle(request)
if not isinstance(response, FileResponse):
# Must be directory index; ignore caching
return response
file_path = response._path # noqa: SLF001
response.content_type = (
CONTENT_TYPES.guess_type(file_path)[0] or FALLBACK_CONTENT_TYPE
)
# Cache actual header after setter construction.
content_type = response.headers[CONTENT_TYPE]
RESPONSE_CACHE[key] = (file_path, content_type)
raise HTTPForbidden if filepath is None else HTTPNotFound
response.headers[CACHE_CONTROL] = CACHE_HEADER
return response

View File

@@ -11,6 +11,6 @@
"iot_class": "local_push",
"loggers": ["aiohue"],
"quality_scale": "platinum",
"requirements": ["aiohue==4.7.2"],
"requirements": ["aiohue==4.7.3"],
"zeroconf": ["_hue._tcp.local."]
}

View File

@@ -80,9 +80,9 @@ async def async_setup_hue_events(bridge: HueBridge):
CONF_DEVICE_ID: device.id, # type: ignore[union-attr]
CONF_UNIQUE_ID: hue_resource.id,
CONF_TYPE: hue_resource.relative_rotary.rotary_report.action.value,
CONF_SUBTYPE: hue_resource.relative_rotary.last_event.rotation.direction.value,
CONF_DURATION: hue_resource.relative_rotary.last_event.rotation.duration,
CONF_STEPS: hue_resource.relative_rotary.last_event.rotation.steps,
CONF_SUBTYPE: hue_resource.relative_rotary.rotary_report.rotation.direction.value,
CONF_DURATION: hue_resource.relative_rotary.rotary_report.rotation.duration,
CONF_STEPS: hue_resource.relative_rotary.rotary_report.rotation.steps,
}
hass.bus.async_fire(ATTR_HUE_EVENT, data)

View File

@@ -31,12 +31,14 @@
"round": "[%key:component::integration::config::step::user::data::round%]",
"source": "[%key:component::integration::config::step::user::data::source%]",
"unit_prefix": "[%key:component::integration::config::step::user::data::unit_prefix%]",
"unit_time": "[%key:component::integration::config::step::user::data::unit_time%]"
"unit_time": "[%key:component::integration::config::step::user::data::unit_time%]",
"max_sub_interval": "[%key:component::integration::config::step::user::data::max_sub_interval%]"
},
"data_description": {
"round": "[%key:component::integration::config::step::user::data_description::round%]",
"unit_prefix": "[%key:component::integration::config::step::user::data_description::unit_prefix%]",
"unit_time": "[%key:component::integration::config::step::user::data_description::unit_time%]"
"unit_time": "[%key:component::integration::config::step::user::data_description::unit_time%]",
"max_sub_interval": "[%key:component::integration::config::step::user::data_description::max_sub_interval%]"
}
}
}

View File

@@ -45,7 +45,7 @@
"jakim": "Jabatan Kemajuan Islam Malaysia (JAKIM)",
"tunisia": "Tunisia",
"algeria": "Algeria",
"kemenag": "ementerian Agama Republik Indonesia",
"kemenag": "Kementerian Agama Republik Indonesia",
"morocco": "Morocco",
"portugal": "Comunidade Islamica de Lisboa",
"jordan": "Ministry of Awqaf, Islamic Affairs and Holy Places, Jordan",

View File

@@ -441,6 +441,9 @@ class ZoneDevice(ClimateEntity):
_attr_name = None
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_target_temperature_step = 0.5
_attr_supported_features = (
ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON
)
def __init__(self, controller: ControllerDevice, zone: Zone) -> None:
"""Initialise ZoneDevice."""

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["jvcprojector"],
"requirements": ["pyjvcprojector==1.0.11"]
"requirements": ["pyjvcprojector==1.0.12"]
}

View File

@@ -147,18 +147,10 @@ CONFIG_SCHEMA = vol.Schema(
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Start the KNX integration."""
hass.data[DATA_HASS_CONFIG] = config
conf: ConfigType | None = config.get(DOMAIN)
if conf is None:
# If we have a config entry, setup is done by that config entry.
# If there is no config entry, this should fail.
return bool(hass.config_entries.async_entries(DOMAIN))
conf = dict(conf)
hass.data[DATA_KNX_CONFIG] = conf
if (conf := config.get(DOMAIN)) is not None:
hass.data[DATA_KNX_CONFIG] = dict(conf)
register_knx_services(hass)
return True

View File

@@ -5,7 +5,11 @@ from __future__ import annotations
from typing import Any
from xknx import XKNX
from xknx.devices import Climate as XknxClimate, ClimateMode as XknxClimateMode
from xknx.devices import (
Climate as XknxClimate,
ClimateMode as XknxClimateMode,
Device as XknxDevice,
)
from xknx.dpt.dpt_20 import HVACControllerMode
from homeassistant import config_entries
@@ -241,12 +245,9 @@ class KNXClimate(KnxYamlEntity, ClimateEntity):
if self._device.supports_on_off and not self._device.is_on:
return HVACMode.OFF
if self._device.mode is not None and self._device.mode.supports_controller_mode:
hvac_mode = CONTROLLER_MODES.get(
return CONTROLLER_MODES.get(
self._device.mode.controller_mode, self.default_hvac_mode
)
if hvac_mode is not HVACMode.OFF:
self._last_hvac_mode = hvac_mode
return hvac_mode
return self.default_hvac_mode
@property
@@ -261,11 +262,15 @@ class KNXClimate(KnxYamlEntity, ClimateEntity):
if self._device.supports_on_off:
if not ha_controller_modes:
ha_controller_modes.append(self.default_hvac_mode)
ha_controller_modes.append(self._last_hvac_mode)
ha_controller_modes.append(HVACMode.OFF)
hvac_modes = list(set(filter(None, ha_controller_modes)))
return hvac_modes if hvac_modes else [self.default_hvac_mode]
return (
hvac_modes
if hvac_modes
else [self.hvac_mode] # mode read-only -> fall back to only current mode
)
@property
def hvac_action(self) -> HVACAction | None:
@@ -354,3 +359,13 @@ class KNXClimate(KnxYamlEntity, ClimateEntity):
self._device.mode.unregister_device_updated_cb(self.after_update_callback)
self._device.mode.xknx.devices.async_remove(self._device.mode)
await super().async_will_remove_from_hass()
def after_update_callback(self, _device: XknxDevice) -> None:
"""Call after device was updated."""
if self._device.mode is not None and self._device.mode.supports_controller_mode:
hvac_mode = CONTROLLER_MODES.get(
self._device.mode.controller_mode, self.default_hvac_mode
)
if hvac_mode is not HVACMode.OFF:
self._last_hvac_mode = hvac_mode
super().after_update_callback(_device)

View File

@@ -226,7 +226,7 @@ def _create_ui_light(xknx: XKNX, knx_config: ConfigType, name: str) -> XknxLight
group_address_color_temp_state = None
color_temperature_type = ColorTemperatureType.UINT_2_BYTE
if ga_color_temp := knx_config.get(CONF_GA_COLOR_TEMP):
if ga_color_temp[CONF_DPT] == ColorTempModes.RELATIVE:
if ga_color_temp[CONF_DPT] == ColorTempModes.RELATIVE.value:
group_address_tunable_white = ga_color_temp[CONF_GA_WRITE]
group_address_tunable_white_state = [
ga_color_temp[CONF_GA_STATE],
@@ -239,7 +239,7 @@ def _create_ui_light(xknx: XKNX, knx_config: ConfigType, name: str) -> XknxLight
ga_color_temp[CONF_GA_STATE],
*ga_color_temp[CONF_GA_PASSIVE],
]
if ga_color_temp[CONF_DPT] == ColorTempModes.ABSOLUTE_FLOAT:
if ga_color_temp[CONF_DPT] == ColorTempModes.ABSOLUTE_FLOAT.value:
color_temperature_type = ColorTemperatureType.FLOAT_2_BYTE
_color_dpt = get_dpt(CONF_GA_COLOR)

View File

@@ -11,9 +11,9 @@
"loggers": ["xknx", "xknxproject"],
"quality_scale": "platinum",
"requirements": [
"xknx==3.0.0",
"xknx==3.1.1",
"xknxproject==3.7.1",
"knx-frontend==2024.7.25.204106"
"knx-frontend==2024.8.9.225351"
],
"single_config_entry": true
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/lacrosse_view",
"iot_class": "cloud_polling",
"loggers": ["lacrosse_view"],
"requirements": ["lacrosse-view==1.0.1"]
"requirements": ["lacrosse-view==1.0.2"]
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/lcn",
"iot_class": "local_push",
"loggers": ["pypck"],
"requirements": ["pypck==0.7.17"]
"requirements": ["pypck==0.7.20"]
}

View File

@@ -9,7 +9,7 @@
},
"iot_class": "local_push",
"loggers": ["pylutron_caseta"],
"requirements": ["pylutron-caseta==0.20.0"],
"requirements": ["pylutron-caseta==0.21.1"],
"zeroconf": [
{
"type": "_lutron._tcp.local.",

View File

@@ -277,4 +277,15 @@ class MadvrSensor(MadVREntity, SensorEntity):
@property
def native_value(self) -> float | str | None:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.coordinator)
val = self.entity_description.value_fn(self.coordinator)
# check if sensor is enum
if self.entity_description.device_class == SensorDeviceClass.ENUM:
if (
self.entity_description.options
and val in self.entity_description.options
):
return val
# return None for values that are not in the options
return None
return val

View File

@@ -60,6 +60,8 @@ TRANSITION_BLOCKLIST = (
(4456, 1011, "1.0.0", "2.00.00"),
(4488, 260, "1.0", "1.0.0"),
(4488, 514, "1.0", "1.0.0"),
(4921, 42, "1.0", "1.01.060"),
(4921, 43, "1.0", "1.01.060"),
(4999, 24875, "1.0", "27.0"),
(4999, 25057, "1.0", "27.0"),
(5009, 514, "1.0", "1.0.0"),

View File

@@ -27,7 +27,6 @@ type SelectCluster = (
| clusters.RvcRunMode
| clusters.RvcCleanMode
| clusters.DishwasherMode
| clusters.MicrowaveOvenMode
| clusters.EnergyEvseMode
| clusters.DeviceEnergyManagementMode
)
@@ -199,18 +198,6 @@ DISCOVERY_SCHEMAS = [
clusters.DishwasherMode.Attributes.SupportedModes,
),
),
MatterDiscoverySchema(
platform=Platform.SELECT,
entity_description=MatterSelectEntityDescription(
key="MatterMicrowaveOvenMode",
translation_key="mode",
),
entity_class=MatterModeSelectEntity,
required_attributes=(
clusters.MicrowaveOvenMode.Attributes.CurrentMode,
clusters.MicrowaveOvenMode.Attributes.SupportedModes,
),
),
MatterDiscoverySchema(
platform=Platform.SELECT,
entity_description=MatterSelectEntityDescription(
@@ -242,12 +229,12 @@ DISCOVERY_SCHEMAS = [
entity_category=EntityCategory.CONFIG,
translation_key="startup_on_off",
options=["On", "Off", "Toggle", "Previous"],
measurement_to_ha=lambda x: {
measurement_to_ha=lambda x: { # pylint: disable=unnecessary-lambda
0: "Off",
1: "On",
2: "Toggle",
None: "Previous",
}[x],
}.get(x),
ha_to_native_value=lambda x: {
"Off": 0,
"On": 1,

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/mealie",
"integration_type": "service",
"iot_class": "local_polling",
"requirements": ["aiomealie==0.8.0"]
"requirements": ["aiomealie==0.8.1"]
}

View File

@@ -8,6 +8,6 @@
"iot_class": "calculated",
"loggers": ["yt_dlp"],
"quality_scale": "internal",
"requirements": ["yt-dlp==2024.07.16"],
"requirements": ["yt-dlp==2024.08.06"],
"single_config_entry": true
}

View File

@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/mfi",
"iot_class": "local_polling",
"loggers": ["mficlient"],
"requirements": ["mficlient==0.3.0"]
"requirements": ["mficlient==0.5.0"]
}

View File

@@ -124,12 +124,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
):
await async_create_cloud_hook(hass, webhook_id, entry)
if (
CONF_CLOUDHOOK_URL not in entry.data
and cloud.async_active_subscription(hass)
and cloud.async_is_connected(hass)
):
await async_create_cloud_hook(hass, webhook_id, entry)
if cloud.async_is_logged_in(hass):
if (
CONF_CLOUDHOOK_URL not in entry.data
and cloud.async_active_subscription(hass)
and cloud.async_is_connected(hass)
):
await async_create_cloud_hook(hass, webhook_id, entry)
elif CONF_CLOUDHOOK_URL in entry.data:
# If we have a cloudhook but no longer logged in to the cloud, remove it from the entry
data = dict(entry.data)
data.pop(CONF_CLOUDHOOK_URL)
hass.config_entries.async_update_entry(entry, data=data)
entry.async_on_unload(cloud.async_listen_connection_change(hass, manage_cloudhook))

View File

@@ -6,5 +6,5 @@
"dependencies": ["application_credentials"],
"documentation": "https://www.home-assistant.io/integrations/monzo",
"iot_class": "cloud_polling",
"requirements": ["monzopy==1.3.0"]
"requirements": ["monzopy==1.3.2"]
}

View File

@@ -20,5 +20,5 @@
"iot_class": "cloud_push",
"loggers": ["google_nest_sdm"],
"quality_scale": "platinum",
"requirements": ["google-nest-sdm==4.0.5"]
"requirements": ["google-nest-sdm==4.0.7"]
}

View File

@@ -50,13 +50,15 @@ class NextBusDataUpdateCoordinator(DataUpdateCoordinator):
async def _async_update_data(self) -> dict[str, Any]:
"""Fetch data from NextBus."""
self.logger.debug("Updating data from API. Routes: %s", str(self._route_stops))
_route_stops = set(self._route_stops)
self.logger.debug("Updating data from API. Routes: %s", str(_route_stops))
def _update_data() -> dict:
"""Fetch data from NextBus."""
self.logger.debug("Updating data from API (executor)")
predictions: dict[RouteStop, dict[str, Any]] = {}
for route_stop in self._route_stops:
for route_stop in _route_stops:
prediction_results: list[dict[str, Any]] = []
try:
prediction_results = self.client.predictions_for_stop(

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/nextbus",
"iot_class": "cloud_polling",
"loggers": ["py_nextbus"],
"requirements": ["py-nextbusnext==2.0.3"]
"requirements": ["py-nextbusnext==2.0.4"]
}

View File

@@ -346,9 +346,5 @@ class OllamaConversationEntity(
self, hass: HomeAssistant, entry: ConfigEntry
) -> None:
"""Handle options update."""
if entry.options.get(CONF_LLM_HASS_API):
self._attr_supported_features = (
conversation.ConversationEntityFeature.CONTROL
)
else:
self._attr_supported_features = conversation.ConversationEntityFeature(0)
# Reload as we update device info + entity name + supported features
await hass.config_entries.async_reload(entry.entry_id)

View File

@@ -328,9 +328,5 @@ class OpenAIConversationEntity(
self, hass: HomeAssistant, entry: ConfigEntry
) -> None:
"""Handle options update."""
if entry.options.get(CONF_LLM_HASS_API):
self._attr_supported_features = (
conversation.ConversationEntityFeature.CONTROL
)
else:
self._attr_supported_features = conversation.ConversationEntityFeature(0)
# Reload as we update device info + entity name + supported features
await hass.config_entries.async_reload(entry.entry_id)

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
from dataclasses import dataclass
import logging
from pyopenweathermap import OWMClient
from pyopenweathermap import create_owm_client
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
@@ -33,6 +33,7 @@ class OpenweathermapData:
"""Runtime data definition."""
name: str
mode: str
coordinator: WeatherUpdateCoordinator
@@ -52,7 +53,7 @@ async def async_setup_entry(
else:
async_delete_issue(hass, entry.entry_id)
owm_client = OWMClient(api_key, mode, lang=language)
owm_client = create_owm_client(api_key, mode, lang=language)
weather_coordinator = WeatherUpdateCoordinator(
owm_client, latitude, longitude, hass
)
@@ -61,7 +62,7 @@ async def async_setup_entry(
entry.async_on_unload(entry.add_update_listener(async_update_options))
entry.runtime_data = OpenweathermapData(name, weather_coordinator)
entry.runtime_data = OpenweathermapData(name, mode, weather_coordinator)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

View File

@@ -58,10 +58,17 @@ FORECAST_MODE_DAILY = "daily"
FORECAST_MODE_FREE_DAILY = "freedaily"
FORECAST_MODE_ONECALL_HOURLY = "onecall_hourly"
FORECAST_MODE_ONECALL_DAILY = "onecall_daily"
OWM_MODE_V25 = "v2.5"
OWM_MODE_FREE_CURRENT = "current"
OWM_MODE_FREE_FORECAST = "forecast"
OWM_MODE_V30 = "v3.0"
OWM_MODES = [OWM_MODE_V30, OWM_MODE_V25]
DEFAULT_OWM_MODE = OWM_MODE_V30
OWM_MODE_V25 = "v2.5"
OWM_MODES = [
OWM_MODE_FREE_CURRENT,
OWM_MODE_FREE_FORECAST,
OWM_MODE_V30,
OWM_MODE_V25,
]
DEFAULT_OWM_MODE = OWM_MODE_FREE_CURRENT
LANGUAGES = [
"af",

View File

@@ -86,8 +86,14 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
"""Format the weather response correctly."""
_LOGGER.debug("OWM weather response: %s", weather_report)
current_weather = (
self._get_current_weather_data(weather_report.current)
if weather_report.current is not None
else {}
)
return {
ATTR_API_CURRENT: self._get_current_weather_data(weather_report.current),
ATTR_API_CURRENT: current_weather,
ATTR_API_HOURLY_FORECAST: [
self._get_hourly_forecast_weather_data(item)
for item in weather_report.hourly_forecast
@@ -122,6 +128,8 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
}
def _get_hourly_forecast_weather_data(self, forecast: HourlyWeatherForecast):
uv_index = float(forecast.uv_index) if forecast.uv_index is not None else None
return Forecast(
datetime=forecast.date_time.isoformat(),
condition=self._get_condition(forecast.condition.id),
@@ -134,12 +142,14 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
wind_speed=forecast.wind_speed,
native_wind_gust_speed=forecast.wind_gust,
wind_bearing=forecast.wind_bearing,
uv_index=float(forecast.uv_index),
uv_index=uv_index,
precipitation_probability=round(forecast.precipitation_probability * 100),
precipitation=self._calc_precipitation(forecast.rain, forecast.snow),
)
def _get_daily_forecast_weather_data(self, forecast: DailyWeatherForecast):
uv_index = float(forecast.uv_index) if forecast.uv_index is not None else None
return Forecast(
datetime=forecast.date_time.isoformat(),
condition=self._get_condition(forecast.condition.id),
@@ -153,7 +163,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
wind_speed=forecast.wind_speed,
native_wind_gust_speed=forecast.wind_gust,
wind_bearing=forecast.wind_bearing,
uv_index=float(forecast.uv_index),
uv_index=uv_index,
precipitation_probability=round(forecast.precipitation_probability * 100),
precipitation=round(forecast.rain + forecast.snow, 2),
)

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/openweathermap",
"iot_class": "cloud_polling",
"loggers": ["pyopenweathermap"],
"requirements": ["pyopenweathermap==0.0.9"]
"requirements": ["pyopenweathermap==0.1.1"]
}

View File

@@ -19,6 +19,7 @@ from homeassistant.const import (
UnitOfVolumetricFlux,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import StateType
@@ -47,6 +48,7 @@ from .const import (
DEFAULT_NAME,
DOMAIN,
MANUFACTURER,
OWM_MODE_FREE_FORECAST,
)
from .coordinator import WeatherUpdateCoordinator
@@ -161,16 +163,23 @@ async def async_setup_entry(
name = domain_data.name
weather_coordinator = domain_data.coordinator
entities: list[AbstractOpenWeatherMapSensor] = [
OpenWeatherMapSensor(
name,
f"{config_entry.unique_id}-{description.key}",
description,
weather_coordinator,
if domain_data.mode == OWM_MODE_FREE_FORECAST:
entity_registry = er.async_get(hass)
entries = er.async_entries_for_config_entry(
entity_registry, config_entry.entry_id
)
for entry in entries:
entity_registry.async_remove(entry.entity_id)
else:
async_add_entities(
OpenWeatherMapSensor(
name,
f"{config_entry.unique_id}-{description.key}",
description,
weather_coordinator,
)
for description in WEATHER_SENSOR_TYPES
)
for description in WEATHER_SENSOR_TYPES
]
async_add_entities(entities)
class AbstractOpenWeatherMapSensor(SensorEntity):

View File

@@ -2,7 +2,7 @@
from typing import Any
from pyopenweathermap import OWMClient, RequestError
from pyopenweathermap import RequestError, create_owm_client
from homeassistant.const import CONF_LANGUAGE, CONF_MODE
@@ -16,7 +16,7 @@ async def validate_api_key(api_key, mode):
api_key_valid = None
errors, description_placeholders = {}, {}
try:
owm_client = OWMClient(api_key, mode)
owm_client = create_owm_client(api_key, mode)
api_key_valid = await owm_client.validate_key()
except RequestError as error:
errors["base"] = "cannot_connect"

View File

@@ -8,6 +8,7 @@ from homeassistant.components.weather import (
WeatherEntityFeature,
)
from homeassistant.const import (
UnitOfLength,
UnitOfPrecipitationDepth,
UnitOfPressure,
UnitOfSpeed,
@@ -29,6 +30,7 @@ from .const import (
ATTR_API_HUMIDITY,
ATTR_API_PRESSURE,
ATTR_API_TEMPERATURE,
ATTR_API_VISIBILITY_DISTANCE,
ATTR_API_WIND_BEARING,
ATTR_API_WIND_GUST,
ATTR_API_WIND_SPEED,
@@ -36,6 +38,9 @@ from .const import (
DEFAULT_NAME,
DOMAIN,
MANUFACTURER,
OWM_MODE_FREE_FORECAST,
OWM_MODE_V25,
OWM_MODE_V30,
)
from .coordinator import WeatherUpdateCoordinator
@@ -48,10 +53,11 @@ async def async_setup_entry(
"""Set up OpenWeatherMap weather entity based on a config entry."""
domain_data = config_entry.runtime_data
name = domain_data.name
mode = domain_data.mode
weather_coordinator = domain_data.coordinator
unique_id = f"{config_entry.unique_id}"
owm_weather = OpenWeatherMapWeather(name, unique_id, weather_coordinator)
owm_weather = OpenWeatherMapWeather(name, unique_id, mode, weather_coordinator)
async_add_entities([owm_weather], False)
@@ -66,11 +72,13 @@ class OpenWeatherMapWeather(SingleCoordinatorWeatherEntity[WeatherUpdateCoordina
_attr_native_pressure_unit = UnitOfPressure.HPA
_attr_native_temperature_unit = UnitOfTemperature.CELSIUS
_attr_native_wind_speed_unit = UnitOfSpeed.METERS_PER_SECOND
_attr_native_visibility_unit = UnitOfLength.METERS
def __init__(
self,
name: str,
unique_id: str,
mode: str,
weather_coordinator: WeatherUpdateCoordinator,
) -> None:
"""Initialize the sensor."""
@@ -83,59 +91,71 @@ class OpenWeatherMapWeather(SingleCoordinatorWeatherEntity[WeatherUpdateCoordina
manufacturer=MANUFACTURER,
name=DEFAULT_NAME,
)
self._attr_supported_features = (
WeatherEntityFeature.FORECAST_DAILY | WeatherEntityFeature.FORECAST_HOURLY
)
if mode in (OWM_MODE_V30, OWM_MODE_V25):
self._attr_supported_features = (
WeatherEntityFeature.FORECAST_DAILY
| WeatherEntityFeature.FORECAST_HOURLY
)
elif mode == OWM_MODE_FREE_FORECAST:
self._attr_supported_features = WeatherEntityFeature.FORECAST_HOURLY
@property
def condition(self) -> str | None:
"""Return the current condition."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_CONDITION]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_CONDITION)
@property
def cloud_coverage(self) -> float | None:
"""Return the Cloud coverage in %."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_CLOUDS]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_CLOUDS)
@property
def native_apparent_temperature(self) -> float | None:
"""Return the apparent temperature."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_FEELS_LIKE_TEMPERATURE]
return self.coordinator.data[ATTR_API_CURRENT].get(
ATTR_API_FEELS_LIKE_TEMPERATURE
)
@property
def native_temperature(self) -> float | None:
"""Return the temperature."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_TEMPERATURE]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_TEMPERATURE)
@property
def native_pressure(self) -> float | None:
"""Return the pressure."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_PRESSURE]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_PRESSURE)
@property
def humidity(self) -> float | None:
"""Return the humidity."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_HUMIDITY]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_HUMIDITY)
@property
def native_dew_point(self) -> float | None:
"""Return the dew point."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_DEW_POINT]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_DEW_POINT)
@property
def native_wind_gust_speed(self) -> float | None:
"""Return the wind gust speed."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_WIND_GUST]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_WIND_GUST)
@property
def native_wind_speed(self) -> float | None:
"""Return the wind speed."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_WIND_SPEED]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_WIND_SPEED)
@property
def wind_bearing(self) -> float | str | None:
"""Return the wind bearing."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_WIND_BEARING]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_WIND_BEARING)
@property
def visibility(self) -> float | str | None:
"""Return visibility."""
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_VISIBILITY_DISTANCE)
@callback
def _async_forecast_daily(self) -> list[Forecast] | None:

View File

@@ -22,6 +22,7 @@ class PiHoleUpdateEntityDescription(UpdateEntityDescription):
installed_version: Callable[[dict], str | None] = lambda api: None
latest_version: Callable[[dict], str | None] = lambda api: None
has_update: Callable[[dict], bool | None] = lambda api: None
release_base_url: str | None = None
title: str | None = None
@@ -34,6 +35,7 @@ UPDATE_ENTITY_TYPES: tuple[PiHoleUpdateEntityDescription, ...] = (
entity_category=EntityCategory.DIAGNOSTIC,
installed_version=lambda versions: versions.get("core_current"),
latest_version=lambda versions: versions.get("core_latest"),
has_update=lambda versions: versions.get("core_update"),
release_base_url="https://github.com/pi-hole/pi-hole/releases/tag",
),
PiHoleUpdateEntityDescription(
@@ -43,6 +45,7 @@ UPDATE_ENTITY_TYPES: tuple[PiHoleUpdateEntityDescription, ...] = (
entity_category=EntityCategory.DIAGNOSTIC,
installed_version=lambda versions: versions.get("web_current"),
latest_version=lambda versions: versions.get("web_latest"),
has_update=lambda versions: versions.get("web_update"),
release_base_url="https://github.com/pi-hole/AdminLTE/releases/tag",
),
PiHoleUpdateEntityDescription(
@@ -52,6 +55,7 @@ UPDATE_ENTITY_TYPES: tuple[PiHoleUpdateEntityDescription, ...] = (
entity_category=EntityCategory.DIAGNOSTIC,
installed_version=lambda versions: versions.get("FTL_current"),
latest_version=lambda versions: versions.get("FTL_latest"),
has_update=lambda versions: versions.get("FTL_update"),
release_base_url="https://github.com/pi-hole/FTL/releases/tag",
),
)
@@ -110,7 +114,9 @@ class PiHoleUpdateEntity(PiHoleEntity, UpdateEntity):
def latest_version(self) -> str | None:
"""Latest version available for install."""
if isinstance(self.api.versions, dict):
return self.entity_description.latest_version(self.api.versions)
if self.entity_description.has_update(self.api.versions):
return self.entity_description.latest_version(self.api.versions)
return self.installed_version
return None
@property

View File

@@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/qnap_qsw",
"iot_class": "local_polling",
"loggers": ["aioqsw"],
"requirements": ["aioqsw==0.4.0"]
"requirements": ["aioqsw==0.4.1"]
}

View File

@@ -77,7 +77,7 @@ class LegacyBase(DeclarativeBase):
"""Base class for tables, used for schema migration."""
SCHEMA_VERSION = 44
SCHEMA_VERSION = 45
_LOGGER = logging.getLogger(__name__)

View File

@@ -632,7 +632,7 @@ def _update_states_table_with_foreign_key_options(
def _drop_foreign_key_constraints(
session_maker: Callable[[], Session], engine: Engine, table: str, column: str
) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]:
) -> tuple[bool, list[tuple[str, str, ReflectedForeignKeyConstraint]]]:
"""Drop foreign key constraints for a table on specific columns."""
inspector = sqlalchemy.inspect(engine)
dropped_constraints = [
@@ -649,6 +649,7 @@ def _drop_foreign_key_constraints(
if foreign_key["name"] and foreign_key["constrained_columns"] == [column]
]
fk_remove_ok = True
for drop in drops:
with session_scope(session=session_maker()) as session:
try:
@@ -660,40 +661,185 @@ def _drop_foreign_key_constraints(
TABLE_STATES,
column,
)
fk_remove_ok = False
return dropped_constraints
return fk_remove_ok, dropped_constraints
def _restore_foreign_key_constraints(
session_maker: Callable[[], Session],
engine: Engine,
dropped_constraints: list[tuple[str, str, ReflectedForeignKeyConstraint]],
foreign_columns: list[tuple[str, str, str | None, str | None]],
) -> None:
"""Restore foreign key constraints."""
for table, column, dropped_constraint in dropped_constraints:
for table, column, foreign_table, foreign_column in foreign_columns:
constraints = Base.metadata.tables[table].foreign_key_constraints
for constraint in constraints:
if constraint.column_keys == [column]:
break
else:
_LOGGER.info(
"Did not find a matching constraint for %s", dropped_constraint
)
_LOGGER.info("Did not find a matching constraint for %s.%s", table, column)
continue
if TYPE_CHECKING:
assert foreign_table is not None
assert foreign_column is not None
# AddConstraint mutates the constraint passed to it, we need to
# undo that to avoid changing the behavior of the table schema.
# https://github.com/sqlalchemy/sqlalchemy/blob/96f1172812f858fead45cdc7874abac76f45b339/lib/sqlalchemy/sql/ddl.py#L746-L748
create_rule = constraint._create_rule # noqa: SLF001
add_constraint = AddConstraint(constraint) # type: ignore[no-untyped-call]
constraint._create_rule = create_rule # noqa: SLF001
try:
_add_constraint(session_maker, add_constraint, table, column)
except IntegrityError:
_LOGGER.exception(
(
"Could not update foreign options in %s table, will delete "
"violations and try again"
),
table,
)
_delete_foreign_key_violations(
session_maker, engine, table, column, foreign_table, foreign_column
)
_add_constraint(session_maker, add_constraint, table, column)
with session_scope(session=session_maker()) as session:
try:
connection = session.connection()
connection.execute(add_constraint)
except (InternalError, OperationalError):
_LOGGER.exception("Could not update foreign options in %s table", table)
def _add_constraint(
session_maker: Callable[[], Session],
add_constraint: AddConstraint,
table: str,
column: str,
) -> None:
"""Add a foreign key constraint."""
_LOGGER.warning(
"Adding foreign key constraint to %s.%s. "
"Note: this can take several minutes on large databases and slow "
"machines. Please be patient!",
table,
column,
)
with session_scope(session=session_maker()) as session:
try:
connection = session.connection()
connection.execute(add_constraint)
except (InternalError, OperationalError):
_LOGGER.exception("Could not update foreign options in %s table", table)
def _delete_foreign_key_violations(
session_maker: Callable[[], Session],
engine: Engine,
table: str,
column: str,
foreign_table: str,
foreign_column: str,
) -> None:
"""Remove rows which violate the constraints."""
if engine.dialect.name not in (SupportedDialect.MYSQL, SupportedDialect.POSTGRESQL):
raise RuntimeError(
f"_delete_foreign_key_violations not supported for {engine.dialect.name}"
)
_LOGGER.warning(
"Rows in table %s where %s references non existing %s.%s will be %s. "
"Note: this can take several minutes on large databases and slow "
"machines. Please be patient!",
table,
column,
foreign_table,
foreign_column,
"set to NULL" if table == foreign_table else "deleted",
)
result: CursorResult | None = None
if table == foreign_table:
# In case of a foreign reference to the same table, we set invalid
# references to NULL instead of deleting as deleting rows may
# cause additional invalid references to be created. This is to handle
# old_state_id referencing a missing state.
if engine.dialect.name == SupportedDialect.MYSQL:
while result is None or result.rowcount > 0:
with session_scope(session=session_maker()) as session:
# The subquery (SELECT {foreign_column} from {foreign_table}) is
# to be compatible with old MySQL versions which do not allow
# referencing the table being updated in the WHERE clause.
result = session.connection().execute(
text(
f"UPDATE {table} as t1 " # noqa: S608
f"SET {column} = NULL "
"WHERE ("
f"t1.{column} IS NOT NULL AND "
"NOT EXISTS "
"(SELECT 1 "
f"FROM (SELECT {foreign_column} from {foreign_table}) AS t2 "
f"WHERE t2.{foreign_column} = t1.{column})) "
"LIMIT 100000;"
)
)
elif engine.dialect.name == SupportedDialect.POSTGRESQL:
while result is None or result.rowcount > 0:
with session_scope(session=session_maker()) as session:
# PostgreSQL does not support LIMIT in UPDATE clauses, so we
# update matches from a limited subquery instead.
result = session.connection().execute(
text(
f"UPDATE {table} " # noqa: S608
f"SET {column} = NULL "
f"WHERE {column} in "
f"(SELECT {column} from {table} as t1 "
"WHERE ("
f"t1.{column} IS NOT NULL AND "
"NOT EXISTS "
"(SELECT 1 "
f"FROM {foreign_table} AS t2 "
f"WHERE t2.{foreign_column} = t1.{column})) "
"LIMIT 100000);"
)
)
return
if engine.dialect.name == SupportedDialect.MYSQL:
while result is None or result.rowcount > 0:
with session_scope(session=session_maker()) as session:
result = session.connection().execute(
# We don't use an alias for the table we're deleting from,
# support of the form `DELETE FROM table AS t1` was added in
# MariaDB 11.6 and is not supported by MySQL. Those engines
# instead support the from `DELETE t1 from table AS t1` which
# is not supported by PostgreSQL and undocumented for MariaDB.
text(
f"DELETE FROM {table} " # noqa: S608
"WHERE ("
f"{table}.{column} IS NOT NULL AND "
"NOT EXISTS "
"(SELECT 1 "
f"FROM {foreign_table} AS t2 "
f"WHERE t2.{foreign_column} = {table}.{column})) "
"LIMIT 100000;"
)
)
elif engine.dialect.name == SupportedDialect.POSTGRESQL:
while result is None or result.rowcount > 0:
with session_scope(session=session_maker()) as session:
# PostgreSQL does not support LIMIT in DELETE clauses, so we
# delete matches from a limited subquery instead.
result = session.connection().execute(
text(
f"DELETE FROM {table} " # noqa: S608
f"WHERE {column} in "
f"(SELECT {column} from {table} as t1 "
"WHERE ("
f"t1.{column} IS NOT NULL AND "
"NOT EXISTS "
"(SELECT 1 "
f"FROM {foreign_table} AS t2 "
f"WHERE t2.{foreign_column} = t1.{column})) "
"LIMIT 100000);"
)
)
@database_job_retry_wrapper("Apply migration update", 10)
@@ -1457,6 +1603,38 @@ class _SchemaVersion43Migrator(_SchemaVersionMigrator, target_version=43):
)
FOREIGN_COLUMNS = (
(
"events",
("data_id", "event_type_id"),
(
("data_id", "event_data", "data_id"),
("event_type_id", "event_types", "event_type_id"),
),
),
(
"states",
("event_id", "old_state_id", "attributes_id", "metadata_id"),
(
("event_id", None, None),
("old_state_id", "states", "state_id"),
("attributes_id", "state_attributes", "attributes_id"),
("metadata_id", "states_meta", "metadata_id"),
),
),
(
"statistics",
("metadata_id",),
(("metadata_id", "statistics_meta", "id"),),
),
(
"statistics_short_term",
("metadata_id",),
(("metadata_id", "statistics_meta", "id"),),
),
)
class _SchemaVersion44Migrator(_SchemaVersionMigrator, target_version=44):
def _apply_update(self) -> None:
"""Version specific update method."""
@@ -1469,24 +1647,14 @@ class _SchemaVersion44Migrator(_SchemaVersionMigrator, target_version=44):
else ""
)
# First drop foreign key constraints
foreign_columns = (
("events", ("data_id", "event_type_id")),
("states", ("event_id", "old_state_id", "attributes_id", "metadata_id")),
("statistics", ("metadata_id",)),
("statistics_short_term", ("metadata_id",)),
)
dropped_constraints = [
dropped_constraint
for table, columns in foreign_columns
for column in columns
for dropped_constraint in _drop_foreign_key_constraints(
self.session_maker, self.engine, table, column
)
]
_LOGGER.debug("Dropped foreign key constraints: %s", dropped_constraints)
for table, columns, _ in FOREIGN_COLUMNS:
for column in columns:
_drop_foreign_key_constraints(
self.session_maker, self.engine, table, column
)
# Then modify the constrained columns
for table, columns in foreign_columns:
for table, columns, _ in FOREIGN_COLUMNS:
_modify_columns(
self.session_maker,
self.engine,
@@ -1516,9 +1684,24 @@ class _SchemaVersion44Migrator(_SchemaVersionMigrator, target_version=44):
table,
[f"{column} {BIG_INTEGER_SQL} {identity_sql}"],
)
# Finally restore dropped constraints
class _SchemaVersion45Migrator(_SchemaVersionMigrator, target_version=45):
def _apply_update(self) -> None:
"""Version specific update method."""
# We skip this step for SQLITE, it doesn't have differently sized integers
if self.engine.dialect.name == SupportedDialect.SQLITE:
return
# Restore constraints dropped in migration to schema version 44
_restore_foreign_key_constraints(
self.session_maker, self.engine, dropped_constraints
self.session_maker,
self.engine,
[
(table, column, foreign_table, foreign_column)
for table, _, foreign_mappings in FOREIGN_COLUMNS
for column, foreign_table, foreign_column in foreign_mappings
],
)
@@ -1956,14 +2139,15 @@ def cleanup_legacy_states_event_ids(instance: Recorder) -> bool:
if instance.dialect_name == SupportedDialect.SQLITE:
# SQLite does not support dropping foreign key constraints
# so we have to rebuild the table
rebuild_sqlite_table(session_maker, instance.engine, States)
fk_remove_ok = rebuild_sqlite_table(session_maker, instance.engine, States)
else:
_drop_foreign_key_constraints(
fk_remove_ok, _ = _drop_foreign_key_constraints(
session_maker, instance.engine, TABLE_STATES, "event_id"
)
_drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX)
instance.use_legacy_events_index = False
_mark_migration_done(session, EventIDPostMigration)
if fk_remove_ok:
_drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX)
instance.use_legacy_events_index = False
_mark_migration_done(session, EventIDPostMigration)
return True
@@ -2419,6 +2603,7 @@ class EventIDPostMigration(BaseRunTimeMigration):
migration_id = "event_id_post_migration"
task = MigrationTask
migration_version = 2
@staticmethod
def migrate_data(instance: Recorder) -> bool:
@@ -2469,7 +2654,7 @@ def _mark_migration_done(
def rebuild_sqlite_table(
session_maker: Callable[[], Session], engine: Engine, table: type[Base]
) -> None:
) -> bool:
"""Rebuild an SQLite table.
This must only be called after all migrations are complete
@@ -2524,8 +2709,10 @@ def rebuild_sqlite_table(
# Swallow the exception since we do not want to ever raise
# an integrity error as it would cause the database
# to be discarded and recreated from scratch
return False
else:
_LOGGER.warning("Rebuilding SQLite table %s finished", orig_name)
return True
finally:
with session_scope(session=session_maker()) as session:
# Step 12 - Re-enable foreign keys

View File

@@ -18,5 +18,5 @@
"documentation": "https://www.home-assistant.io/integrations/reolink",
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"requirements": ["reolink-aio==0.9.6"]
"requirements": ["reolink-aio==0.9.7"]
}

View File

@@ -7,7 +7,7 @@
"iot_class": "local_polling",
"loggers": ["roborock"],
"requirements": [
"python-roborock==2.5.0",
"python-roborock==2.6.0",
"vacuum-map-parser-roborock==0.1.2"
]
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/russound_rio",
"iot_class": "local_push",
"loggers": ["aiorussound"],
"requirements": ["aiorussound==2.2.0"]
"requirements": ["aiorussound==2.2.3"]
}

View File

@@ -128,11 +128,18 @@ class RussoundZoneDevice(MediaPlayerEntity):
self._zone = zone
self._sources = sources
self._attr_name = zone.name
self._attr_unique_id = f"{self._controller.mac_address}-{zone.device_str()}"
primary_mac_address = (
self._controller.mac_address
or self._controller.parent_controller.mac_address
)
self._attr_unique_id = f"{primary_mac_address}-{zone.device_str()}"
device_identifier = (
self._controller.mac_address
or f"{primary_mac_address}-{self._controller.controller_id}"
)
self._attr_device_info = DeviceInfo(
# Use MAC address of Russound device as identifier
identifiers={(DOMAIN, self._controller.mac_address)},
connections={(CONNECTION_NETWORK_MAC, self._controller.mac_address)},
identifiers={(DOMAIN, device_identifier)},
manufacturer="Russound",
name=self._controller.controller_type,
model=self._controller.controller_type,
@@ -143,6 +150,10 @@ class RussoundZoneDevice(MediaPlayerEntity):
DOMAIN,
self._controller.parent_controller.mac_address,
)
else:
self._attr_device_info["connections"] = {
(CONNECTION_NETWORK_MAC, self._controller.mac_address)
}
for flag, feature in MP_FEATURES_BY_FLAG.items():
if flag in zone.instance.supported_features:
self._attr_supported_features |= feature

View File

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/schlage",
"iot_class": "cloud_polling",
"requirements": ["pyschlage==2024.6.0"]
"requirements": ["pyschlage==2024.8.0"]
}

View File

@@ -2,6 +2,7 @@
from dataclasses import dataclass
from datetime import timedelta
from functools import partial
import logging
from typing import Any
@@ -80,8 +81,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: SenseConfigEntry) -> boo
client_session = async_get_clientsession(hass)
gateway = ASyncSenseable(
api_timeout=timeout, wss_timeout=timeout, client_session=client_session
# Creating the AsyncSenseable object loads
# ssl certificates which does blocking IO
gateway = await hass.async_add_executor_job(
partial(
ASyncSenseable,
api_timeout=timeout,
wss_timeout=timeout,
client_session=client_session,
)
)
gateway.rate_limit = ACTIVE_UPDATE_RATE

View File

@@ -1,6 +1,7 @@
"""Config flow for Sense integration."""
from collections.abc import Mapping
from functools import partial
import logging
from typing import Any
@@ -48,8 +49,15 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN):
timeout = self._auth_data[CONF_TIMEOUT]
client_session = async_get_clientsession(self.hass)
self._gateway = ASyncSenseable(
api_timeout=timeout, wss_timeout=timeout, client_session=client_session
# Creating the AsyncSenseable object loads
# ssl certificates which does blocking IO
self._gateway = await self.hass.async_add_executor_job(
partial(
ASyncSenseable,
api_timeout=timeout,
wss_timeout=timeout,
client_session=client_session,
)
)
self._gateway.rate_limit = ACTIVE_UPDATE_RATE
await self._gateway.authenticate(

Some files were not shown because too many files have changed in this diff Show More