Compare commits

...

98 Commits

Author SHA1 Message Date
Paulus Schoutsen
e0a97ec90d 2023.5.3 (#93066) 2023-05-14 13:00:18 -04:00
Paulus Schoutsen
1f6a601fc9 Bumped version to 2023.5.3 2023-05-14 12:11:32 -04:00
Aaron Bach
ff14277805 Fix a series of bugs due to Notion API changes (#93039)
* Fix a series of bugs due to Notion API changes

* Simplify

* Reduce blast radius

* Reduce blast radius

* Fix tests
2023-05-14 12:11:22 -04:00
J. Nick Koston
6424dee231 Fix sslv2/sslv3 with unverified connections (#93037)
In #90191 we switched to using the same SSL context for httpx to avoid
a memory leak, but httpx previously allowed SSLv2/SSLv3 for
unverified connections

This reverts to the behavior before #90191
2023-05-14 12:11:21 -04:00
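A minimal sketch of the idea behind this fix, assuming only the standard library `ssl` module (the helper name is illustrative, not the integration's actual code): keep separate client contexts so the strict settings used for verified connections are not forced onto unverified ones.

```python
import ssl


def client_context(verify: bool = True) -> ssl.SSLContext:
    """Illustrative helper: build a client SSL context per verification mode."""
    context = ssl.create_default_context()
    if not verify:
        # Unverified connections skip certificate/hostname checks and keep the
        # library's default protocol floor rather than a stricter shared one,
        # mirroring the pre-#90191 httpx behavior described above.
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
    return context
```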
J. Nick Koston
13c51e9c34 Disable cleanup_closed for aiohttp.TCPConnector with cpython 3.11.1+ (#93013)
* Disable cleanup_closed for aiohttp.TCPConnector with cpython 3.11.2+

There is currently a relatively fast memory leak when using
cpython 3.11.2+ and cleanup_closed with aiohttp.

For my production instance it was leaking ~450MiB per day
of `MemoryBIO`, `SSLProtocol`, `SSLObject`, `_SSLProtocolTransport`,
`memoryview`, and `managedbuffer` objects

see https://github.com/aio-libs/aiohttp/issues/7252
see https://github.com/python/cpython/pull/98540

* Update homeassistant/helpers/aiohttp_client.py
2023-05-14 12:11:20 -04:00
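A hedged sketch of the workaround this commit describes (the constant name and session helper are illustrative, and the version cutoff is taken from the commit title while the body says 3.11.2+; see `homeassistant/helpers/aiohttp_client.py` for the real implementation): only enable `cleanup_closed` on interpreters that are not affected by the leak.

```python
import sys

import aiohttp

# Illustrative cutoff; the exact boundary may differ in the real code.
ENABLE_CLEANUP_CLOSED = sys.version_info < (3, 11, 1)


async def make_session() -> aiohttp.ClientSession:
    """Create a session whose connector avoids the leaky cleanup_closed path."""
    connector = aiohttp.TCPConnector(enable_cleanup_closed=ENABLE_CLEANUP_CLOSED)
    return aiohttp.ClientSession(connector=connector)
```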
puddly
304c34a119 Bump bellows to 0.35.5 to fix Aqara Zigbee connectivity issue (#92999)
Bump bellows to 0.35.5
2023-05-14 12:11:19 -04:00
starkillerOG
d840d27f2d Bump reolink-aio to 0.5.15 (#92979) 2023-05-14 12:11:18 -04:00
Michael
a8cf3fadaa Fix remove of device when surveillance station is not used in Synology DSM (#92957) 2023-05-14 12:11:17 -04:00
Joost Lekkerkerker
a3f3b43c20 Bump python-vehicle to 1.0.1 (#92933) 2023-05-14 12:11:17 -04:00
Robert Hillis
b0520ccb94 Bump eternalegypt to 0.0.16 (#92919) 2023-05-14 12:11:16 -04:00
Jonathan Keslin
fe308e26dc Bump volvooncall to 0.10.3 to fix sensor type error (#92913) 2023-05-14 12:11:15 -04:00
Michael
60fb71159d Fix uptime sensor deviation detection in Fritz!Tools (#92907) 2023-05-14 12:11:14 -04:00
G Johansson
413dbe89e5 Fix already_configured string in workday (#92901)
* Fix already_configured string in workday

* Fix strings
2023-05-14 12:11:13 -04:00
J. Nick Koston
7abe9f1f9a Bump bluetooth-auto-recovery to 1.2.0 (#92893) 2023-05-14 12:11:12 -04:00
Glenn Waters
252b99f00b Bump UPB integration library to 0.5.4 (#92879) 2023-05-14 12:11:11 -04:00
J. Nick Koston
8e407334b7 Add ONVIF services to diagnostics (#92878) 2023-05-14 12:11:10 -04:00
puddly
91faa31e8c Bump ZHA dependencies (#92870) 2023-05-14 12:11:09 -04:00
Michael Hansen
5e77de35bd Allow "no" to match "nb" in language util (#92862)
* Allow "no" to match "nb"

* Adjust comparison for speed
2023-05-14 12:11:09 -04:00
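A small sketch of the matching rule this commit describes, in plain Python with illustrative names (not the actual `homeassistant.util.language` code): Norwegian "no" and Bokmål "nb" are treated as interchangeable when comparing language tags.

```python
# Hypothetical helper; the real util may normalize more than these two tags.
NORWEGIAN_ALIASES = {"no", "nb"}


def languages_match(a: str, b: str) -> bool:
    """Return True if two language tags should be considered the same."""
    a, b = a.lower(), b.lower()
    return a == b or (a in NORWEGIAN_ALIASES and b in NORWEGIAN_ALIASES)
```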
jjlawren
c1b18dcbba Bump sonos-websocket to 0.1.1 (#92834) 2023-05-14 12:11:08 -04:00
Diogo Gomes
3c45bda0e8 Don't try to restore unavailable nor unknown states (#92825) 2023-05-14 12:11:07 -04:00
Álvaro Fernández Rojas
7361c29cba Update aioairzone to v0.5.5 (#92812) 2023-05-14 12:11:06 -04:00
Álvaro Fernández Rojas
a551de06c7 Fix Airzone Auto operation mode (#92796) 2023-05-14 12:11:05 -04:00
Erik Montnemery
84ce2f13f2 Fix race in Alexa async_enable_proactive_mode (#92785) 2023-05-14 12:11:04 -04:00
Álvaro Fernández Rojas
5c949bd862 Update aioairzone to v0.5.3 (#92780) 2023-05-14 12:11:03 -04:00
Keilin Bickar
16020d8ab9 Bump asyncsleepiq to 1.3.5 (#92759) 2023-05-14 12:11:02 -04:00
karwosts
f866d6100d Fix zwave_js services example data (#92748) 2023-05-14 12:11:01 -04:00
Brandon Rothweiler
8d0da78fab Increase timeout to 30 seconds for Mazda integration (#92744) 2023-05-14 12:11:00 -04:00
J. Nick Koston
7173a4f377 Bump aioesphomeapi to 3.7.4 to fix proxied BLE connections not retrying right away on error (#92741) 2023-05-14 12:11:00 -04:00
Eduard van Valkenburg
d4acb2a381 Update deprecated functions in SIA (#92737)
update deprecated functions
2023-05-14 12:10:59 -04:00
Shay Levy
b1111eb2c7 Bump aiowebostv to 0.3.3 to fix Python 3.11 support (#92736)
Bump aiowebostv to 0.3.3
2023-05-14 12:10:58 -04:00
Mick Vleeshouwer
4895ca218f Bump pyoverkiz to 1.7.8 (#92702) 2023-05-14 12:10:57 -04:00
Aaron Bach
91e9d21548 Bump aionotion to 2023.05.1 (#92697) 2023-05-14 12:10:56 -04:00
J. Nick Koston
996c6c4a92 Fix onvif reauth when device returns a http 401/403 error (#92690) 2023-05-14 12:10:55 -04:00
J. Nick Koston
96ff24aa2f Always request at least one zone for multi-zone LIFX devices (#92683) 2023-05-14 12:08:33 -04:00
J. Nick Koston
dcc5940f9b Fix parallel_updates being acquired too late for entity executor jobs (#92681)
* Fix parallel_updates being acquired too late for entity executor jobs

* tweak
2023-05-14 12:08:33 -04:00
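A hedged sketch of the ordering issue this commit fixes (function and parameter names are hypothetical, not Home Assistant's actual entity code): the `parallel_updates` semaphore must be acquired before the update is handed to the executor, not inside the executor job, otherwise more jobs than allowed can be queued at once.

```python
import asyncio
from collections.abc import Callable


async def async_update_entity(
    parallel_updates: asyncio.Semaphore, update: Callable[[], None]
) -> None:
    """Run a blocking entity update while holding the parallel_updates limit."""
    async with parallel_updates:
        # Acquire first, then dispatch to the executor; acquiring inside the
        # executor job would let updates pile up past the configured limit.
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, update)
```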
rikroe
dd51bba677 Bump bimmer_connected to 0.13.3 (#92648)
Co-authored-by: rikroe <rikroe@users.noreply.github.com>
2023-05-14 12:08:32 -04:00
Luke
ac9da5c167 Roborock continue on failed mqtt disconnect (#92502)
continue on async disconnect failure
2023-05-14 12:08:31 -04:00
Paulus Schoutsen
e904edb12e 2023.5.2 (#92610) 2023-05-05 15:23:51 -04:00
J. Nick Koston
ddebfb3ac5 Fix duplicate ONVIF sensors (#92629)
Some cameras do not configure the video source correctly
when using webhooks but work fine with PullPoint, which
results in duplicate sensors
2023-05-05 14:41:00 -04:00
J. Nick Koston
fe57901b5f Add support for visitor detections to onvif (#92350) 2023-05-05 14:40:59 -04:00
J. Nick Koston
73d4c73dbb Fix missing ONVIF events when switching from PullPoint to webhooks (#92627)
We now let the PullPoint subscription expire instead of explicitly
unsubscribing when pausing the subscription. We will still unsubscribe
it if Home Assistant is shut down or the integration is reloaded.

Some cameras will cancel ALL subscriptions when we do an unsubscribe,
so we want to let the PullPoint subscription expire instead
of explicitly cancelling it.
2023-05-05 14:39:32 -04:00
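A minimal sketch of the pause-versus-shutdown distinction described above, with entirely hypothetical names (the real onvif integration code differs): pausing just stops renewals and lets the camera-side subscription expire, while shutdown or reload still unsubscribes explicitly.

```python
from __future__ import annotations

import asyncio


class PullPointSubscriptionSketch:
    """Hypothetical model of the behavior described in the commit message."""

    def __init__(self) -> None:
        self._renew_task: asyncio.Task | None = None

    def _cancel_renewals(self) -> None:
        if self._renew_task is not None:
            self._renew_task.cancel()
            self._renew_task = None

    async def _unsubscribe(self) -> None:
        """Placeholder for the camera Unsubscribe call."""

    async def async_pause(self) -> None:
        # Pause: stop renewing and let the subscription expire on the camera,
        # because an explicit Unsubscribe cancels ALL subscriptions on some
        # firmwares.
        self._cancel_renewals()

    async def async_shutdown(self) -> None:
        # Shutdown/reload: unsubscribe explicitly since the subscription
        # will not be reused.
        self._cancel_renewals()
        await self._unsubscribe()
```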
Bram Kragten
f1bccef224 Update frontend to 20230503.3 (#92617) 2023-05-05 14:39:31 -04:00
Joost Lekkerkerker
cf243fbe11 Lower scan interval for OpenSky (#92593)
* Lower scan interval for opensky to avoid hitting rate limit

* Lower scan interval for opensky to avoid hitting rate limit

* Update homeassistant/components/opensky/sensor.py

Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>

* Update homeassistant/components/opensky/sensor.py

Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>

---------

Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2023-05-05 14:39:30 -04:00
J. Nick Koston
35c48d3d0e Improve reliability of ONVIF subscription renewals (#92551)
* Improve reliability of onvif subscription renewals

upstream changelog: https://github.com/hunterjm/python-onvif-zeep-async/compare/v2.0.0...v2.1.0

* ```
Traceback (most recent call last):
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/onvif/client.py", line 75, in _async_wrap_connection_error_retry
    return await func(*args, **kwargs)
  File "/Users/bdraco/home-assistant/homeassistant/components/onvif/event.py", line 441, in _async_call_pullpoint_subscription_renew
    await self._pullpoint_subscription.Renew(SUBSCRIPTION_RELATIVE_TIME)
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/zeep/proxy.py", line 64, in __call__
    return await self._proxy._binding.send_async(
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/zeep/wsdl/bindings/soap.py", line 156, in send_async
    response = await client.transport.post_xml(
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/zeep/transports.py", line 235, in post_xml
    response = await self.post(address, message, headers)
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/zeep/transports.py", line 220, in post
    response = await self.client.post(
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/httpx/_client.py", line 1845, in post
    return await self.request(
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/httpx/_client.py", line 1530, in request
    return await self.send(request, auth=auth, follow_redirects=follow_redirects)
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/httpx/_client.py", line 1617, in send
    response = await self._send_handling_auth(
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/httpx/_client.py", line 1645, in _send_handling_auth
    response = await self._send_handling_redirects(
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/httpx/_client.py", line 1682, in _send_handling_redirects
    response = await self._send_single_request(request)
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/httpx/_client.py", line 1719, in _send_single_request
    response = await transport.handle_async_request(request)
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/httpx/_transports/default.py", line 352, in handle_async_request
    with map_httpcore_exceptions():
  File "/opt/homebrew/Cellar/python@3.10/3.10.10_1/Frameworks/Python.framework/Versions/3.10/lib/python3.10/contextlib.py", line 153, in __exit__
    self.gen.throw(typ, value, traceback)
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/httpx/_transports/default.py", line 77, in map_httpcore_exceptions
    raise mapped_exc(message) from exc
httpx.ReadTimeout
```

* adjust timeouts for slower tplink cameras

* tweak

* more debug

* tweak

* adjust message

* tweak

* Revert "tweak"

This reverts commit 10ee2a8de70e93dc5be85b1992ec4d30c2188344.

* give time in seconds

* revert

* revert

* Update homeassistant/components/onvif/event.py

* Update homeassistant/components/onvif/event.py
2023-05-05 14:39:29 -04:00
Paulus Schoutsen
15ef53cd9a Bumped version to 2023.5.2 2023-05-05 08:47:12 -04:00
Erik Montnemery
fb29e1a14e Bump hatasmota to 0.6.5 (#92585)
* Bump hatasmota to 0.6.5

* Fix tests
2023-05-05 08:47:08 -04:00
epenet
f8c3586f6b Fix hassio get_os_info retry (#92569) 2023-05-05 08:47:07 -04:00
Paulus Schoutsen
e8808b5fe7 Re-run expose entities migration if first time failed (#92564)
* Re-run expose entities migration if first time failed

* Count number of exposed entities

* Add tests

---------

Co-authored-by: Erik <erik@montnemery.com>
2023-05-05 08:47:06 -04:00
J. Nick Koston
82c0967716 Bump elkm1-lib to 2.2.2 (#92560)
changelog: https://github.com/gwww/elkm1/compare/2.2.1...2.2.2

fixes #92467
2023-05-05 08:47:05 -04:00
J. Nick Koston
163823d2a5 Allow duplicate state updates when force_update is set on an esphome sensor (#92553)
* Allow duplicate states when force_update is set on an esphome sensor

fixes #91221

* Update homeassistant/components/esphome/entry_data.py

Co-authored-by: pdw-mb <pdw@mythic-beasts.com>

---------

Co-authored-by: pdw-mb <pdw@mythic-beasts.com>
2023-05-05 08:47:04 -04:00
puddly
2dd1ce2047 Handle invalid ZHA cluster handlers (#92543)
* Do not crash on startup when an invalid cluster handler is encountered

* Add a unit test
2023-05-05 08:47:03 -04:00
J. Nick Koston
241cacde62 Bump aioesphomeapi to 13.7.3 to fix disconnecting while handshake is in progress (#92537)
Bump aioesphomeapi to 13.7.3

fixes #92432
2023-05-05 08:47:02 -04:00
Erik Montnemery
8a11ee81c4 Improve cloud migration (#92520)
* Improve cloud migration

* Tweak

* Use entity_ids func

---------

Co-authored-by: Paulus Schoutsen <balloob@gmail.com>
2023-05-05 08:47:01 -04:00
J. Nick Koston
e3762724a3 Fix blocking I/O in the event loop when starting ONVIF (#92518) 2023-05-05 08:47:00 -04:00
karwosts
b973825833 Fix scene service examples (#92501) 2023-05-05 08:46:59 -04:00
Eduard van Valkenburg
b2fcbbe50e Fix for SIA Code not being handled well (#92469)
* updated sia requirements

* updates because of changes in package

* linting and other small fixes

* fix for unknown code

* added same to alarm_control_panel
2023-05-05 08:46:58 -04:00
Francesco Carnielli
d96b37a004 Fix power sensor state_class in Netatmo integration (#92468) 2023-05-05 08:46:57 -04:00
DDanii
affece8857 Fix transmission error handling (#91548)
* transmission error handle fix

* added unexpected case tests
2023-05-05 08:46:56 -04:00
Paulus Schoutsen
bce18bf61a 2023.5.1 (#92513) 2023-05-04 12:45:55 -04:00
Paulus Schoutsen
eda0731e60 Bumped version to 2023.5.1 2023-05-04 10:23:58 -04:00
Bram Kragten
238c87055f Update frontend to 20230503.2 (#92508) 2023-05-04 10:23:53 -04:00
Erik Montnemery
4b4464a3de Force migration of cloud settings to exposed_entities (#92499) 2023-05-04 10:23:52 -04:00
J. Nick Koston
a07fbdd61c Bump bluetooth-auto-recovery 1.1.2 (#92495)
Improve handling when getting the power state times out

https://github.com/Bluetooth-Devices/bluetooth-auto-recovery/compare/v1.1.1...v1.1.2
2023-05-04 10:23:52 -04:00
J. Nick Koston
3126ebe9d6 Fix lifx light strips when color zones are not initially populated (#92487)
fixes #92456
2023-05-04 10:23:51 -04:00
Aaron Bach
89aec9d356 Bump aionotion to 2023.05.0 (#92451) 2023-05-04 10:23:49 -04:00
J. Nick Koston
0cfa566ff6 Fix onvif cameras with invalid encodings in device info (#92450)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2023-05-04 10:23:49 -04:00
J. Nick Koston
fffece95f5 Fix onvif setup when time set service is not functional (#92447) 2023-05-04 10:23:48 -04:00
Franck Nijhof
c61e29709c 2023.5.0 (#92422) 2023-05-03 20:46:28 +02:00
Michael Hansen
458fe17a48 Bump voip-utils to 0.0.7 (#92372) 2023-05-03 20:02:45 +02:00
Franck Nijhof
15fdefd23b Bumped version to 2023.5.0 2023-05-03 19:44:53 +02:00
Michael Hansen
576f9600b5 Pass OPUS payload ID through VoIP (#92421) 2023-05-03 19:44:34 +02:00
Franck Nijhof
7a62574360 Bumped version to 2023.5.0b9 2023-05-03 18:59:42 +02:00
Erik Montnemery
0251d677d8 Migrate cloud settings for all Google entities (#92416) 2023-05-03 18:59:32 +02:00
Michael Hansen
2cd9b94ecb Skip unexposed entities in intent handlers (#92415)
* Filter intent handler entities by exposure

* Add test for skipping unexposed entities
2023-05-03 18:59:29 +02:00
Erik Montnemery
3cd2ab2319 Migrate cloud settings for all Alexa entities (#92413)
* Migrate cloud settings for all Alexa entities

* Also set settings for unknown entities
2023-05-03 18:59:25 +02:00
J. Nick Koston
4f0d403393 Bump bluetooth-auto-recovery to 1.1.1 (#92412)
* Bump bluetooth-auto-recovery to 1.1.0

https://github.com/Bluetooth-Devices/bluetooth-auto-recovery/releases/tag/v1.1.0

In https://github.com/home-assistant/operating-system/issues/2485 it was discovered that a more aggressive reset strategy is needed due to a yet-unsolved bug in the Linux 6.1.x kernel series

* bump to 1.1.1 since event 47 cannot be decoded (newer kernels only)
2023-05-03 18:59:22 +02:00
Bram Kragten
b558cf8b59 Update frontend to 20230503.1 (#92410) 2023-05-03 18:59:18 +02:00
Erik Montnemery
820c7b77ce Update cloud WS API for getting entity (#92409)
* Update cloud WS API for getting entity

* Adjust comment
2023-05-03 18:59:15 +02:00
Erik Montnemery
9d0fc916fc Use exposed_entities API in cloud tests (#92408) 2023-05-03 18:59:11 +02:00
Erik Montnemery
387f07a97f Include all entities in cloud lists (#92406) 2023-05-03 18:59:08 +02:00
J. Nick Koston
44968cfc7c Handle webhook URL rejection in onvif (#92405) 2023-05-03 18:59:04 +02:00
Erik Montnemery
c6751bed86 Allow setting google disable 2fa flag on any entity (#92403)
* Allow setting google disable 2fa flag on any entity

* Fix test

* Include disable_2fa flag in cloud/google_assistant/entities/get
2023-05-03 18:59:01 +02:00
Bram Kragten
b87e3860d9 Update frontend to 20230503.0 (#92402) 2023-05-03 18:58:57 +02:00
David F. Mulcahey
8ef6bd85f5 Bump ZHA quirks (#92400) 2023-05-03 18:58:54 +02:00
Erik Montnemery
ad4fed4f60 Allow exposing any entity to the default conversation agent (#92398)
* Allow exposing any entity to the default conversation agent

* Tweak

* Fix race, update tests

* Update tests
2023-05-03 18:58:51 +02:00
Erik Montnemery
1050895657 Don't use storage collection helper in ExposedEntities (#92396)
* Don't use storage collection helper in ExposedEntities

* Fix tests
2023-05-03 18:58:47 +02:00
Erik Montnemery
c31d657206 Improve exposed entities tests (#92389) 2023-05-03 18:58:44 +02:00
repaxan
88343bed77 Add ZHA binding for window coverings (#92387) 2023-05-03 18:58:40 +02:00
Artem Draft
51a10a84da Bump pybravia to 0.3.3 (#92378) 2023-05-03 18:58:35 +02:00
Paulus Schoutsen
5f3bbf2804 Bumped version to 2023.5.0b8 2023-05-02 22:39:38 -04:00
Paulus Schoutsen
b8eebf085c Fix deserialize bug + add test coverage (#92382) 2023-05-02 22:39:33 -04:00
Franck Nijhof
cdfd53e1cc Bumped version to 2023.5.0b7 2023-05-02 22:44:32 +02:00
Bram Kragten
ca147dd97e Update frontend to 20230502.0 (#92373) 2023-05-02 22:43:23 +02:00
Erik Montnemery
5b1278d885 Allow exposing entities not in the entity registry to assistants (#92363) 2023-05-02 22:43:19 +02:00
J. Nick Koston
0db28dcf4d Start onvif events later (#92354) 2023-05-02 22:43:15 +02:00
Raman Gupta
7c651665c5 Clean up zwave_js.cover (#92353) 2023-05-02 22:43:12 +02:00
J. Nick Koston
2f3964e3ce Bump ulid-transform to 0.7.2 (#92344) 2023-05-02 22:43:08 +02:00
John Pettitt
eef95fa0d4 Increase default timeout in sense (#90556)
Co-authored-by: J. Nick Koston <nick@koston.org>
2023-05-02 22:43:03 +02:00
165 changed files with 2730 additions and 1137 deletions

View File

@@ -783,6 +783,7 @@ build.json @home-assistant/supervisor
/homeassistant/components/netdata/ @fabaff
/homeassistant/components/netgear/ @hacf-fr @Quentame @starkillerOG
/tests/components/netgear/ @hacf-fr @Quentame @starkillerOG
/homeassistant/components/netgear_lte/ @tkdrob
/homeassistant/components/network/ @home-assistant/core
/tests/components/network/ @home-assistant/core
/homeassistant/components/nexia/ @bdraco

View File

@@ -3,12 +3,12 @@ from __future__ import annotations
from typing import Any, Final
from aioairzone.common import OperationMode
from aioairzone.common import OperationAction, OperationMode
from aioairzone.const import (
API_MODE,
API_ON,
API_SET_POINT,
AZD_DEMAND,
AZD_ACTION,
AZD_HUMIDITY,
AZD_MASTER,
AZD_MODE,
@@ -39,12 +39,13 @@ from .const import API_TEMPERATURE_STEP, DOMAIN, TEMP_UNIT_LIB_TO_HASS
from .coordinator import AirzoneUpdateCoordinator
from .entity import AirzoneZoneEntity
HVAC_ACTION_LIB_TO_HASS: Final[dict[OperationMode, HVACAction]] = {
OperationMode.STOP: HVACAction.OFF,
OperationMode.COOLING: HVACAction.COOLING,
OperationMode.HEATING: HVACAction.HEATING,
OperationMode.FAN: HVACAction.FAN,
OperationMode.DRY: HVACAction.DRYING,
HVAC_ACTION_LIB_TO_HASS: Final[dict[OperationAction, HVACAction]] = {
OperationAction.COOLING: HVACAction.COOLING,
OperationAction.DRYING: HVACAction.DRYING,
OperationAction.FAN: HVACAction.FAN,
OperationAction.HEATING: HVACAction.HEATING,
OperationAction.IDLE: HVACAction.IDLE,
OperationAction.OFF: HVACAction.OFF,
}
HVAC_MODE_LIB_TO_HASS: Final[dict[OperationMode, HVACMode]] = {
OperationMode.STOP: HVACMode.OFF,
@@ -156,14 +157,13 @@ class AirzoneClimate(AirzoneZoneEntity, ClimateEntity):
"""Update climate attributes."""
self._attr_current_temperature = self.get_airzone_value(AZD_TEMP)
self._attr_current_humidity = self.get_airzone_value(AZD_HUMIDITY)
self._attr_hvac_action = HVAC_ACTION_LIB_TO_HASS[
self.get_airzone_value(AZD_ACTION)
]
if self.get_airzone_value(AZD_ON):
mode = self.get_airzone_value(AZD_MODE)
self._attr_hvac_mode = HVAC_MODE_LIB_TO_HASS[mode]
if self.get_airzone_value(AZD_DEMAND):
self._attr_hvac_action = HVAC_ACTION_LIB_TO_HASS[mode]
else:
self._attr_hvac_action = HVACAction.IDLE
self._attr_hvac_mode = HVAC_MODE_LIB_TO_HASS[
self.get_airzone_value(AZD_MODE)
]
else:
self._attr_hvac_action = HVACAction.OFF
self._attr_hvac_mode = HVACMode.OFF
self._attr_target_temperature = self.get_airzone_value(AZD_TEMP_SET)

View File

@@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone",
"iot_class": "local_polling",
"loggers": ["aioairzone"],
"requirements": ["aioairzone==0.5.2"]
"requirements": ["aioairzone==0.5.5"]
}

View File

@@ -3,7 +3,7 @@ from abc import ABC, abstractmethod
import asyncio
import logging
from homeassistant.core import CALLBACK_TYPE, callback
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.helpers.storage import Store
from .const import DOMAIN
@@ -17,11 +17,12 @@ _LOGGER = logging.getLogger(__name__)
class AbstractConfig(ABC):
"""Hold the configuration for Alexa."""
_unsub_proactive_report: asyncio.Task[CALLBACK_TYPE] | None = None
_unsub_proactive_report: CALLBACK_TYPE | None = None
def __init__(self, hass):
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize abstract config."""
self.hass = hass
self._enable_proactive_mode_lock = asyncio.Lock()
self._store = None
async def async_initialize(self):
@@ -67,20 +68,17 @@ class AbstractConfig(ABC):
async def async_enable_proactive_mode(self):
"""Enable proactive mode."""
_LOGGER.debug("Enable proactive mode")
if self._unsub_proactive_report is None:
self._unsub_proactive_report = self.hass.async_create_task(
async_enable_proactive_mode(self.hass, self)
async with self._enable_proactive_mode_lock:
if self._unsub_proactive_report is not None:
return
self._unsub_proactive_report = await async_enable_proactive_mode(
self.hass, self
)
try:
await self._unsub_proactive_report
except Exception:
self._unsub_proactive_report = None
raise
async def async_disable_proactive_mode(self):
"""Disable proactive mode."""
_LOGGER.debug("Disable proactive mode")
if unsub_func := await self._unsub_proactive_report:
if unsub_func := self._unsub_proactive_report:
unsub_func()
self._unsub_proactive_report = None

View File

@@ -60,6 +60,7 @@ class AlexaConfig(AbstractConfig):
"""Return an identifier for the user that represents this config."""
return ""
@core.callback
def should_expose(self, entity_id):
"""If an entity should be exposed."""
if not self._config[CONF_FILTER].empty_filter:

View File

@@ -18,7 +18,7 @@
"bleak==0.20.2",
"bleak-retry-connector==3.0.2",
"bluetooth-adapters==0.15.3",
"bluetooth-auto-recovery==1.0.3",
"bluetooth-auto-recovery==1.2.0",
"bluetooth-data-tools==0.4.0",
"dbus-fast==1.85.0"
]

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive",
"iot_class": "cloud_polling",
"loggers": ["bimmer_connected"],
"requirements": ["bimmer_connected==0.13.2"]
"requirements": ["bimmer_connected==0.13.3"]
}

View File

@@ -7,7 +7,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pybravia"],
"requirements": ["pybravia==0.3.2"],
"requirements": ["pybravia==0.3.3"],
"ssdp": [
{
"st": "urn:schemas-sony-com:service:ScalarWebAPI:1",

View File

@@ -22,6 +22,7 @@ from homeassistant.components.alexa import (
)
from homeassistant.components.binary_sensor import BinarySensorDeviceClass
from homeassistant.components.homeassistant.exposed_entities import (
async_expose_entity,
async_get_assistant_settings,
async_listen_entity_updates,
async_should_expose,
@@ -29,6 +30,7 @@ from homeassistant.components.homeassistant.exposed_entities import (
from homeassistant.components.sensor import SensorDeviceClass
from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES
from homeassistant.core import HomeAssistant, callback, split_entity_id
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er, start
from homeassistant.helpers.entity import get_device_class
from homeassistant.helpers.event import async_call_later
@@ -104,7 +106,11 @@ def entity_supported(hass: HomeAssistant, entity_id: str) -> bool:
if domain in SUPPORTED_DOMAINS:
return True
device_class = get_device_class(hass, entity_id)
try:
device_class = get_device_class(hass, entity_id)
except HomeAssistantError:
# The entity no longer exists
return False
if (
domain == "binary_sensor"
and device_class in SUPPORTED_BINARY_SENSOR_DEVICE_CLASSES
@@ -193,35 +199,50 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
# Don't migrate if there's a YAML config
return
entity_registry = er.async_get(self.hass)
for entity_id, entry in entity_registry.entities.items():
if CLOUD_ALEXA in entry.options:
continue
options = {"should_expose": self._should_expose_legacy(entity_id)}
entity_registry.async_update_entity_options(entity_id, CLOUD_ALEXA, options)
for entity_id in {
*self.hass.states.async_entity_ids(),
*self._prefs.alexa_entity_configs,
}:
async_expose_entity(
self.hass,
CLOUD_ALEXA,
entity_id,
self._should_expose_legacy(entity_id),
)
async def async_initialize(self):
"""Initialize the Alexa config."""
await super().async_initialize()
if self._prefs.alexa_settings_version != ALEXA_SETTINGS_VERSION:
if self._prefs.alexa_settings_version < 2:
self._migrate_alexa_entity_settings_v1()
await self._prefs.async_update(
alexa_settings_version=ALEXA_SETTINGS_VERSION
async def on_hass_started(hass):
if self._prefs.alexa_settings_version != ALEXA_SETTINGS_VERSION:
if self._prefs.alexa_settings_version < 2 or (
# Recover from a bug we had in 2023.5.0 where entities didn't get exposed
self._prefs.alexa_settings_version < 3
and not any(
settings.get("should_expose", False)
for settings in async_get_assistant_settings(
hass, CLOUD_ALEXA
).values()
)
):
self._migrate_alexa_entity_settings_v1()
await self._prefs.async_update(
alexa_settings_version=ALEXA_SETTINGS_VERSION
)
async_listen_entity_updates(
self.hass, CLOUD_ALEXA, self._async_exposed_entities_updated
)
async def hass_started(hass):
async def on_hass_start(hass):
if self.enabled and ALEXA_DOMAIN not in self.hass.config.components:
await async_setup_component(self.hass, ALEXA_DOMAIN, {})
start.async_at_start(self.hass, hass_started)
start.async_at_start(self.hass, on_hass_start)
start.async_at_started(self.hass, on_hass_started)
self._prefs.async_listen_updates(self._async_prefs_updated)
async_listen_entity_updates(
self.hass, CLOUD_ALEXA, self._async_exposed_entities_updated
)
self.hass.bus.async_listen(
er.EVENT_ENTITY_REGISTRY_UPDATED,
self._handle_entity_registry_updated,
@@ -257,6 +278,7 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
and entity_supported(self.hass, entity_id)
)
@callback
def should_expose(self, entity_id):
"""If an entity should be exposed."""
if not self._config[CONF_FILTER].empty_filter:

View File

@@ -11,7 +11,11 @@ from homeassistant.components.binary_sensor import BinarySensorDeviceClass
from homeassistant.components.google_assistant import DOMAIN as GOOGLE_DOMAIN
from homeassistant.components.google_assistant.helpers import AbstractConfig
from homeassistant.components.homeassistant.exposed_entities import (
async_expose_entity,
async_get_assistant_settings,
async_get_entity_settings,
async_listen_entity_updates,
async_set_assistant_option,
async_should_expose,
)
from homeassistant.components.sensor import SensorDeviceClass
@@ -23,6 +27,7 @@ from homeassistant.core import (
callback,
split_entity_id,
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, entity_registry as er, start
from homeassistant.helpers.entity import get_device_class
from homeassistant.setup import async_setup_component
@@ -171,34 +176,56 @@ class CloudGoogleConfig(AbstractConfig):
# Don't migrate if there's a YAML config
return
entity_registry = er.async_get(self.hass)
for entity_id, entry in entity_registry.entities.items():
if CLOUD_GOOGLE in entry.options:
continue
options = {"should_expose": self._should_expose_legacy(entity_id)}
if _2fa_disabled := (self._2fa_disabled_legacy(entity_id) is not None):
options[PREF_DISABLE_2FA] = _2fa_disabled
entity_registry.async_update_entity_options(
entity_id, CLOUD_GOOGLE, options
for entity_id in {
*self.hass.states.async_entity_ids(),
*self._prefs.google_entity_configs,
}:
async_expose_entity(
self.hass,
CLOUD_GOOGLE,
entity_id,
self._should_expose_legacy(entity_id),
)
if _2fa_disabled := (self._2fa_disabled_legacy(entity_id) is not None):
async_set_assistant_option(
self.hass,
CLOUD_GOOGLE,
entity_id,
PREF_DISABLE_2FA,
_2fa_disabled,
)
async def async_initialize(self):
"""Perform async initialization of config."""
await super().async_initialize()
if self._prefs.google_settings_version != GOOGLE_SETTINGS_VERSION:
if self._prefs.google_settings_version < 2:
self._migrate_google_entity_settings_v1()
await self._prefs.async_update(
google_settings_version=GOOGLE_SETTINGS_VERSION
async def on_hass_started(hass: HomeAssistant) -> None:
if self._prefs.google_settings_version != GOOGLE_SETTINGS_VERSION:
if self._prefs.google_settings_version < 2 or (
# Recover from a bug we had in 2023.5.0 where entities didn't get exposed
self._prefs.google_settings_version < 3
and not any(
settings.get("should_expose", False)
for settings in async_get_assistant_settings(
hass, CLOUD_GOOGLE
).values()
)
):
self._migrate_google_entity_settings_v1()
await self._prefs.async_update(
google_settings_version=GOOGLE_SETTINGS_VERSION
)
async_listen_entity_updates(
self.hass, CLOUD_GOOGLE, self._async_exposed_entities_updated
)
async def hass_started(hass):
async def on_hass_start(hass: HomeAssistant) -> None:
if self.enabled and GOOGLE_DOMAIN not in self.hass.config.components:
await async_setup_component(self.hass, GOOGLE_DOMAIN, {})
start.async_at_start(self.hass, hass_started)
start.async_at_start(self.hass, on_hass_start)
start.async_at_started(self.hass, on_hass_started)
# Remove any stored user agent id that is not ours
remove_agent_user_ids = []
@@ -210,9 +237,6 @@ class CloudGoogleConfig(AbstractConfig):
await self.async_disconnect_agent_user(agent_user_id)
self._prefs.async_listen_updates(self._async_prefs_updated)
async_listen_entity_updates(
self.hass, CLOUD_GOOGLE, self._async_exposed_entities_updated
)
self.hass.bus.async_listen(
er.EVENT_ENTITY_REGISTRY_UPDATED,
self._handle_entity_registry_updated,
@@ -289,14 +313,13 @@ class CloudGoogleConfig(AbstractConfig):
def should_2fa(self, state):
"""If an entity should be checked for 2FA."""
entity_registry = er.async_get(self.hass)
registry_entry = entity_registry.async_get(state.entity_id)
if not registry_entry:
try:
settings = async_get_entity_settings(self.hass, state.entity_id)
except HomeAssistantError:
# Handle the entity has been removed
return False
assistant_options = registry_entry.options.get(CLOUD_GOOGLE, {})
assistant_options = settings.get(CLOUD_GOOGLE, {})
return not assistant_options.get(PREF_DISABLE_2FA, DEFAULT_DISABLE_2FA)
async def async_report_state(self, message, agent_user_id: str):
@@ -382,7 +405,7 @@ class CloudGoogleConfig(AbstractConfig):
self.async_schedule_google_sync_all()
@callback
def _handle_device_registry_updated(self, event: Event) -> None:
async def _handle_device_registry_updated(self, event: Event) -> None:
"""Handle when device registry updated."""
if (
not self.enabled

View File

@@ -1,6 +1,7 @@
"""The HTTP api to control the cloud integration."""
import asyncio
from collections.abc import Mapping
from contextlib import suppress
import dataclasses
from functools import wraps
from http import HTTPStatus
@@ -21,11 +22,12 @@ from homeassistant.components.alexa import (
errors as alexa_errors,
)
from homeassistant.components.google_assistant import helpers as google_helpers
from homeassistant.components.homeassistant import exposed_entities
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.util.location import async_detect_location_info
@@ -566,15 +568,14 @@ async def google_assistant_get(
"""Get data for a single google assistant entity."""
cloud = hass.data[DOMAIN]
gconf = await cloud.client.get_google_config()
entity_registry = er.async_get(hass)
entity_id: str = msg["entity_id"]
state = hass.states.get(entity_id)
if not entity_registry.async_is_registered(entity_id) or not state:
if not state:
connection.send_error(
msg["id"],
websocket_api.const.ERR_NOT_FOUND,
f"{entity_id} unknown or not in the entity registry",
f"{entity_id} unknown",
)
return
@@ -587,10 +588,16 @@ async def google_assistant_get(
)
return
assistant_options: Mapping[str, Any] = {}
with suppress(HomeAssistantError, KeyError):
settings = exposed_entities.async_get_entity_settings(hass, entity_id)
assistant_options = settings[CLOUD_GOOGLE]
result = {
"entity_id": entity.entity_id,
"traits": [trait.name for trait in entity.traits()],
"might_2fa": entity.might_2fa_traits(),
PREF_DISABLE_2FA: assistant_options.get(PREF_DISABLE_2FA),
}
connection.send_result(msg["id"], result)
@@ -609,14 +616,11 @@ async def google_assistant_list(
"""List all google assistant entities."""
cloud = hass.data[DOMAIN]
gconf = await cloud.client.get_google_config()
entity_registry = er.async_get(hass)
entities = google_helpers.async_get_entities(hass, gconf)
result = []
for entity in entities:
if not entity_registry.async_is_registered(entity.entity_id):
continue
result.append(
{
"entity_id": entity.entity_id,
@@ -645,27 +649,19 @@ async def google_assistant_update(
msg: dict[str, Any],
) -> None:
"""Update google assistant entity config."""
entity_registry = er.async_get(hass)
entity_id: str = msg["entity_id"]
if not (registry_entry := entity_registry.async_get(entity_id)):
connection.send_error(
msg["id"],
websocket_api.const.ERR_NOT_ALLOWED,
f"can't configure {entity_id}",
)
return
assistant_options: Mapping[str, Any] = {}
with suppress(HomeAssistantError, KeyError):
settings = exposed_entities.async_get_entity_settings(hass, entity_id)
assistant_options = settings[CLOUD_GOOGLE]
disable_2fa = msg[PREF_DISABLE_2FA]
assistant_options: Mapping[str, Any]
if (
assistant_options := registry_entry.options.get(CLOUD_GOOGLE, {})
) and assistant_options.get(PREF_DISABLE_2FA) == disable_2fa:
if assistant_options.get(PREF_DISABLE_2FA) == disable_2fa:
return
assistant_options = assistant_options | {PREF_DISABLE_2FA: disable_2fa}
entity_registry.async_update_entity_options(
entity_id, CLOUD_GOOGLE, assistant_options
exposed_entities.async_set_assistant_option(
hass, CLOUD_GOOGLE, entity_id, PREF_DISABLE_2FA, disable_2fa
)
connection.send_result(msg["id"])
@@ -686,17 +682,8 @@ async def alexa_get(
msg: dict[str, Any],
) -> None:
"""Get data for a single alexa entity."""
entity_registry = er.async_get(hass)
entity_id: str = msg["entity_id"]
if not entity_registry.async_is_registered(entity_id):
connection.send_error(
msg["id"],
websocket_api.const.ERR_NOT_FOUND,
f"{entity_id} not in the entity registry",
)
return
if entity_id in CLOUD_NEVER_EXPOSED_ENTITIES or not entity_supported_by_alexa(
hass, entity_id
):
@@ -723,14 +710,11 @@ async def alexa_list(
"""List all alexa entities."""
cloud = hass.data[DOMAIN]
alexa_config = await cloud.client.get_alexa_config()
entity_registry = er.async_get(hass)
entities = alexa_entities.async_get_entities(hass, alexa_config)
result = []
for entity in entities:
if not entity_registry.async_is_registered(entity.entity_id):
continue
result.append(
{
"entity_id": entity.entity_id,

View File

@@ -41,8 +41,8 @@ STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 2
ALEXA_SETTINGS_VERSION = 2
GOOGLE_SETTINGS_VERSION = 2
ALEXA_SETTINGS_VERSION = 3
GOOGLE_SETTINGS_VERSION = 3
class CloudPreferencesStore(Store):

View File

@@ -1,4 +1,5 @@
"""Const for conversation integration."""
DOMAIN = "conversation"
DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
HOME_ASSISTANT_AGENT = "homeassistant"

View File

@@ -21,19 +21,21 @@ from homeassistant.components.homeassistant.exposed_entities import (
async_listen_entity_updates,
async_should_expose,
)
from homeassistant.const import ATTR_DEVICE_CLASS
from homeassistant.const import MATCH_ALL
from homeassistant.helpers import (
area_registry as ar,
device_registry as dr,
entity_registry as er,
intent,
start,
template,
translation,
)
from homeassistant.helpers.event import async_track_state_change
from homeassistant.util.json import JsonObjectType, json_loads_object
from .agent import AbstractConversationAgent, ConversationInput, ConversationResult
from .const import DOMAIN
from .const import DEFAULT_EXPOSED_ATTRIBUTES, DOMAIN
_LOGGER = logging.getLogger(__name__)
_DEFAULT_ERROR_TEXT = "Sorry, I couldn't understand that"
@@ -81,16 +83,24 @@ def async_setup(hass: core.HomeAssistant) -> None:
async_should_expose(hass, DOMAIN, entity_id)
@core.callback
def async_handle_entity_registry_changed(event: core.Event) -> None:
"""Set expose flag on newly created entities."""
if event.data["action"] == "create":
async_should_expose(hass, DOMAIN, event.data["entity_id"])
def async_entity_state_listener(
changed_entity: str,
old_state: core.State | None,
new_state: core.State | None,
):
"""Set expose flag on new entities."""
if old_state is not None or new_state is None:
return
async_should_expose(hass, DOMAIN, changed_entity)
hass.bus.async_listen(
er.EVENT_ENTITY_REGISTRY_UPDATED,
async_handle_entity_registry_changed,
run_immediately=True,
)
@core.callback
def async_hass_started(hass: core.HomeAssistant) -> None:
"""Set expose flag on all entities."""
for state in hass.states.async_all():
async_should_expose(hass, DOMAIN, state.entity_id)
async_track_state_change(hass, MATCH_ALL, async_entity_state_listener)
start.async_at_started(hass, async_hass_started)
class DefaultAgent(AbstractConversationAgent):
@@ -130,6 +140,11 @@ class DefaultAgent(AbstractConversationAgent):
self._async_handle_entity_registry_changed,
run_immediately=True,
)
self.hass.bus.async_listen(
core.EVENT_STATE_CHANGED,
self._async_handle_state_changed,
run_immediately=True,
)
async_listen_entity_updates(
self.hass, DOMAIN, self._async_exposed_entities_updated
)
@@ -186,6 +201,7 @@ class DefaultAgent(AbstractConversationAgent):
user_input.text,
user_input.context,
language,
assistant=DOMAIN,
)
except intent.IntentHandleError:
_LOGGER.exception("Intent handling error")
@@ -475,12 +491,19 @@ class DefaultAgent(AbstractConversationAgent):
@core.callback
def _async_handle_entity_registry_changed(self, event: core.Event) -> None:
"""Clear names list cache when an entity registry entry has changed."""
if event.data["action"] == "update" and not any(
if event.data["action"] != "update" or not any(
field in event.data["changes"] for field in _ENTITY_REGISTRY_UPDATE_FIELDS
):
return
self._slot_lists = None
@core.callback
def _async_handle_state_changed(self, event: core.Event) -> None:
"""Clear names list cache when a state is added or removed from the state machine."""
if event.data.get("old_state") and event.data.get("new_state"):
return
self._slot_lists = None
@core.callback
def _async_exposed_entities_updated(self) -> None:
"""Handle updated preferences."""
@@ -493,30 +516,38 @@ class DefaultAgent(AbstractConversationAgent):
area_ids_with_entities: set[str] = set()
entity_registry = er.async_get(self.hass)
entities = [
entity
for entity in entity_registry.entities.values()
if async_should_expose(self.hass, DOMAIN, entity.entity_id)
states = [
state
for state in self.hass.states.async_all()
if async_should_expose(self.hass, DOMAIN, state.entity_id)
]
devices = dr.async_get(self.hass)
# Gather exposed entity names
entity_names = []
for entity in entities:
for state in states:
# Checked against "requires_context" and "excludes_context" in hassil
context = {"domain": entity.domain}
if entity.device_class:
context[ATTR_DEVICE_CLASS] = entity.device_class
context = {"domain": state.domain}
if state.attributes:
# Include some attributes
for attr in DEFAULT_EXPOSED_ATTRIBUTES:
if attr not in state.attributes:
continue
context[attr] = state.attributes[attr]
entity = entity_registry.async_get(state.entity_id)
if not entity:
# Default name
entity_names.append((state.name, state.name, context))
continue
if entity.aliases:
for alias in entity.aliases:
entity_names.append((alias, alias, context))
# Default name
name = entity.async_friendly_name(self.hass) or entity.entity_id.replace(
"_", " "
)
entity_names.append((name, name, context))
entity_names.append((state.name, state.name, context))
if entity.area_id:
# Expose area too

View File

@@ -15,5 +15,5 @@
"documentation": "https://www.home-assistant.io/integrations/elkm1",
"iot_class": "local_push",
"loggers": ["elkm1_lib"],
"requirements": ["elkm1-lib==2.2.1"]
"requirements": ["elkm1-lib==2.2.2"]
}

View File

@@ -25,6 +25,7 @@ from aioesphomeapi import (
NumberInfo,
SelectInfo,
SensorInfo,
SensorState,
SwitchInfo,
TextSensorInfo,
UserService,
@@ -240,9 +241,18 @@ class RuntimeEntryData:
current_state_by_type = self.state[state_type]
current_state = current_state_by_type.get(key, _SENTINEL)
subscription_key = (state_type, key)
if current_state == state and subscription_key not in stale_state:
if (
current_state == state
and subscription_key not in stale_state
and not (
type(state) is SensorState # pylint: disable=unidiomatic-typecheck
and (platform_info := self.info.get(Platform.SENSOR))
and (entity_info := platform_info.get(state.key))
and (cast(SensorInfo, entity_info)).force_update
)
):
_LOGGER.debug(
"%s: ignoring duplicate update with and key %s: %s",
"%s: ignoring duplicate update with key %s: %s",
self.name,
key,
state,

View File

@@ -15,7 +15,7 @@
"iot_class": "local_push",
"loggers": ["aioesphomeapi", "noiseprotocol"],
"requirements": [
"aioesphomeapi==13.7.2",
"aioesphomeapi==13.7.4",
"bluetooth-data-tools==0.4.0",
"esphome-dashboard-api==1.2.3"
],

View File

@@ -283,7 +283,7 @@ class FritzBoxTools(
entity_data["entity_states"][
key
] = await self.hass.async_add_executor_job(
update_fn, self.fritz_status, self.data.get(key)
update_fn, self.fritz_status, self.data["entity_states"].get(key)
)
if self.has_call_deflections:
entity_data[

View File

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20230501.0"]
"requirements": ["home-assistant-frontend==20230503.3"]
}

View File

@@ -590,7 +590,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
await async_setup_addon_panel(hass, hassio)
# Setup hardware integration for the detected board type
async def _async_setup_hardware_integration(hass):
async def _async_setup_hardware_integration(_: datetime) -> None:
"""Set up hardaware integration for the detected board type."""
if (os_info := get_os_info(hass)) is None:
# os info not yet fetched from supervisor, retry later
@@ -610,7 +610,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
)
)
await _async_setup_hardware_integration(hass)
await _async_setup_hardware_integration(datetime.now())
hass.async_create_task(
hass.config_entries.flow.async_init(DOMAIN, context={"source": "system"})

View File

@@ -3,7 +3,8 @@ from __future__ import annotations
from collections.abc import Callable, Mapping
import dataclasses
from typing import Any
from itertools import chain
from typing import Any, TypedDict
import voluptuous as vol
@@ -77,16 +78,41 @@ class AssistantPreferences:
return {"expose_new": self.expose_new}
@dataclasses.dataclass(frozen=True)
class ExposedEntity:
"""An exposed entity without a unique_id."""
assistants: dict[str, dict[str, Any]]
def to_json(self) -> dict[str, Any]:
"""Return a JSON serializable representation for storage."""
return {
"assistants": self.assistants,
}
class SerializedExposedEntities(TypedDict):
"""Serialized exposed entities storage storage collection."""
assistants: dict[str, dict[str, Any]]
exposed_entities: dict[str, dict[str, Any]]
class ExposedEntities:
"""Control assistant settings."""
"""Control assistant settings.
Settings for entities without a unique_id are stored in the store.
Settings for entities with a unique_id are stored in the entity registry.
"""
_assistants: dict[str, AssistantPreferences]
entities: dict[str, ExposedEntity]
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize."""
self._hass = hass
self._listeners: dict[str, list[Callable[[], None]]] = {}
self._store: Store[dict[str, dict[str, dict[str, Any]]]] = Store(
self._store: Store[SerializedExposedEntities] = Store(
hass, STORAGE_VERSION, STORAGE_KEY
)
@@ -95,7 +121,8 @@ class ExposedEntities:
websocket_api.async_register_command(self._hass, ws_expose_entity)
websocket_api.async_register_command(self._hass, ws_expose_new_entities_get)
websocket_api.async_register_command(self._hass, ws_expose_new_entities_set)
await self.async_load()
websocket_api.async_register_command(self._hass, ws_list_exposed_entities)
await self._async_load_data()
@callback
def async_listen_entity_updates(
@@ -105,30 +132,57 @@ class ExposedEntities:
self._listeners.setdefault(assistant, []).append(listener)
@callback
def async_expose_entity(
self, assistant: str, entity_id: str, should_expose: bool
def async_set_assistant_option(
self, assistant: str, entity_id: str, key: str, value: Any
) -> None:
"""Expose an entity to an assistant.
"""Set an option for an assistant.
Notify listeners if expose flag was changed.
"""
entity_registry = er.async_get(self._hass)
if not (registry_entry := entity_registry.async_get(entity_id)):
raise HomeAssistantError("Unknown entity")
return self._async_set_legacy_assistant_option(
assistant, entity_id, key, value
)
assistant_options: Mapping[str, Any]
if (
assistant_options := registry_entry.options.get(assistant, {})
) and assistant_options.get("should_expose") == should_expose:
) and assistant_options.get(key) == value:
return
assistant_options = assistant_options | {"should_expose": should_expose}
assistant_options = assistant_options | {key: value}
entity_registry.async_update_entity_options(
entity_id, assistant, assistant_options
)
for listener in self._listeners.get(assistant, []):
listener()
def _async_set_legacy_assistant_option(
self, assistant: str, entity_id: str, key: str, value: Any
) -> None:
"""Set an option for an assistant.
Notify listeners if expose flag was changed.
"""
if (
(exposed_entity := self.entities.get(entity_id))
and (assistant_options := exposed_entity.assistants.get(assistant, {}))
and assistant_options.get(key) == value
):
return
if exposed_entity:
new_exposed_entity = self._update_exposed_entity(
assistant, entity_id, key, value
)
else:
new_exposed_entity = self._new_exposed_entity(assistant, key, value)
self.entities[entity_id] = new_exposed_entity
self._async_schedule_save()
for listener in self._listeners.get(assistant, []):
listener()
@callback
def async_get_expose_new_entities(self, assistant: str) -> bool:
"""Check if new entities are exposed to an assistant."""
@@ -150,6 +204,11 @@ class ExposedEntities:
entity_registry = er.async_get(self._hass)
result: dict[str, Mapping[str, Any]] = {}
options: Mapping | None
for entity_id, exposed_entity in self.entities.items():
if options := exposed_entity.assistants.get(assistant):
result[entity_id] = options
for entity_id, entry in entity_registry.entities.items():
if options := entry.options.get(assistant):
result[entity_id] = options
@@ -162,11 +221,16 @@ class ExposedEntities:
entity_registry = er.async_get(self._hass)
result: dict[str, Mapping[str, Any]] = {}
if not (registry_entry := entity_registry.async_get(entity_id)):
assistant_settings: Mapping
if registry_entry := entity_registry.async_get(entity_id):
assistant_settings = registry_entry.options
elif exposed_entity := self.entities.get(entity_id):
assistant_settings = exposed_entity.assistants
else:
raise HomeAssistantError("Unknown entity")
for assistant in KNOWN_ASSISTANTS:
if options := registry_entry.options.get(assistant):
if options := assistant_settings.get(assistant):
result[assistant] = options
return result
@@ -181,9 +245,7 @@ class ExposedEntities:
entity_registry = er.async_get(self._hass)
if not (registry_entry := entity_registry.async_get(entity_id)):
# Entities which are not in the entity registry are not exposed
return False
return self._async_should_expose_legacy_entity(assistant, entity_id)
if assistant in registry_entry.options:
if "should_expose" in registry_entry.options[assistant]:
should_expose = registry_entry.options[assistant]["should_expose"]
@@ -202,11 +264,42 @@ class ExposedEntities:
return should_expose
def _async_should_expose_legacy_entity(
self, assistant: str, entity_id: str
) -> bool:
"""Return True if an entity should be exposed to an assistant."""
should_expose: bool
if (
exposed_entity := self.entities.get(entity_id)
) and assistant in exposed_entity.assistants:
if "should_expose" in exposed_entity.assistants[assistant]:
should_expose = exposed_entity.assistants[assistant]["should_expose"]
return should_expose
if self.async_get_expose_new_entities(assistant):
should_expose = self._is_default_exposed(entity_id, None)
else:
should_expose = False
if exposed_entity:
new_exposed_entity = self._update_exposed_entity(
assistant, entity_id, "should_expose", should_expose
)
else:
new_exposed_entity = self._new_exposed_entity(
assistant, "should_expose", should_expose
)
self.entities[entity_id] = new_exposed_entity
self._async_schedule_save()
return should_expose
def _is_default_exposed(
self, entity_id: str, registry_entry: er.RegistryEntry
self, entity_id: str, registry_entry: er.RegistryEntry | None
) -> bool:
"""Return True if an entity is exposed by default."""
if (
if registry_entry and (
registry_entry.entity_category is not None
or registry_entry.hidden_by is not None
):
@@ -216,7 +309,11 @@ class ExposedEntities:
if domain in DEFAULT_EXPOSED_DOMAINS:
return True
device_class = get_device_class(self._hass, entity_id)
try:
device_class = get_device_class(self._hass, entity_id)
except HomeAssistantError:
# The entity no longer exists
return False
if (
domain == "binary_sensor"
and device_class in DEFAULT_EXPOSED_BINARY_SENSOR_DEVICE_CLASSES
@@ -228,17 +325,43 @@ class ExposedEntities:
return False
async def async_load(self) -> None:
def _update_exposed_entity(
self, assistant: str, entity_id: str, key: str, value: Any
) -> ExposedEntity:
"""Update an exposed entity."""
entity = self.entities[entity_id]
assistants = dict(entity.assistants)
old_settings = assistants.get(assistant, {})
assistants[assistant] = old_settings | {key: value}
return ExposedEntity(assistants)
def _new_exposed_entity(
self, assistant: str, key: str, value: Any
) -> ExposedEntity:
"""Create a new exposed entity."""
return ExposedEntity(
assistants={assistant: {key: value}},
)
async def _async_load_data(self) -> SerializedExposedEntities | None:
"""Load from the store."""
data = await self._store.async_load()
assistants: dict[str, AssistantPreferences] = {}
exposed_entities: dict[str, ExposedEntity] = {}
if data:
for domain, preferences in data["assistants"].items():
assistants[domain] = AssistantPreferences(**preferences)
if data and "exposed_entities" in data:
for entity_id, preferences in data["exposed_entities"].items():
exposed_entities[entity_id] = ExposedEntity(**preferences)
self._assistants = assistants
self.entities = exposed_entities
return data
@callback
def _async_schedule_save(self) -> None:
@@ -246,17 +369,19 @@ class ExposedEntities:
self._store.async_delay_save(self._data_to_save, SAVE_DELAY)
@callback
def _data_to_save(self) -> dict[str, dict[str, dict[str, Any]]]:
"""Return data to store in a file."""
data = {}
data["assistants"] = {
domain: preferences.to_json()
for domain, preferences in self._assistants.items()
def _data_to_save(self) -> SerializedExposedEntities:
"""Return JSON-compatible date for storing to file."""
return {
"assistants": {
domain: preferences.to_json()
for domain, preferences in self._assistants.items()
},
"exposed_entities": {
entity_id: entity.to_json()
for entity_id, entity in self.entities.items()
},
}
return data
@callback
@websocket_api.require_admin
@@ -272,7 +397,6 @@ def ws_expose_entity(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""Expose an entity to an assistant."""
entity_registry = er.async_get(hass)
entity_ids: str = msg["entity_ids"]
if blocked := next(
@@ -288,28 +412,37 @@ def ws_expose_entity(
)
return
if unknown := next(
(
entity_id
for entity_id in entity_ids
if entity_id not in entity_registry.entities
),
None,
):
connection.send_error(
msg["id"], websocket_api.const.ERR_NOT_FOUND, f"can't expose '{unknown}'"
)
return
exposed_entities: ExposedEntities = hass.data[DATA_EXPOSED_ENTITIES]
for entity_id in entity_ids:
for assistant in msg["assistants"]:
exposed_entities.async_expose_entity(
assistant, entity_id, msg["should_expose"]
)
async_expose_entity(hass, assistant, entity_id, msg["should_expose"])
connection.send_result(msg["id"])
@callback
@websocket_api.require_admin
@websocket_api.websocket_command(
{
vol.Required("type"): "homeassistant/expose_entity/list",
}
)
def ws_list_exposed_entities(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""Expose an entity to an assistant."""
result: dict[str, Any] = {}
exposed_entities: ExposedEntities = hass.data[DATA_EXPOSED_ENTITIES]
entity_registry = er.async_get(hass)
for entity_id in chain(exposed_entities.entities, entity_registry.entities):
result[entity_id] = {}
entity_settings = async_get_entity_settings(hass, entity_id)
for assistant, settings in entity_settings.items():
if "should_expose" not in settings:
continue
result[entity_id][assistant] = settings["should_expose"]
connection.send_result(msg["id"], {"exposed_entities": result})
@callback
@websocket_api.require_admin
@websocket_api.websocket_command(
@@ -380,8 +513,9 @@ def async_expose_entity(
should_expose: bool,
) -> None:
"""Get assistant expose settings for an entity."""
exposed_entities: ExposedEntities = hass.data[DATA_EXPOSED_ENTITIES]
exposed_entities.async_expose_entity(assistant, entity_id, should_expose)
async_set_assistant_option(
hass, assistant, entity_id, "should_expose", should_expose
)
@callback
@@ -389,3 +523,15 @@ def async_should_expose(hass: HomeAssistant, assistant: str, entity_id: str) ->
"""Return True if an entity should be exposed to an assistant."""
exposed_entities: ExposedEntities = hass.data[DATA_EXPOSED_ENTITIES]
return exposed_entities.async_should_expose(assistant, entity_id)
@callback
def async_set_assistant_option(
hass: HomeAssistant, assistant: str, entity_id: str, option: str, value: Any
) -> None:
"""Set an option for an assistant.
Notify listeners if expose flag was changed.
"""
exposed_entities: ExposedEntities = hass.data[DATA_EXPOSED_ENTITIES]
exposed_entities.async_set_assistant_option(assistant, entity_id, option, value)

View File

@@ -174,23 +174,23 @@ class IntegrationSensor(RestoreEntity, SensorEntity):
async def async_added_to_hass(self) -> None:
"""Handle entity which will be added."""
await super().async_added_to_hass()
if state := await self.async_get_last_state():
try:
self._state = Decimal(state.state)
except (DecimalException, ValueError) as err:
_LOGGER.warning(
"%s could not restore last state %s: %s",
self.entity_id,
state.state,
err,
)
else:
self._attr_device_class = state.attributes.get(ATTR_DEVICE_CLASS)
if self._unit_of_measurement is None:
self._unit_of_measurement = state.attributes.get(
ATTR_UNIT_OF_MEASUREMENT
if (state := await self.async_get_last_state()) is not None:
if state.state == STATE_UNAVAILABLE:
self._attr_available = False
elif state.state != STATE_UNKNOWN:
try:
self._state = Decimal(state.state)
except (DecimalException, ValueError) as err:
_LOGGER.warning(
"%s could not restore last state %s: %s",
self.entity_id,
state.state,
err,
)
self._attr_device_class = state.attributes.get(ATTR_DEVICE_CLASS)
self._unit_of_measurement = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
@callback
def calc_integration(event: Event) -> None:
"""Handle the sensor state changes."""

View File

@@ -140,16 +140,18 @@ class GetStateIntentHandler(intent.IntentHandler):
area=area,
domains=domains,
device_classes=device_classes,
assistant=intent_obj.assistant,
)
)
_LOGGER.debug(
"Found %s state(s) that matched: name=%s, area=%s, domains=%s, device_classes=%s",
"Found %s state(s) that matched: name=%s, area=%s, domains=%s, device_classes=%s, assistant=%s",
len(states),
name,
area,
domains,
device_classes,
intent_obj.assistant,
)
# Create response

View File

@@ -11,6 +11,7 @@ from typing import Any, cast
from aiolifx.aiolifx import (
Light,
Message,
MultiZoneDirection,
MultiZoneEffectType,
TileEffectType,
@@ -56,6 +57,8 @@ from .util import (
LIGHT_UPDATE_INTERVAL = 10
REQUEST_REFRESH_DELAY = 0.35
LIFX_IDENTIFY_DELAY = 3.0
ZONES_PER_COLOR_UPDATE_REQUEST = 8
RSSI_DBM_FW = AwesomeVersion("2.77")
@@ -205,14 +208,53 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator[None]):
methods, DEFAULT_ATTEMPTS, OVERALL_TIMEOUT
)
def get_number_of_zones(self) -> int:
"""Return the number of zones.
If the number of zones is not yet populated, return 1 since
the device will have at least one zone.
"""
return len(self.device.color_zones) if self.device.color_zones else 1
@callback
def _async_build_color_zones_update_requests(self) -> list[Callable]:
"""Build a color zones update request."""
device = self.device
return [
partial(device.get_color_zones, start_index=zone)
for zone in range(0, len(device.color_zones), 8)
]
calls: list[Callable] = []
for zone in range(
0, self.get_number_of_zones(), ZONES_PER_COLOR_UPDATE_REQUEST
):
def _wrap_get_color_zones(
callb: Callable[[Message, dict[str, Any] | None], None],
get_color_zones_args: dict[str, Any],
) -> None:
"""Capture the callback and make sure resp_set_multizonemultizone is called before."""
def _wrapped_callback(
bulb: Light,
response: Message,
**kwargs: Any,
) -> None:
# We need to call resp_set_multizonemultizone to populate
# the color_zones attribute before calling the callback
device.resp_set_multizonemultizone(response)
# Now call the original callback
callb(bulb, response, **kwargs)
device.get_color_zones(**get_color_zones_args, callb=_wrapped_callback)
calls.append(
partial(
_wrap_get_color_zones,
get_color_zones_args={
"start_index": zone,
"end_index": zone + ZONES_PER_COLOR_UPDATE_REQUEST - 1,
},
)
)
return calls
async def _async_update_data(self) -> None:
"""Fetch all device data from the api."""
@@ -224,7 +266,7 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator[None]):
):
await self._async_populate_device_info()
num_zones = len(device.color_zones) if device.color_zones is not None else 0
num_zones = self.get_number_of_zones()
features = lifx_features(self.device)
is_extended_multizone = features["extended_multizone"]
is_legacy_multizone = not is_extended_multizone and features["multizone"]
@@ -256,7 +298,7 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator[None]):
if is_extended_multizone or is_legacy_multizone:
self.active_effect = FirmwareEffect[self.device.effect.get("effect", "OFF")]
if is_legacy_multizone and num_zones != len(device.color_zones):
if is_legacy_multizone and num_zones != self.get_number_of_zones():
# The number of zones has changed so we need
# to update the zones again. This happens rarely.
await self.async_get_color_zones()
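The batching in this hunk issues one get_color_zones request per block of eight zones and always assumes at least one zone. A quick editor's sketch of the index math:

# Editor's sketch (not part of this changeset): the (start_index, end_index)
# pairs the coordinator requests for a given zone count.
ZONES_PER_COLOR_UPDATE_REQUEST = 8

def color_zone_requests(num_zones: int) -> list[tuple[int, int]]:
    """Return the request ranges, assuming the device has at least one zone."""
    num_zones = num_zones or 1
    return [
        (start, start + ZONES_PER_COLOR_UPDATE_REQUEST - 1)
        for start in range(0, num_zones, ZONES_PER_COLOR_UPDATE_REQUEST)
    ]

assert color_zone_requests(0) == [(0, 7)]
assert color_zone_requests(16) == [(0, 7), (8, 15)]
assert color_zone_requests(20) == [(0, 7), (8, 15), (16, 23)]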

View File

@@ -382,7 +382,7 @@ class LIFXMultiZone(LIFXColor):
"""Send a color change to the bulb."""
bulb = self.bulb
color_zones = bulb.color_zones
num_zones = len(color_zones)
num_zones = self.coordinator.get_number_of_zones()
# Zone brightness is not reported when powered off
if not self.is_on and hsbk[HSBK_BRIGHTNESS] is None:

View File

@@ -51,7 +51,7 @@ PLATFORMS = [
]
async def with_timeout(task, timeout_seconds=10):
async def with_timeout(task, timeout_seconds=30):
"""Run an async task with a timeout."""
async with async_timeout.timeout(timeout_seconds):
return await task

View File

@@ -266,7 +266,7 @@ SENSOR_TYPES: tuple[NetatmoSensorEntityDescription, ...] = (
netatmo_name="power",
entity_registry_enabled_default=True,
native_unit_of_measurement=UnitOfPower.WATT,
state_class=SensorStateClass.TOTAL,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.POWER,
),
)

View File

@@ -1,9 +1,9 @@
{
"domain": "netgear_lte",
"name": "NETGEAR LTE",
"codeowners": [],
"codeowners": ["@tkdrob"],
"documentation": "https://www.home-assistant.io/integrations/netgear_lte",
"iot_class": "local_polling",
"loggers": ["eternalegypt"],
"requirements": ["eternalegypt==0.0.15"]
"requirements": ["eternalegypt==0.0.16"]
}

View File

@@ -2,7 +2,7 @@
from __future__ import annotations
import asyncio
from dataclasses import dataclass, field, fields
from dataclasses import dataclass, field
from datetime import timedelta
import logging
import traceback
@@ -10,9 +10,16 @@ from typing import Any
from uuid import UUID
from aionotion import async_get_client
from aionotion.bridge.models import Bridge
from aionotion.bridge.models import Bridge, BridgeAllResponse
from aionotion.errors import InvalidCredentialsError, NotionError
from aionotion.sensor.models import Listener, ListenerKind, Sensor
from aionotion.sensor.models import (
Listener,
ListenerAllResponse,
ListenerKind,
Sensor,
SensorAllResponse,
)
from aionotion.user.models import UserPreferences, UserPreferencesResponse
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform
@@ -51,6 +58,11 @@ PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
ATTR_SYSTEM_MODE = "system_mode"
ATTR_SYSTEM_NAME = "system_name"
DATA_BRIDGES = "bridges"
DATA_LISTENERS = "listeners"
DATA_SENSORS = "sensors"
DATA_USER_PREFERENCES = "user_preferences"
DEFAULT_SCAN_INTERVAL = timedelta(minutes=1)
CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
@@ -84,6 +96,9 @@ def is_uuid(value: str) -> bool:
class NotionData:
"""Define a manager class for Notion data."""
hass: HomeAssistant
entry: ConfigEntry
# Define a dict of bridges, indexed by bridge ID (an integer):
bridges: dict[int, Bridge] = field(default_factory=dict)
@@ -93,12 +108,40 @@ class NotionData:
# Define a dict of sensors, indexed by sensor UUID (a string):
sensors: dict[str, Sensor] = field(default_factory=dict)
# Define a user preferences response object:
user_preferences: UserPreferences | None = field(default=None)
def update_data_from_response(
self,
response: BridgeAllResponse
| ListenerAllResponse
| SensorAllResponse
| UserPreferencesResponse,
) -> None:
"""Update data from an aionotion response."""
if isinstance(response, BridgeAllResponse):
for bridge in response.bridges:
# If a new bridge is discovered, register it:
if bridge.id not in self.bridges:
_async_register_new_bridge(self.hass, self.entry, bridge)
self.bridges[bridge.id] = bridge
elif isinstance(response, ListenerAllResponse):
self.listeners = {listener.id: listener for listener in response.listeners}
elif isinstance(response, SensorAllResponse):
self.sensors = {sensor.uuid: sensor for sensor in response.sensors}
elif isinstance(response, UserPreferencesResponse):
self.user_preferences = response.user_preferences
def asdict(self) -> dict[str, Any]:
"""Represent this dataclass (and its Pydantic contents) as a dict."""
return {
field.name: [obj.dict() for obj in getattr(self, field.name).values()]
for field in fields(self)
data: dict[str, Any] = {
DATA_BRIDGES: [bridge.dict() for bridge in self.bridges.values()],
DATA_LISTENERS: [listener.dict() for listener in self.listeners.values()],
DATA_SENSORS: [sensor.dict() for sensor in self.sensors.values()],
}
if self.user_preferences:
data[DATA_USER_PREFERENCES] = self.user_preferences.dict()
return data
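The new dispatch keeps each API response type responsible for one slice of NotionData, and a bridge is registered only the first time it appears. An editor's sketch of that behaviour with stand-in classes (not the aionotion models):

# Editor's sketch (not part of this changeset): stand-in classes showing how
# update_data_from_response registers a bridge only once, however often a
# BridgeAllResponse-style payload is replayed.
from dataclasses import dataclass, field

@dataclass
class FakeBridge:
    id: int
    name: str

@dataclass
class FakeBridgeAllResponse:
    bridges: list[FakeBridge]

registered: list[int] = []  # stand-in for _async_register_new_bridge calls

@dataclass
class FakeNotionData:
    bridges: dict[int, FakeBridge] = field(default_factory=dict)

    def update_data_from_response(self, response: FakeBridgeAllResponse) -> None:
        for bridge in response.bridges:
            if bridge.id not in self.bridges:
                registered.append(bridge.id)
            self.bridges[bridge.id] = bridge

data = FakeNotionData()
data.update_data_from_response(FakeBridgeAllResponse([FakeBridge(1, "Garage")]))
data.update_data_from_response(
    FakeBridgeAllResponse([FakeBridge(1, "Garage"), FakeBridge(2, "Attic")])
)
assert registered == [1, 2]  # each bridge registered exactly once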
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@@ -121,11 +164,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_update() -> NotionData:
"""Get the latest data from the Notion API."""
data = NotionData()
data = NotionData(hass=hass, entry=entry)
tasks = {
"bridges": client.bridge.async_all(),
"listeners": client.sensor.async_listeners(),
"sensors": client.sensor.async_all(),
DATA_BRIDGES: client.bridge.async_all(),
DATA_LISTENERS: client.sensor.async_listeners(),
DATA_SENSORS: client.sensor.async_all(),
DATA_USER_PREFERENCES: client.user.async_preferences(),
}
results = await asyncio.gather(*tasks.values(), return_exceptions=True)
@@ -145,16 +189,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
f"There was an unknown error while updating {attr}: {result}"
) from result
for item in result:
if attr == "bridges":
# If a new bridge is discovered, register it:
if item.id not in data.bridges:
_async_register_new_bridge(hass, item, entry)
data.bridges[item.id] = item
elif attr == "listeners":
data.listeners[item.id] = item
else:
data.sensors[item.uuid] = item
data.update_data_from_response(result)
return data
@@ -216,7 +251,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@callback
def _async_register_new_bridge(
hass: HomeAssistant, bridge: Bridge, entry: ConfigEntry
hass: HomeAssistant, entry: ConfigEntry, bridge: Bridge
) -> None:
"""Register a new bridge."""
if name := bridge.name:
@@ -279,6 +314,11 @@ class NotionEntity(CoordinatorEntity[DataUpdateCoordinator[NotionData]]):
and self._listener_id in self.coordinator.data.listeners
)
@property
def listener(self) -> Listener:
"""Return the listener related to this entity."""
return self.coordinator.data.listeners[self._listener_id]
@callback
def _async_update_bridge_id(self) -> None:
"""Update the entity's bridge ID if it has changed.
@@ -310,21 +350,9 @@ class NotionEntity(CoordinatorEntity[DataUpdateCoordinator[NotionData]]):
this_device.id, via_device_id=bridge_device.id
)
@callback
def _async_update_from_latest_data(self) -> None:
"""Update the entity from the latest data."""
raise NotImplementedError
@callback
def _handle_coordinator_update(self) -> None:
"""Respond to a DataUpdateCoordinator update."""
if self._listener_id in self.coordinator.data.listeners:
self._async_update_bridge_id()
self._async_update_from_latest_data()
self.async_write_ha_state()
async def async_added_to_hass(self) -> None:
"""Handle entity which will be added."""
await super().async_added_to_hass()
self._async_update_from_latest_data()
super()._handle_coordinator_update()

View File

@@ -13,7 +13,7 @@ from homeassistant.components.binary_sensor import (
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import NotionEntity
@@ -37,7 +37,7 @@ from .model import NotionEntityDescriptionMixin
class NotionBinarySensorDescriptionMixin:
"""Define an entity description mixin for binary and regular sensors."""
on_state: Literal["alarm", "critical", "leak", "not_missing", "open"]
on_state: Literal["alarm", "leak", "low", "not_missing", "open"]
@dataclass
@@ -56,7 +56,7 @@ BINARY_SENSOR_DESCRIPTIONS = (
device_class=BinarySensorDeviceClass.BATTERY,
entity_category=EntityCategory.DIAGNOSTIC,
listener_kind=ListenerKind.BATTERY,
on_state="critical",
on_state="low",
),
NotionBinarySensorDescription(
key=SENSOR_DOOR,
@@ -146,17 +146,10 @@ class NotionBinarySensor(NotionEntity, BinarySensorEntity):
entity_description: NotionBinarySensorDescription
@callback
def _async_update_from_latest_data(self) -> None:
"""Fetch new state data for the sensor."""
listener = self.coordinator.data.listeners[self._listener_id]
if listener.status.trigger_value:
state = listener.status.trigger_value
elif listener.insights.primary.value:
state = listener.insights.primary.value
else:
LOGGER.warning("Unknown listener structure: %s", listener)
state = None
self._attr_is_on = self.entity_description.on_state == state
@property
def is_on(self) -> bool | None:
"""Return true if the binary sensor is on."""
if not self.listener.insights.primary.value:
LOGGER.warning("Unknown listener structure: %s", self.listener.dict())
return False
return self.listener.insights.primary.value == self.entity_description.on_state

View File

@@ -16,6 +16,7 @@ CONF_DEVICE_KEY = "device_key"
CONF_HARDWARE_ID = "hardware_id"
CONF_LAST_BRIDGE_HARDWARE_ID = "last_bridge_hardware_id"
CONF_TITLE = "title"
CONF_USER_ID = "user_id"
TO_REDACT = {
CONF_DEVICE_KEY,
@@ -27,6 +28,7 @@ TO_REDACT = {
CONF_TITLE,
CONF_UNIQUE_ID,
CONF_USERNAME,
CONF_USER_ID,
}

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["aionotion"],
"requirements": ["aionotion==2023.04.2"]
"requirements": ["aionotion==2023.05.4"]
}

View File

@@ -11,11 +11,11 @@ from homeassistant.components.sensor import (
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import NotionEntity
from .const import DOMAIN, LOGGER, SENSOR_TEMPERATURE
from .const import DOMAIN, SENSOR_TEMPERATURE
from .model import NotionEntityDescriptionMixin
@@ -63,15 +63,24 @@ async def async_setup_entry(
class NotionSensor(NotionEntity, SensorEntity):
"""Define a Notion sensor."""
@callback
def _async_update_from_latest_data(self) -> None:
"""Fetch new state data for the sensor."""
listener = self.coordinator.data.listeners[self._listener_id]
@property
def native_unit_of_measurement(self) -> str | None:
"""Return the unit of measurement of the sensor."""
if self.listener.listener_kind == ListenerKind.TEMPERATURE:
if not self.coordinator.data.user_preferences:
return None
if self.coordinator.data.user_preferences.celsius_enabled:
return UnitOfTemperature.CELSIUS
return UnitOfTemperature.FAHRENHEIT
return None
if listener.listener_kind == ListenerKind.TEMPERATURE:
self._attr_native_value = round(listener.status.temperature, 1) # type: ignore[attr-defined]
else:
LOGGER.error(
"Unknown listener type for sensor %s",
self.coordinator.data.sensors[self._sensor_id],
)
@property
def native_value(self) -> str | None:
"""Return the value reported by the sensor.
The Notion API only returns a localized string for temperature (e.g. "70°"); we
simply remove the degree symbol:
"""
if not self.listener.status_localized:
return None
return self.listener.status_localized.state[:-1]

View File

@@ -1,5 +1,6 @@
"""The ONVIF integration."""
import asyncio
from http import HTTPStatus
import logging
from httpx import RequestError
@@ -56,7 +57,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
except ONVIFError as err:
await device.device.close()
raise ConfigEntryNotReady(
f"Could not setup camera {device.device.host}:{device.device.port}: {err}"
f"Could not setup camera {device.device.host}:{device.device.port}: {stringify_onvif_error(err)}"
) from err
except TransportError as err:
await device.device.close()
stringified_onvif_error = stringify_onvif_error(err)
if err.status_code in (
HTTPStatus.UNAUTHORIZED.value,
HTTPStatus.FORBIDDEN.value,
):
raise ConfigEntryAuthFailed(
f"Auth Failed: {stringified_onvif_error}"
) from err
raise ConfigEntryNotReady(
f"Could not setup camera {device.device.host}:{device.device.port}: {stringified_onvif_error}"
) from err
except asyncio.CancelledError as err:
# After https://github.com/agronholm/anyio/issues/374 is resolved

View File

@@ -34,7 +34,7 @@ class RebootButton(ONVIFBaseEntity, ButtonEntity):
async def async_press(self) -> None:
"""Send out a SystemReboot command."""
device_mgmt = self.device.device.create_devicemgmt_service()
device_mgmt = await self.device.device.create_devicemgmt_service()
await device_mgmt.SystemReboot()

View File

@@ -142,10 +142,14 @@ class OnvifFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
hass.async_create_task(hass.config_entries.async_reload(entry_id))
return self.async_abort(reason="reauth_successful")
username = (user_input or {}).get(CONF_USERNAME) or entry.data[CONF_USERNAME]
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema(
{vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str}
{
vol.Required(CONF_USERNAME, default=username): str,
vol.Required(CONF_PASSWORD): str,
}
),
errors=errors,
description_placeholders=description_placeholders,
@@ -275,7 +279,7 @@ class OnvifFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
try:
await device.update_xaddrs()
device_mgmt = device.create_devicemgmt_service()
device_mgmt = await device.create_devicemgmt_service()
# Get the MAC address to use as the unique ID for the config flow
if not self.device_id:
try:
@@ -314,7 +318,7 @@ class OnvifFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
}
)
# Verify there is an H264 profile
media_service = device.create_media_service()
media_service = await device.create_media_service()
profiles = await media_service.GetProfiles()
except AttributeError: # Likely an empty document or 404 from the wrong port
LOGGER.debug(

View File

@@ -12,7 +12,7 @@ from httpx import RequestError
import onvif
from onvif import ONVIFCamera
from onvif.exceptions import ONVIFError
from zeep.exceptions import Fault, XMLParseError
from zeep.exceptions import Fault, TransportError, XMLParseError, XMLSyntaxError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
@@ -100,6 +100,7 @@ class ONVIFDevice:
# Get all device info
await self.device.update_xaddrs()
LOGGER.debug("%s: xaddrs = %s", self.name, self.device.xaddrs)
# Get device capabilities
self.onvif_capabilities = await self.device.get_capabilities()
@@ -112,10 +113,20 @@ class ONVIFDevice:
# Fetch basic device info and capabilities
self.info = await self.async_get_device_info()
LOGGER.debug("Camera %s info = %s", self.name, self.info)
LOGGER.debug("%s: camera info = %s", self.name, self.info)
# Check profiles before capabilities since the camera may be slow to respond
# once the event manager is started in async_get_capabilities.
#
# We need to check capabilities before profiles, because we need the data
# from capabilities to determine profiles correctly.
#
# We no longer initialize events in capabilities to avoid the problem
# where cameras become slow to respond for a bit after starting events, and
# instead we start events last and then update capabilities.
#
LOGGER.debug("%s: fetching initial capabilities", self.name)
self.capabilities = await self.async_get_capabilities()
LOGGER.debug("%s: fetching profiles", self.name)
self.profiles = await self.async_get_profiles()
LOGGER.debug("Camera %s profiles = %s", self.name, self.profiles)
@@ -123,11 +134,9 @@ class ONVIFDevice:
if not self.profiles:
raise ONVIFError("No camera profiles found")
self.capabilities = await self.async_get_capabilities()
LOGGER.debug("Camera %s capabilities = %s", self.name, self.capabilities)
if self.capabilities.ptz:
self.device.create_ptz_service()
LOGGER.debug("%s: creating PTZ service", self.name)
await self.device.create_ptz_service()
# Determine max resolution from profiles
self.max_resolution = max(
@@ -136,6 +145,12 @@ class ONVIFDevice:
if profile.video.encoding == "H264"
)
# Start events last since some cameras become slow to respond
# for a bit after starting events
LOGGER.debug("%s: starting events", self.name)
self.capabilities.events = await self.async_start_events()
LOGGER.debug("Camera %s capabilities = %s", self.name, self.capabilities)
async def async_stop(self, event=None):
"""Shut it all down."""
if self.events:
@@ -144,7 +159,7 @@ class ONVIFDevice:
async def async_manually_set_date_and_time(self) -> None:
"""Set Date and Time Manually using SetSystemDateAndTime command."""
device_mgmt = self.device.create_devicemgmt_service()
device_mgmt = await self.device.create_devicemgmt_service()
# Retrieve DateTime object from camera to use as template for Set operation
device_time = await device_mgmt.GetSystemDateAndTime()
@@ -187,82 +202,105 @@ class ONVIFDevice:
async def async_check_date_and_time(self) -> None:
"""Warns if device and system date not synced."""
LOGGER.debug("%s: Setting up the ONVIF device management service", self.name)
device_mgmt = self.device.create_devicemgmt_service()
device_mgmt = await self.device.create_devicemgmt_service()
system_date = dt_util.utcnow()
LOGGER.debug("%s: Retrieving current device date/time", self.name)
try:
system_date = dt_util.utcnow()
device_time = await device_mgmt.GetSystemDateAndTime()
if not device_time:
LOGGER.debug(
"""Couldn't get device '%s' date/time.
GetSystemDateAndTime() returned null/empty""",
self.name,
)
return
LOGGER.debug("%s: Device time: %s", self.name, device_time)
tzone = dt_util.DEFAULT_TIME_ZONE
cdate = device_time.LocalDateTime
if device_time.UTCDateTime:
tzone = dt_util.UTC
cdate = device_time.UTCDateTime
elif device_time.TimeZone:
tzone = dt_util.get_time_zone(device_time.TimeZone.TZ) or tzone
if cdate is None:
LOGGER.warning(
"%s: Could not retrieve date/time on this camera", self.name
)
else:
cam_date = dt.datetime(
cdate.Date.Year,
cdate.Date.Month,
cdate.Date.Day,
cdate.Time.Hour,
cdate.Time.Minute,
cdate.Time.Second,
0,
tzone,
)
cam_date_utc = cam_date.astimezone(dt_util.UTC)
LOGGER.debug(
"%s: Device date/time: %s | System date/time: %s",
self.name,
cam_date_utc,
system_date,
)
dt_diff = cam_date - system_date
self._dt_diff_seconds = dt_diff.total_seconds()
# It could be off either direction, so we need to check the absolute value
if abs(self._dt_diff_seconds) > 5:
LOGGER.warning(
(
"The date/time on %s (UTC) is '%s', "
"which is different from the system '%s', "
"this could lead to authentication issues"
),
self.name,
cam_date_utc,
system_date,
)
if device_time.DateTimeType == "Manual":
# Set Date and Time ourselves if Date and Time is set manually in the camera.
await self.async_manually_set_date_and_time()
except RequestError as err:
LOGGER.warning(
"Couldn't get device '%s' date/time. Error: %s", self.name, err
)
return
if not device_time:
LOGGER.debug(
"""Couldn't get device '%s' date/time.
GetSystemDateAndTime() returned null/empty""",
self.name,
)
return
LOGGER.debug("%s: Device time: %s", self.name, device_time)
tzone = dt_util.DEFAULT_TIME_ZONE
cdate = device_time.LocalDateTime
if device_time.UTCDateTime:
tzone = dt_util.UTC
cdate = device_time.UTCDateTime
elif device_time.TimeZone:
tzone = dt_util.get_time_zone(device_time.TimeZone.TZ) or tzone
if cdate is None:
LOGGER.warning("%s: Could not retrieve date/time on this camera", self.name)
return
cam_date = dt.datetime(
cdate.Date.Year,
cdate.Date.Month,
cdate.Date.Day,
cdate.Time.Hour,
cdate.Time.Minute,
cdate.Time.Second,
0,
tzone,
)
cam_date_utc = cam_date.astimezone(dt_util.UTC)
LOGGER.debug(
"%s: Device date/time: %s | System date/time: %s",
self.name,
cam_date_utc,
system_date,
)
dt_diff = cam_date - system_date
self._dt_diff_seconds = dt_diff.total_seconds()
# It could be off either direction, so we need to check the absolute value
if abs(self._dt_diff_seconds) < 5:
return
LOGGER.warning(
(
"The date/time on %s (UTC) is '%s', "
"which is different from the system '%s', "
"this could lead to authentication issues"
),
self.name,
cam_date_utc,
system_date,
)
if device_time.DateTimeType != "Manual":
return
# Set Date and Time ourselves if Date and Time is set manually in the camera.
try:
await self.async_manually_set_date_and_time()
except (RequestError, TransportError):
LOGGER.warning("%s: Could not sync date/time on this camera", self.name)
async def async_get_device_info(self) -> DeviceInfo:
"""Obtain information about this device."""
device_mgmt = self.device.create_devicemgmt_service()
device_info = await device_mgmt.GetDeviceInformation()
device_mgmt = await self.device.create_devicemgmt_service()
manufacturer = None
model = None
firmware_version = None
serial_number = None
try:
device_info = await device_mgmt.GetDeviceInformation()
except (XMLParseError, XMLSyntaxError, TransportError) as ex:
# Some cameras have invalid UTF-8 in their device information (TransportError)
# and others have completely invalid XML (XMLParseError, XMLSyntaxError)
LOGGER.warning("%s: Failed to fetch device information: %s", self.name, ex)
else:
manufacturer = device_info.Manufacturer
model = device_info.Model
firmware_version = device_info.FirmwareVersion
serial_number = device_info.SerialNumber
# Grab the last MAC address for backwards compatibility
mac = None
@@ -282,10 +320,10 @@ class ONVIFDevice:
)
return DeviceInfo(
device_info.Manufacturer,
device_info.Model,
device_info.FirmwareVersion,
device_info.SerialNumber,
manufacturer,
model,
firmware_version,
serial_number,
mac,
)
@@ -293,7 +331,7 @@ class ONVIFDevice:
"""Obtain information about the available services on the device."""
snapshot = False
with suppress(*GET_CAPABILITIES_EXCEPTIONS):
media_service = self.device.create_media_service()
media_service = await self.device.create_media_service()
media_capabilities = await media_service.GetServiceCapabilities()
snapshot = media_capabilities and media_capabilities.SnapshotUri
@@ -304,26 +342,34 @@ class ONVIFDevice:
imaging = False
with suppress(*GET_CAPABILITIES_EXCEPTIONS):
self.device.create_imaging_service()
await self.device.create_imaging_service()
imaging = True
events = False
return Capabilities(snapshot=snapshot, ptz=ptz, imaging=imaging)
async def async_start_events(self):
"""Start the event handler."""
with suppress(*GET_CAPABILITIES_EXCEPTIONS, XMLParseError):
onvif_capabilities = self.onvif_capabilities or {}
pull_point_support = onvif_capabilities.get("Events", {}).get(
pull_point_support = (onvif_capabilities.get("Events") or {}).get(
"WSPullPointSupport"
)
LOGGER.debug("%s: WSPullPointSupport: %s", self.name, pull_point_support)
events = await self.events.async_start(
pull_point_support is not False, True
)
return await self.events.async_start(pull_point_support is not False, True)
return Capabilities(snapshot, events, ptz, imaging)
return False
async def async_get_profiles(self) -> list[Profile]:
"""Obtain media profiles for this device."""
media_service = self.device.create_media_service()
result = await media_service.GetProfiles()
media_service = await self.device.create_media_service()
LOGGER.debug("%s: xaddr for media_service: %s", self.name, media_service.xaddr)
try:
result = await media_service.GetProfiles()
except GET_CAPABILITIES_EXCEPTIONS:
LOGGER.debug(
"%s: Could not get profiles from ONVIF device", self.name, exc_info=True
)
raise
profiles: list[Profile] = []
if not isinstance(result, list):
@@ -362,7 +408,7 @@ class ONVIFDevice:
)
try:
ptz_service = self.device.create_ptz_service()
ptz_service = await self.device.create_ptz_service()
presets = await ptz_service.GetPresets(profile.token)
profile.ptz.presets = [preset.token for preset in presets if preset]
except GET_CAPABILITIES_EXCEPTIONS:
@@ -381,7 +427,7 @@ class ONVIFDevice:
async def async_get_stream_uri(self, profile: Profile) -> str:
"""Get the stream URI for a specified profile."""
media_service = self.device.create_media_service()
media_service = await self.device.create_media_service()
req = media_service.create_type("GetStreamUri")
req.ProfileToken = profile.token
req.StreamSetup = {
@@ -408,7 +454,7 @@ class ONVIFDevice:
LOGGER.warning("PTZ actions are not supported on device '%s'", self.name)
return
ptz_service = self.device.create_ptz_service()
ptz_service = await self.device.create_ptz_service()
pan_val = distance * PAN_FACTOR.get(pan, 0)
tilt_val = distance * TILT_FACTOR.get(tilt, 0)
@@ -530,7 +576,7 @@ class ONVIFDevice:
LOGGER.warning("PTZ actions are not supported on device '%s'", self.name)
return
ptz_service = self.device.create_ptz_service()
ptz_service = await self.device.create_ptz_service()
LOGGER.debug(
"Running Aux Command | Cmd = %s",
@@ -561,7 +607,7 @@ class ONVIFDevice:
)
return
imaging_service = self.device.create_imaging_service()
imaging_service = await self.device.create_imaging_service()
LOGGER.debug("Setting Imaging Setting | Settings = %s", settings)
try:

View File

@@ -27,6 +27,10 @@ async def async_get_config_entry_diagnostics(
"info": asdict(device.info),
"capabilities": asdict(device.capabilities),
"profiles": [asdict(profile) for profile in device.profiles],
"services": {
str(key): service.url for key, service in device.device.services.items()
},
"xaddrs": device.device.xaddrs,
}
data["events"] = {
"webhook_manager_state": device.events.webhook_manager.state,

View File

@@ -9,9 +9,9 @@ import datetime as dt
from aiohttp.web import Request
from httpx import RemoteProtocolError, RequestError, TransportError
from onvif import ONVIFCamera, ONVIFService
from onvif.client import NotificationManager
from onvif.client import NotificationManager, retry_connection_error
from onvif.exceptions import ONVIFError
from zeep.exceptions import Fault, XMLParseError
from zeep.exceptions import Fault, ValidationError, XMLParseError
from homeassistant.components import webhook
from homeassistant.config_entries import ConfigEntry
@@ -35,13 +35,13 @@ from .util import stringify_onvif_error
UNHANDLED_TOPICS: set[str] = {"tns1:MediaControl/VideoEncoderConfiguration"}
SUBSCRIPTION_ERRORS = (Fault, asyncio.TimeoutError, TransportError)
CREATE_ERRORS = (ONVIFError, Fault, RequestError, XMLParseError)
CREATE_ERRORS = (ONVIFError, Fault, RequestError, XMLParseError, ValidationError)
SET_SYNCHRONIZATION_POINT_ERRORS = (*SUBSCRIPTION_ERRORS, TypeError)
UNSUBSCRIBE_ERRORS = (XMLParseError, *SUBSCRIPTION_ERRORS)
RENEW_ERRORS = (ONVIFError, RequestError, XMLParseError, *SUBSCRIPTION_ERRORS)
#
# We only keep the subscription alive for 3 minutes, and will keep
# renewing it every 1.5 minutes. This is to avoid the camera
# We only keep the subscription alive for 10 minutes, and will keep
# renewing it every 8 minutes. This is to avoid the camera
# accumulating subscriptions which will be impossible to clean up
# since ONVIF does not provide a way to list existing subscriptions.
#
@@ -49,12 +49,25 @@ RENEW_ERRORS = (ONVIFError, RequestError, XMLParseError, *SUBSCRIPTION_ERRORS)
# sending events to us, and we will not be able to recover until
# the subscriptions expire or the camera is rebooted.
#
SUBSCRIPTION_TIME = dt.timedelta(minutes=3)
SUBSCRIPTION_RELATIVE_TIME = (
"PT3M" # use relative time since the time on the camera is not reliable
)
SUBSCRIPTION_RENEW_INTERVAL = SUBSCRIPTION_TIME.total_seconds() / 2
SUBSCRIPTION_RENEW_INTERVAL_ON_ERROR = 60.0
SUBSCRIPTION_TIME = dt.timedelta(minutes=10)
# SUBSCRIPTION_RELATIVE_TIME uses a relative time since the time on the camera
# is not reliable. We use 600 seconds (10 minutes) since some cameras cannot
# parse time in the format "PT10M" (10 minutes).
SUBSCRIPTION_RELATIVE_TIME = "PT600S"
# SUBSCRIPTION_RENEW_INTERVAL must be less than the
# overall timeout of 90 seconds * SUBSCRIPTION_ATTEMPTS (2) = 180 seconds
#
# We use 8 minutes between renewals to make sure we never hit the
# 10 minute limit even if the first renewal attempt fails
SUBSCRIPTION_RENEW_INTERVAL = 8 * 60
# The number of attempts to make when creating or renewing a subscription
SUBSCRIPTION_ATTEMPTS = 2
# The time to wait before trying to restart the subscription if it fails
SUBSCRIPTION_RESTART_INTERVAL_ON_ERROR = 60
PULLPOINT_POLL_TIME = dt.timedelta(seconds=60)
PULLPOINT_MESSAGE_LIMIT = 100
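A short worked check of the renewal timing the comments above describe (the 90-second per-attempt timeout is taken from the comment, not from code shown here):

# Editor's sketch (not part of this changeset): renewing at 8 minutes leaves
# room for a failed first attempt before the 10-minute subscription expires.
SUBSCRIPTION_TIME_SECONDS = 10 * 60        # camera-side subscription lifetime
SUBSCRIPTION_RENEW_INTERVAL = 8 * 60       # normal renewal point
PER_ATTEMPT_TIMEOUT = 90                   # assumed per-attempt renew timeout

# Even if the first renew attempt times out, the retry starts at
# 8:00 + 1:30 = 9:30, still inside the 10-minute subscription window.
second_attempt_start = SUBSCRIPTION_RENEW_INTERVAL + PER_ATTEMPT_TIMEOUT
assert second_attempt_start < SUBSCRIPTION_TIME_SECONDS  # 570 < 600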
@@ -276,7 +289,13 @@ class PullPointManager:
"""Pause pullpoint subscription."""
LOGGER.debug("%s: Pausing PullPoint manager", self._name)
self.state = PullPointManagerState.PAUSED
self._hass.async_create_task(self._async_cancel_and_unsubscribe())
# Cancel the renew job so we don't renew the subscription
# and stop pulling messages.
self._async_cancel_pullpoint_renew()
self.async_cancel_pull_messages()
# We do not unsubscribe from the pullpoint subscription and instead
# let the subscription expire since some cameras will terminate all
# subscriptions if we unsubscribe which will break the webhook.
@callback
def async_resume(self) -> None:
@@ -327,20 +346,7 @@ class PullPointManager:
async def _async_start_pullpoint(self) -> bool:
"""Start pullpoint subscription."""
try:
try:
started = await self._async_create_pullpoint_subscription()
except RequestError:
#
# We should only need to retry on RemoteProtocolError but some cameras
# are flaky and sometimes do not respond to the Renew request so we
# retry on RequestError as well.
#
# For RemoteProtocolError:
# http://datatracker.ietf.org/doc/html/rfc2616#section-8.1.4 allows the server
# to close the connection at any time, we treat this as a normal and try again
# once since we do not want to declare the camera as not supporting PullPoint
# if it just happened to close the connection at the wrong time.
started = await self._async_create_pullpoint_subscription()
started = await self._async_create_pullpoint_subscription()
except CREATE_ERRORS as err:
LOGGER.debug(
"%s: Device does not support PullPoint service or has too many subscriptions: %s",
@@ -372,16 +378,16 @@ class PullPointManager:
# scheduled when the current one is done if needed.
return
async with self._renew_lock:
next_attempt = SUBSCRIPTION_RENEW_INTERVAL_ON_ERROR
next_attempt = SUBSCRIPTION_RESTART_INTERVAL_ON_ERROR
try:
if (
await self._async_renew_pullpoint()
or await self._async_restart_pullpoint()
):
if await self._async_renew_pullpoint():
next_attempt = SUBSCRIPTION_RENEW_INTERVAL
else:
await self._async_restart_pullpoint()
finally:
self.async_schedule_pullpoint_renew(next_attempt)
@retry_connection_error(SUBSCRIPTION_ATTEMPTS)
async def _async_create_pullpoint_subscription(self) -> bool:
"""Create pullpoint subscription."""
@@ -392,12 +398,12 @@ class PullPointManager:
return False
# Create subscription manager
self._pullpoint_subscription = self._device.create_subscription_service(
self._pullpoint_subscription = await self._device.create_subscription_service(
"PullPointSubscription"
)
# Create the service that will be used to pull messages from the device.
self._pullpoint_service = self._device.create_pullpoint_service()
self._pullpoint_service = await self._device.create_pullpoint_service()
# Initialize events
with suppress(*SET_SYNCHRONIZATION_POINT_ERRORS):
@@ -447,6 +453,11 @@ class PullPointManager:
)
self._pullpoint_subscription = None
@retry_connection_error(SUBSCRIPTION_ATTEMPTS)
async def _async_call_pullpoint_subscription_renew(self) -> None:
"""Call PullPoint subscription Renew."""
await self._pullpoint_subscription.Renew(SUBSCRIPTION_RELATIVE_TIME)
async def _async_renew_pullpoint(self) -> bool:
"""Renew the PullPoint subscription."""
if (
@@ -458,20 +469,7 @@ class PullPointManager:
# The first time we renew, we may get a Fault error so we
# suppress it. The subscription will be restarted in
# async_restart later.
try:
await self._pullpoint_subscription.Renew(SUBSCRIPTION_RELATIVE_TIME)
except RequestError:
#
# We should only need to retry on RemoteProtocolError but some cameras
# are flaky and sometimes do not respond to the Renew request so we
# retry on RequestError as well.
#
# For RemoteProtocolError:
# http://datatracker.ietf.org/doc/html/rfc2616#section-8.1.4 allows the server
# to close the connection at any time, we treat this as a normal and try again
# once since we do not want to mark events as stale
# if it just happened to close the connection at the wrong time.
await self._pullpoint_subscription.Renew(SUBSCRIPTION_RELATIVE_TIME)
await self._async_call_pullpoint_subscription_renew()
LOGGER.debug("%s: Renewed PullPoint subscription", self._name)
return True
except RENEW_ERRORS as err:
@@ -521,7 +519,7 @@ class PullPointManager:
stringify_onvif_error(err),
)
return True
except (XMLParseError, *SUBSCRIPTION_ERRORS) as err:
except Fault as err:
# Device may not support subscriptions so log at debug level
# when we get a Fault
LOGGER.debug(
@@ -532,6 +530,16 @@ class PullPointManager:
# Treat errors as if the camera restarted. Assume that the pullpoint
# subscription is no longer valid.
return False
except (XMLParseError, RequestError, TimeoutError, TransportError) as err:
LOGGER.debug(
"%s: PullPoint subscription encountered an unexpected error and will be retried "
"(this is normal for some cameras): %s",
self._name,
stringify_onvif_error(err),
)
# Avoid renewing the subscription too often since it causes problems
# for some cameras, mainly the Tapo ones.
return True
if self.state != PullPointManagerState.STARTED:
# If the webhook became started working during the long poll,
@@ -655,36 +663,42 @@ class WebHookManager:
self._renew_or_restart_job,
)
@retry_connection_error(SUBSCRIPTION_ATTEMPTS)
async def _async_create_webhook_subscription(self) -> None:
"""Create webhook subscription."""
LOGGER.debug("%s: Creating webhook subscription", self._name)
LOGGER.debug(
"%s: Creating webhook subscription with URL: %s",
self._name,
self._webhook_url,
)
self._notification_manager = self._device.create_notification_manager(
{
"InitialTerminationTime": SUBSCRIPTION_RELATIVE_TIME,
"ConsumerReference": {"Address": self._webhook_url},
}
)
self._webhook_subscription = await self._notification_manager.setup()
try:
self._webhook_subscription = await self._notification_manager.setup()
except ValidationError as err:
# This should only happen if there is a problem with the webhook URL
# that is causing it to not be well formed.
LOGGER.exception(
"%s: validation error while creating webhook subscription: %s",
self._name,
err,
)
raise
await self._notification_manager.start()
LOGGER.debug("%s: Webhook subscription created", self._name)
LOGGER.debug(
"%s: Webhook subscription created with URL: %s",
self._name,
self._webhook_url,
)
async def _async_start_webhook(self) -> bool:
"""Start webhook."""
try:
try:
await self._async_create_webhook_subscription()
except RequestError:
#
# We should only need to retry on RemoteProtocolError but some cameras
# are flaky and sometimes do not respond to the Renew request so we
# retry on RequestError as well.
#
# For RemoteProtocolError:
# http://datatracker.ietf.org/doc/html/rfc2616#section-8.1.4 allows the server
# to close the connection at any time, we treat this as a normal and try again
# once since we do not want to declare the camera as not supporting webhooks
# if it just happened to close the connection at the wrong time.
await self._async_create_webhook_subscription()
await self._async_create_webhook_subscription()
except CREATE_ERRORS as err:
self._event_manager.async_webhook_failed()
LOGGER.debug(
@@ -702,6 +716,12 @@ class WebHookManager:
await self._async_unsubscribe_webhook()
return await self._async_start_webhook()
@retry_connection_error(SUBSCRIPTION_ATTEMPTS)
async def _async_call_webhook_subscription_renew(self) -> None:
"""Call PullPoint subscription Renew."""
assert self._webhook_subscription is not None
await self._webhook_subscription.Renew(SUBSCRIPTION_RELATIVE_TIME)
async def _async_renew_webhook(self) -> bool:
"""Renew webhook subscription."""
if (
@@ -710,20 +730,7 @@ class WebHookManager:
):
return False
try:
try:
await self._webhook_subscription.Renew(SUBSCRIPTION_RELATIVE_TIME)
except RequestError:
#
# We should only need to retry on RemoteProtocolError but some cameras
# are flaky and sometimes do not respond to the Renew request so we
# retry on RequestError as well.
#
# For RemoteProtocolError:
# http://datatracker.ietf.org/doc/html/rfc2616#section-8.1.4 allows the server
# to close the connection at any time, we treat this as a normal and try again
# once since we do not want to mark events as stale
# if it just happened to close the connection at the wrong time.
await self._webhook_subscription.Renew(SUBSCRIPTION_RELATIVE_TIME)
await self._async_call_webhook_subscription_renew()
LOGGER.debug("%s: Renewed Webhook subscription", self._name)
return True
except RENEW_ERRORS as err:
@@ -747,13 +754,12 @@ class WebHookManager:
# scheduled when the current one is done if needed.
return
async with self._renew_lock:
next_attempt = SUBSCRIPTION_RENEW_INTERVAL_ON_ERROR
next_attempt = SUBSCRIPTION_RESTART_INTERVAL_ON_ERROR
try:
if (
await self._async_renew_webhook()
or await self._async_restart_webhook()
):
if await self._async_renew_webhook():
next_attempt = SUBSCRIPTION_RENEW_INTERVAL
else:
await self._async_restart_webhook()
finally:
self._async_schedule_webhook_renew(next_attempt)

View File

@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/onvif",
"iot_class": "local_push",
"loggers": ["onvif", "wsdiscovery", "zeep"],
"requirements": ["onvif-zeep-async==1.3.1", "WSDiscovery==2.0.0"]
"requirements": ["onvif-zeep-async==2.1.1", "WSDiscovery==2.0.0"]
}

View File

@@ -15,6 +15,19 @@ PARSERS: Registry[
str, Callable[[str, Any], Coroutine[Any, Any, Event | None]]
] = Registry()
VIDEO_SOURCE_MAPPING = {
"vsconf": "VideoSourceToken",
}
def _normalize_video_source(source: str) -> str:
"""Normalize video source.
Some cameras do not set the VideoSourceToken correctly, which results in
duplicate sensors, so we normalize it to the correct value.
"""
return VIDEO_SOURCE_MAPPING.get(source, source)
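The helper is a straight dictionary lookup with a pass-through default; an editor's sketch of its behaviour:

# Editor's sketch (not part of this changeset): only aliases listed in
# VIDEO_SOURCE_MAPPING are rewritten, every other token passes through.
VIDEO_SOURCE_MAPPING = {"vsconf": "VideoSourceToken"}

def normalize_video_source(source: str) -> str:
    return VIDEO_SOURCE_MAPPING.get(source, source)

assert normalize_video_source("vsconf") == "VideoSourceToken"
assert normalize_video_source("VideoSourceToken") == "VideoSourceToken"
assert normalize_video_source("V_SRC_000") == "V_SRC_000"  # hypothetical token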
def local_datetime_or_none(value: str) -> datetime.datetime | None:
"""Convert strings to datetimes, if invalid, return None."""
@@ -188,7 +201,7 @@ async def async_parse_field_detector(uid: str, msg) -> Event | None:
rule = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "VideoSourceConfigurationToken":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
if source.Name == "VideoAnalyticsConfigurationToken":
video_analytics = source.Value
if source.Name == "Rule":
@@ -220,7 +233,7 @@ async def async_parse_cell_motion_detector(uid: str, msg) -> Event | None:
rule = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "VideoSourceConfigurationToken":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
if source.Name == "VideoAnalyticsConfigurationToken":
video_analytics = source.Value
if source.Name == "Rule":
@@ -251,7 +264,7 @@ async def async_parse_motion_region_detector(uid: str, msg) -> Event | None:
rule = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "VideoSourceConfigurationToken":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
if source.Name == "VideoAnalyticsConfigurationToken":
video_analytics = source.Value
if source.Name == "Rule":
@@ -282,7 +295,7 @@ async def async_parse_tamper_detector(uid: str, msg) -> Event | None:
rule = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "VideoSourceConfigurationToken":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
if source.Name == "VideoAnalyticsConfigurationToken":
video_analytics = source.Value
if source.Name == "Rule":
@@ -312,7 +325,7 @@ async def async_parse_dog_cat_detector(uid: str, msg) -> Event | None:
video_source = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "Source":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
return Event(
f"{uid}_{msg.Topic._value_1}_{video_source}",
@@ -337,7 +350,7 @@ async def async_parse_vehicle_detector(uid: str, msg) -> Event | None:
video_source = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "Source":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
return Event(
f"{uid}_{msg.Topic._value_1}_{video_source}",
@@ -362,7 +375,7 @@ async def async_parse_person_detector(uid: str, msg) -> Event | None:
video_source = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "Source":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
return Event(
f"{uid}_{msg.Topic._value_1}_{video_source}",
@@ -387,7 +400,7 @@ async def async_parse_face_detector(uid: str, msg) -> Event | None:
video_source = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "Source":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
return Event(
f"{uid}_{msg.Topic._value_1}_{video_source}",
@@ -401,6 +414,31 @@ async def async_parse_face_detector(uid: str, msg) -> Event | None:
return None
@PARSERS.register("tns1:RuleEngine/MyRuleDetector/Visitor")
# pylint: disable=protected-access
async def async_parse_visitor_detector(uid: str, msg) -> Event | None:
"""Handle parsing event message.
Topic: tns1:RuleEngine/MyRuleDetector/Visitor
"""
try:
video_source = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "Source":
video_source = _normalize_video_source(source.Value)
return Event(
f"{uid}_{msg.Topic._value_1}_{video_source}",
"Visitor Detection",
"binary_sensor",
"occupancy",
None,
msg.Message._value_1.Data.SimpleItem[0].Value == "true",
)
except (AttributeError, KeyError):
return None
@PARSERS.register("tns1:Device/Trigger/DigitalInput")
# pylint: disable=protected-access
async def async_parse_digital_input(uid: str, msg) -> Event | None:
@@ -658,7 +696,7 @@ async def async_parse_count_aggregation_counter(uid: str, msg) -> Event | None:
rule = ""
for source in msg.Message._value_1.Source.SimpleItem:
if source.Name == "VideoSourceConfigurationToken":
video_source = source.Value
video_source = _normalize_video_source(source.Value)
if source.Name == "VideoAnalyticsConfigurationToken":
video_analytics = source.Value
if source.Name == "Rule":

View File

@@ -47,6 +47,7 @@
},
"reauth_confirm": {
"title": "Reauthenticate the ONVIF device",
"description": "Some devices will reject authentication if the time is out of sync by more than 5 seconds. If authentication is unsuccessful, verify the time on the device is correct and try again.",
"data": {
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]"

View File

@@ -34,7 +34,7 @@ def stringify_onvif_error(error: Exception) -> str:
message += f" (actor:{error.actor})"
else:
message = str(error)
return message or "Device sent empty error"
return message or f"Device sent empty error with type {type(error)}"
def is_auth_error(error: Exception) -> bool:

View File

@@ -38,7 +38,8 @@ DEFAULT_ALTITUDE = 0
EVENT_OPENSKY_ENTRY = f"{DOMAIN}_entry"
EVENT_OPENSKY_EXIT = f"{DOMAIN}_exit"
SCAN_INTERVAL = timedelta(seconds=12) # opensky public limit is 10 seconds
# An OpenSky free user has 400 credits per day at 4 credits per API call,
# i.e. 100 calls per day; 100/24 = ~4 requests per hour
SCAN_INTERVAL = timedelta(minutes=15)
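A worked version of the credit arithmetic behind the 15-minute interval (assuming, as the comment implies, that the 400 credits are a daily allowance):

# Editor's sketch (not part of this changeset): the free-tier request budget.
CREDITS_PER_DAY = 400
CREDITS_PER_CALL = 4

calls_per_day = CREDITS_PER_DAY // CREDITS_PER_CALL     # 100
calls_per_hour = calls_per_day / 24                     # ~4.2
minutes_between_calls = 24 * 60 / calls_per_day         # 14.4, rounded up to 15
print(calls_per_day, round(calls_per_hour, 1), minutes_between_calls)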
OPENSKY_API_URL = "https://opensky-network.org/api/states/all"
OPENSKY_API_FIELDS = [

View File

@@ -13,7 +13,7 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"],
"requirements": ["pyoverkiz==1.7.7"],
"requirements": ["pyoverkiz==1.7.8"],
"zeroconf": [
{
"type": "_kizbox._tcp.local.",

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"quality_scale": "platinum",
"requirements": ["vehicle==1.0.0"]
"requirements": ["vehicle==1.0.1"]
}

View File

@@ -46,28 +46,28 @@ BUTTON_ENTITIES = (
key="ptz_left",
name="PTZ left",
icon="mdi:pan",
supported=lambda api, ch: api.supported(ch, "pan_tilt"),
supported=lambda api, ch: api.supported(ch, "pan"),
method=lambda api, ch: api.set_ptz_command(ch, command=PtzEnum.left.value),
),
ReolinkButtonEntityDescription(
key="ptz_right",
name="PTZ right",
icon="mdi:pan",
supported=lambda api, ch: api.supported(ch, "pan_tilt"),
supported=lambda api, ch: api.supported(ch, "pan"),
method=lambda api, ch: api.set_ptz_command(ch, command=PtzEnum.right.value),
),
ReolinkButtonEntityDescription(
key="ptz_up",
name="PTZ up",
icon="mdi:pan",
supported=lambda api, ch: api.supported(ch, "pan_tilt"),
supported=lambda api, ch: api.supported(ch, "tilt"),
method=lambda api, ch: api.set_ptz_command(ch, command=PtzEnum.up.value),
),
ReolinkButtonEntityDescription(
key="ptz_down",
name="PTZ down",
icon="mdi:pan",
supported=lambda api, ch: api.supported(ch, "pan_tilt"),
supported=lambda api, ch: api.supported(ch, "tilt"),
method=lambda api, ch: api.set_ptz_command(ch, command=PtzEnum.down.value),
),
ReolinkButtonEntityDescription(

View File

@@ -18,5 +18,5 @@
"documentation": "https://www.home-assistant.io/integrations/reolink",
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"requirements": ["reolink-aio==0.5.13"]
"requirements": ["reolink-aio==0.5.15"]
}

View File

@@ -35,7 +35,7 @@ SIREN_ENTITIES = (
key="siren",
name="Siren",
icon="mdi:alarm-light",
supported=lambda api, ch: api.supported(ch, "siren"),
supported=lambda api, ch: api.supported(ch, "siren_play"),
),
)

View File

@@ -8,6 +8,7 @@ import logging
from roborock.api import RoborockApiClient
from roborock.cloud_api import RoborockMqttClient
from roborock.containers import HomeDataDevice, RoborockDeviceInfo, UserData
from roborock.exceptions import RoborockException
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_USERNAME
@@ -44,7 +45,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
for device, result in zip(devices, network_results)
if result is not None
}
await mqtt_client.async_disconnect()
try:
await mqtt_client.async_disconnect()
except RoborockException as err:
_LOGGER.warning("Failed disconnecting from the mqtt server %s", err)
if not network_info:
raise ConfigEntryNotReady(
"Could not get network information about your devices"

View File

@@ -29,7 +29,7 @@ apply:
name: Entities state
description: The entities and the state that they need to be.
required: true
example:
example: |
light.kitchen: "on"
light.ceiling:
state: "on"
@@ -60,7 +60,7 @@ create:
entities:
name: Entities state
description: The entities to control with the scene.
example:
example: |
light.tv_back_light: "on"
light.ceiling:
state: "on"
@@ -70,7 +70,7 @@ create:
snapshot_entities:
name: Snapshot entities
description: The entities of which a snapshot is to be taken
example:
example: |
- light.ceiling
- light.kitchen
selector:

View File

@@ -10,7 +10,7 @@ from sense_energy import (
)
DOMAIN = "sense"
DEFAULT_TIMEOUT = 10
DEFAULT_TIMEOUT = 30
ACTIVE_UPDATE_RATE = 60
DEFAULT_NAME = "Sense"
SENSE_DATA = "sense_data"

View File

@@ -17,7 +17,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
hass.data[DOMAIN][entry.entry_id] = hub
try:
if hub.sia_client:
await hub.sia_client.start(reuse_port=True)
await hub.sia_client.async_start(reuse_port=True)
except OSError as exc:
raise ConfigEntryNotReady(
f"SIA Server at port {entry.data[CONF_PORT]} could not start."

View File

@@ -123,7 +123,7 @@ class SIAAlarmControlPanel(SIABaseEntity, AlarmControlPanelEntity):
"""
new_state = None
if sia_event.code:
new_state = self.entity_description.code_consequences[sia_event.code]
new_state = self.entity_description.code_consequences.get(sia_event.code)
if new_state is None:
return False
_LOGGER.debug("New state will be %s", new_state)

View File

@@ -132,7 +132,7 @@ class SIABinarySensor(SIABaseEntity, BinarySensorEntity):
"""
new_state = None
if sia_event.code:
new_state = self.entity_description.code_consequences[sia_event.code]
new_state = self.entity_description.code_consequences.get(sia_event.code)
if new_state is None:
return False
_LOGGER.debug("New state will be %s", new_state)

View File

@@ -71,7 +71,7 @@ class SIAHub:
async def async_shutdown(self, _: Event | None = None) -> None:
"""Shutdown the SIA server."""
if self.sia_client:
await self.sia_client.stop()
await self.sia_client.async_stop()
async def async_create_and_fire_event(self, event: SIAEvent) -> None:
"""Create a event on HA dispatcher and then on HA's bus, with the data from the SIAEvent.

View File

@@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/sleepiq",
"iot_class": "cloud_polling",
"loggers": ["asyncsleepiq"],
"requirements": ["asyncsleepiq==1.3.4"]
"requirements": ["asyncsleepiq==1.3.5"]
}

View File

@@ -8,7 +8,7 @@
"documentation": "https://www.home-assistant.io/integrations/sonos",
"iot_class": "local_push",
"loggers": ["soco"],
"requirements": ["soco==0.29.1", "sonos-websocket==0.1.0"],
"requirements": ["soco==0.29.1", "sonos-websocket==0.1.1"],
"ssdp": [
{
"st": "urn:schemas-upnp-org:device:ZonePlayer:1"

View File

@@ -147,8 +147,10 @@ async def async_remove_config_entry_device(
api = data.api
serial = api.information.serial
storage = api.storage
# get_all_cameras does not do I/O
all_cameras: list[SynoCamera] = api.surveillance_station.get_all_cameras()
all_cameras: list[SynoCamera] = []
if api.surveillance_station is not None:
# get_all_cameras does not do I/O
all_cameras = api.surveillance_station.get_all_cameras()
device_ids = chain(
(camera.id for camera in all_cameras),
storage.volumes_ids,

View File

@@ -8,5 +8,5 @@
"iot_class": "local_push",
"loggers": ["hatasmota"],
"mqtt": ["tasmota/discovery/#"],
"requirements": ["hatasmota==0.6.4"]
"requirements": ["hatasmota==0.6.5"]
}

View File

@@ -7,7 +7,11 @@ import logging
from typing import Any
import transmission_rpc
from transmission_rpc.error import TransmissionError
from transmission_rpc.error import (
TransmissionAuthError,
TransmissionConnectError,
TransmissionError,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
@@ -137,14 +141,13 @@ async def get_api(hass, entry):
_LOGGER.debug("Successfully connected to %s", host)
return api
except TransmissionAuthError as error:
_LOGGER.error("Credentials for Transmission client are not valid")
raise AuthenticationError from error
except TransmissionConnectError as error:
_LOGGER.error("Connecting to the Transmission client %s failed", host)
raise CannotConnect from error
except TransmissionError as error:
if "401: Unauthorized" in str(error):
_LOGGER.error("Credentials for Transmission client are not valid")
raise AuthenticationError from error
if "111: Connection refused" in str(error):
_LOGGER.error("Connecting to the Transmission client %s failed", host)
raise CannotConnect from error
_LOGGER.error(error)
raise UnknownError from error
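Because transmission_rpc now raises dedicated exception types, the mapping to the integration's own errors no longer needs to match substrings. An editor's sketch with stand-in classes:

# Editor's sketch (not part of this changeset): stand-in exception classes
# showing the typed mapping that replaces the old string matching.
class TransmissionError(Exception): ...
class TransmissionAuthError(TransmissionError): ...
class TransmissionConnectError(TransmissionError): ...

class AuthenticationError(Exception): ...
class CannotConnect(Exception): ...
class UnknownError(Exception): ...

def map_error(error: TransmissionError) -> Exception:
    """Translate a library error into the integration's own exception."""
    if isinstance(error, TransmissionAuthError):
        return AuthenticationError()
    if isinstance(error, TransmissionConnectError):
        return CannotConnect()
    return UnknownError()

assert isinstance(map_error(TransmissionAuthError()), AuthenticationError)
assert isinstance(map_error(TransmissionConnectError()), CannotConnect)
assert isinstance(map_error(TransmissionError()), UnknownError)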

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/upb",
"iot_class": "local_push",
"loggers": ["upb_lib"],
"requirements": ["upb_lib==0.5.3"]
"requirements": ["upb_lib==0.5.4"]
}

View File

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/voip",
"iot_class": "local_push",
"quality_scale": "internal",
"requirements": ["voip-utils==0.0.6"]
"requirements": ["voip-utils==0.0.7"]
}

View File

@@ -52,7 +52,11 @@ def make_protocol(
or (pipeline.tts_engine is None)
):
# Play pre-recorded message instead of failing
return PreRecordMessageProtocol(hass, "problem.pcm")
return PreRecordMessageProtocol(
hass,
"problem.pcm",
opus_payload_type=call_info.opus_payload_type,
)
# Pipeline is properly configured
return PipelineRtpDatagramProtocol(
@@ -60,6 +64,7 @@ def make_protocol(
hass.config.language,
voip_device,
Context(user_id=devices.config_entry.data["user"]),
opus_payload_type=call_info.opus_payload_type,
)
@@ -79,7 +84,9 @@ class HassVoipDatagramProtocol(VoipDatagramProtocol):
hass, devices, call_info
),
invalid_protocol_factory=lambda call_info: PreRecordMessageProtocol(
hass, "not_configured.pcm"
hass,
"not_configured.pcm",
opus_payload_type=call_info.opus_payload_type,
),
)
self.hass = hass
@@ -109,6 +116,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
language: str,
voip_device: VoIPDevice,
context: Context,
opus_payload_type: int,
pipeline_timeout: float = 30.0,
audio_timeout: float = 2.0,
buffered_chunks_before_speech: int = 100,
@@ -119,7 +127,12 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
tts_extra_timeout: float = 1.0,
) -> None:
"""Set up pipeline RTP server."""
super().__init__(rate=RATE, width=WIDTH, channels=CHANNELS)
super().__init__(
rate=RATE,
width=WIDTH,
channels=CHANNELS,
opus_payload_type=opus_payload_type,
)
self.hass = hass
self.language = language
@@ -350,9 +363,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
async with async_timeout.timeout(tts_seconds + self.tts_extra_timeout):
# Assume TTS audio is 16Khz 16-bit mono
await self.hass.async_add_executor_job(
partial(self.send_audio, audio_bytes, **RTP_AUDIO_SETTINGS)
)
await self._async_send_audio(audio_bytes)
except asyncio.TimeoutError as err:
_LOGGER.warning("TTS timeout")
raise err
@@ -360,6 +371,12 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
# Signal pipeline to restart
self._tts_done.set()
async def _async_send_audio(self, audio_bytes: bytes, **kwargs):
"""Send audio in executor."""
await self.hass.async_add_executor_job(
partial(self.send_audio, audio_bytes, **RTP_AUDIO_SETTINGS, **kwargs)
)
async def _play_listening_tone(self) -> None:
"""Play a tone to indicate that Home Assistant is listening."""
if self._tone_bytes is None:
@@ -369,13 +386,9 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
"tone.pcm",
)
await self.hass.async_add_executor_job(
partial(
self.send_audio,
self._tone_bytes,
silence_before=self.tone_delay,
**RTP_AUDIO_SETTINGS,
)
await self._async_send_audio(
self._tone_bytes,
silence_before=self.tone_delay,
)
async def _play_processing_tone(self) -> None:
@@ -387,13 +400,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
"processing.pcm",
)
await self.hass.async_add_executor_job(
partial(
self.send_audio,
self._processing_bytes,
**RTP_AUDIO_SETTINGS,
)
)
await self._async_send_audio(self._processing_bytes)
async def _play_error_tone(self) -> None:
"""Play a tone to indicate a pipeline error occurred."""
@@ -404,13 +411,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
"error.pcm",
)
await self.hass.async_add_executor_job(
partial(
self.send_audio,
self._error_bytes,
**RTP_AUDIO_SETTINGS,
)
)
await self._async_send_audio(self._error_bytes)
def _load_pcm(self, file_name: str) -> bytes:
"""Load raw audio (16Khz, 16-bit mono)."""
@@ -424,11 +425,17 @@ class PreRecordMessageProtocol(RtpDatagramProtocol):
self,
hass: HomeAssistant,
file_name: str,
opus_payload_type: int,
message_delay: float = 1.0,
loop_delay: float = 2.0,
) -> None:
"""Set up RTP server."""
super().__init__(rate=RATE, width=WIDTH, channels=CHANNELS)
super().__init__(
rate=RATE,
width=WIDTH,
channels=CHANNELS,
opus_payload_type=opus_payload_type,
)
self.hass = hass
self.file_name = file_name
self.message_delay = message_delay
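
The hunks above collapse several hass.async_add_executor_job(partial(self.send_audio, ...)) call sites into one _async_send_audio helper. A minimal standalone sketch of that pattern, using plain asyncio in place of Home Assistant's executor helper and placeholder audio settings:

import asyncio
from functools import partial

RTP_AUDIO_SETTINGS = {"rate": 16000, "width": 2, "channels": 1}  # illustrative values

def send_audio(audio_bytes: bytes, *, silence_before: float = 0.0, **settings) -> None:
    """Blocking send, standing in for the real RTP write loop."""
    print(f"sent {len(audio_bytes)} bytes, settings={settings}, delay={silence_before}")

async def async_send_audio(audio_bytes: bytes, **kwargs) -> None:
    """Run the blocking send in the default executor, as the new helper does."""
    loop = asyncio.get_running_loop()
    await loop.run_in_executor(
        None, partial(send_audio, audio_bytes, **RTP_AUDIO_SETTINGS, **kwargs)
    )

asyncio.run(async_send_audio(b"\x00" * 320, silence_before=0.2))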


@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/volvooncall",
"iot_class": "cloud_polling",
"loggers": ["geopy", "hbmqtt", "volvooncall"],
"requirements": ["volvooncall==0.10.2"]
"requirements": ["volvooncall==0.10.3"]
}


@@ -7,7 +7,7 @@
"iot_class": "local_push",
"loggers": ["aiowebostv"],
"quality_scale": "platinum",
"requirements": ["aiowebostv==0.3.2"],
"requirements": ["aiowebostv==0.3.3"],
"ssdp": [
{
"st": "urn:lge-com:service:webos-second-screen:1"


@@ -1,7 +1,8 @@
{
"config": {
"abort": {
"incorrect_province": "Incorrect subdivision from yaml import"
"incorrect_province": "Incorrect subdivision from yaml import",
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
},
"step": {
"user": {
@@ -31,8 +32,7 @@
},
"error": {
"add_holiday_error": "Incorrect format on date (YYYY-MM-DD)",
"remove_holiday_error": "Incorrect format on date (YYYY-MM-DD) or holiday name not found",
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
"remove_holiday_error": "Incorrect format on date (YYYY-MM-DD) or holiday name not found"
}
},
"options": {
@@ -59,7 +59,7 @@
"error": {
"add_holiday_error": "Incorrect format on date (YYYY-MM-DD)",
"remove_holiday_error": "Incorrect format on date (YYYY-MM-DD) or holiday name not found",
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
"already_configured": "Service with this configuration already exist"
}
},
"issues": {


@@ -117,6 +117,7 @@ class WindowCoveringClient(ClientClusterHandler):
"""Window client cluster handler."""
@registries.BINDABLE_CLUSTERS.register(closures.WindowCovering.cluster_id)
@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(closures.WindowCovering.cluster_id)
class WindowCovering(ClusterHandler):
"""Window cluster handler."""


@@ -137,7 +137,19 @@ class Endpoint:
):
cluster_handler_class = MultistateInput
# end of ugly hack
cluster_handler = cluster_handler_class(cluster, self)
try:
cluster_handler = cluster_handler_class(cluster, self)
except KeyError as err:
_LOGGER.warning(
"Cluster handler %s for cluster %s on endpoint %s is invalid: %s",
cluster_handler_class,
cluster,
self,
err,
)
continue
if cluster_handler.name == const.CLUSTER_HANDLER_POWER_CONFIGURATION:
self._device.power_configuration_ch = cluster_handler
elif cluster_handler.name == const.CLUSTER_HANDLER_IDENTIFY:


@@ -20,10 +20,10 @@
"zigpy_znp"
],
"requirements": [
"bellows==0.35.2",
"bellows==0.35.5",
"pyserial==3.5",
"pyserial-asyncio==0.6",
"zha-quirks==0.0.98",
"zha-quirks==0.0.99",
"zigpy-deconz==0.21.0",
"zigpy==0.55.0",
"zigpy-xbee==0.18.0",


@@ -51,7 +51,7 @@ async def async_setup_entry(
entities: list[ZWaveBaseEntity] = []
if info.platform_hint == "motorized_barrier":
entities.append(ZwaveMotorizedBarrier(config_entry, driver, info))
elif info.platform_hint == "window_shutter_tilt":
elif info.platform_hint and info.platform_hint.endswith("tilt"):
entities.append(ZWaveTiltCover(config_entry, driver, info))
else:
entities.append(ZWaveCover(config_entry, driver, info))
@@ -99,6 +99,12 @@ def zwave_tilt_to_percent(value: int) -> int:
class ZWaveCover(ZWaveBaseEntity, CoverEntity):
"""Representation of a Z-Wave Cover device."""
_attr_supported_features = (
CoverEntityFeature.OPEN
| CoverEntityFeature.CLOSE
| CoverEntityFeature.SET_POSITION
)
def __init__(
self,
config_entry: ConfigEntry,
@@ -108,11 +114,20 @@ class ZWaveCover(ZWaveBaseEntity, CoverEntity):
"""Initialize a ZWaveCover entity."""
super().__init__(config_entry, driver, info)
self._stop_cover_value = (
self.get_zwave_value(COVER_OPEN_PROPERTY)
or self.get_zwave_value(COVER_UP_PROPERTY)
or self.get_zwave_value(COVER_ON_PROPERTY)
)
if self._stop_cover_value:
self._attr_supported_features |= CoverEntityFeature.STOP
# Entity class attributes
self._attr_device_class = CoverDeviceClass.WINDOW
if self.info.platform_hint in ("window_shutter", "window_shutter_tilt"):
if self.info.platform_hint and self.info.platform_hint.startswith("shutter"):
self._attr_device_class = CoverDeviceClass.SHUTTER
if self.info.platform_hint == "window_blind":
if self.info.platform_hint and self.info.platform_hint.startswith("blind"):
self._attr_device_class = CoverDeviceClass.BLIND
@property
@@ -153,28 +168,13 @@ class ZWaveCover(ZWaveBaseEntity, CoverEntity):
async def async_stop_cover(self, **kwargs: Any) -> None:
"""Stop cover."""
cover_property = (
self.get_zwave_value(COVER_OPEN_PROPERTY)
or self.get_zwave_value(COVER_UP_PROPERTY)
or self.get_zwave_value(COVER_ON_PROPERTY)
)
if cover_property:
# Stop the cover, will stop regardless of the actual direction of travel.
await self.info.node.async_set_value(cover_property, False)
assert self._stop_cover_value
# Stop the cover, will stop regardless of the actual direction of travel.
await self.info.node.async_set_value(self._stop_cover_value, False)
class ZWaveTiltCover(ZWaveCover):
"""Representation of a Z-Wave Cover device with tilt."""
_attr_supported_features = (
CoverEntityFeature.OPEN
| CoverEntityFeature.CLOSE
| CoverEntityFeature.STOP
| CoverEntityFeature.SET_POSITION
| CoverEntityFeature.OPEN_TILT
| CoverEntityFeature.CLOSE_TILT
| CoverEntityFeature.SET_TILT_POSITION
)
"""Representation of a Z-Wave cover device with tilt."""
def __init__(
self,
@@ -184,8 +184,15 @@ class ZWaveTiltCover(ZWaveCover):
) -> None:
"""Initialize a ZWaveCover entity."""
super().__init__(config_entry, driver, info)
self.data_template = cast(
self._current_tilt_value = cast(
CoverTiltDataTemplate, self.info.platform_data_template
).current_tilt_value(self.info.platform_data)
self._attr_supported_features |= (
CoverEntityFeature.OPEN_TILT
| CoverEntityFeature.CLOSE_TILT
| CoverEntityFeature.SET_TILT_POSITION
)
@property
@@ -194,19 +201,18 @@ class ZWaveTiltCover(ZWaveCover):
None is unknown, 0 is closed, 100 is fully open.
"""
value = self.data_template.current_tilt_value(self.info.platform_data)
value = self._current_tilt_value
if value is None or value.value is None:
return None
return zwave_tilt_to_percent(int(value.value))
async def async_set_cover_tilt_position(self, **kwargs: Any) -> None:
"""Move the cover tilt to a specific position."""
tilt_value = self.data_template.current_tilt_value(self.info.platform_data)
if tilt_value:
await self.info.node.async_set_value(
tilt_value,
percent_to_zwave_tilt(kwargs[ATTR_TILT_POSITION]),
)
assert self._current_tilt_value
await self.info.node.async_set_value(
self._current_tilt_value,
percent_to_zwave_tilt(kwargs[ATTR_TILT_POSITION]),
)
async def async_open_cover_tilt(self, **kwargs: Any) -> None:
"""Open the cover tilt."""


@@ -347,7 +347,7 @@ DISCOVERY_SCHEMAS = [
# Fibaro Shutter Fibaro FGR222
ZWaveDiscoverySchema(
platform=Platform.COVER,
hint="window_shutter_tilt",
hint="shutter_tilt",
manufacturer_id={0x010F},
product_id={0x1000, 0x1001},
product_type={0x0301, 0x0302},
@@ -371,7 +371,7 @@ DISCOVERY_SCHEMAS = [
# Qubino flush shutter
ZWaveDiscoverySchema(
platform=Platform.COVER,
hint="window_shutter",
hint="shutter",
manufacturer_id={0x0159},
product_id={0x0052, 0x0053},
product_type={0x0003},
@@ -380,7 +380,7 @@ DISCOVERY_SCHEMAS = [
# Graber/Bali/Spring Fashion Covers
ZWaveDiscoverySchema(
platform=Platform.COVER,
hint="window_blind",
hint="blind",
manufacturer_id={0x026E},
product_id={0x5A31},
product_type={0x4353},
@@ -389,7 +389,7 @@ DISCOVERY_SCHEMAS = [
# iBlinds v2 window blind motor
ZWaveDiscoverySchema(
platform=Platform.COVER,
hint="window_blind",
hint="blind",
manufacturer_id={0x0287},
product_id={0x000D},
product_type={0x0003},
@@ -398,7 +398,7 @@ DISCOVERY_SCHEMAS = [
# Merten 507801 Connect Roller Shutter
ZWaveDiscoverySchema(
platform=Platform.COVER,
hint="window_shutter",
hint="shutter",
manufacturer_id={0x007A},
product_id={0x0001},
product_type={0x8003},
@@ -414,7 +414,7 @@ DISCOVERY_SCHEMAS = [
# Disable endpoint 2, as it has no practical function. CC: Switch_Multilevel
ZWaveDiscoverySchema(
platform=Platform.COVER,
hint="window_shutter",
hint="shutter",
manufacturer_id={0x007A},
product_id={0x0001},
product_type={0x8003},
@@ -807,7 +807,7 @@ DISCOVERY_SCHEMAS = [
# window coverings
ZWaveDiscoverySchema(
platform=Platform.COVER,
hint="window_cover",
hint="cover",
device_class_generic={"Multilevel Switch"},
device_class_specific={
"Motor Control Class A",


@@ -84,7 +84,7 @@ bulk_set_partial_config_parameters:
value:
name: Value
description: The new value(s) to set for this configuration parameter. Can either be a raw integer value to represent the bulk change or a mapping where the key is the bitmask (either in hex or integer form) and the value is the new value you want to set for that partial parameter.
example:
example: |
"0x1": 1
"0x10": 1
"0x20": 1
@@ -287,7 +287,7 @@ invoke_cc_api:
parameters:
name: Parameters
description: A list of parameters to pass to the API method. Refer to the Z-Wave JS Command Class API documentation (https://zwave-js.github.io/node-zwave-js/#/api/CCs/index) for parameters.
example: [1, 1]
example: "[1, 1]"
required: true
selector:
object:


@@ -8,7 +8,7 @@ from .backports.enum import StrEnum
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2023
MINOR_VERSION: Final = 5
PATCH_VERSION: Final = "0b6"
PATCH_VERSION: Final = "3"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)


@@ -37,6 +37,11 @@ SERVER_SOFTWARE = "{0}/{1} aiohttp/{2} Python/{3[0]}.{3[1]}".format(
APPLICATION_NAME, __version__, aiohttp.__version__, sys.version_info
)
ENABLE_CLEANUP_CLOSED = sys.version_info < (3, 11, 1)
# Enabling cleanup closed on python 3.11.1+ leaks memory relatively quickly
# see https://github.com/aio-libs/aiohttp/issues/7252
# aiohttp interacts poorly with https://github.com/python/cpython/pull/98540
WARN_CLOSE_MSG = "closes the Home Assistant aiohttp session"
#
@@ -276,7 +281,7 @@ def _async_get_connector(
ssl_context = ssl_util.get_default_no_verify_context()
connector = aiohttp.TCPConnector(
enable_cleanup_closed=True,
enable_cleanup_closed=ENABLE_CLEANUP_CLOSED,
ssl=ssl_context,
limit=MAXIMUM_CONNECTIONS,
limit_per_host=MAXIMUM_CONNECTIONS_PER_HOST,
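
A self-contained sketch of the version gate applied above, outside Home Assistant: the connector only opts into cleanup_closed on interpreters where it does not leak. aiohttp.TCPConnector and ClientSession are real APIs; the URL is just an example.

import asyncio
import sys

import aiohttp

ENABLE_CLEANUP_CLOSED = sys.version_info < (3, 11, 1)

async def fetch_status(url: str) -> int:
    """Fetch a URL with a connector that only enables cleanup_closed where safe."""
    connector = aiohttp.TCPConnector(enable_cleanup_closed=ENABLE_CLEANUP_CLOSED)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as resp:
            return resp.status

print(asyncio.run(fetch_status("https://www.home-assistant.io")))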


@@ -763,13 +763,6 @@ class Entity(ABC):
hass = self.hass
assert hass is not None
if hasattr(self, "async_update"):
coro: asyncio.Future[None] = self.async_update()
elif hasattr(self, "update"):
coro = hass.async_add_executor_job(self.update)
else:
return
self._update_staged = True
# Process update sequential
@@ -780,8 +773,14 @@ class Entity(ABC):
update_warn = hass.loop.call_later(
SLOW_UPDATE_WARNING, self._async_slow_update_warning
)
try:
await coro
if hasattr(self, "async_update"):
await self.async_update()
elif hasattr(self, "update"):
await hass.async_add_executor_job(self.update)
else:
return
finally:
self._update_staged = False
if warning:
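
A minimal sketch of the shape this refactor gives the update path (names and the timeout are illustrative): the async/sync dispatch now happens inside the try block, so the slow-update timer is always cancelled in finally, including when the entity has no update method at all.

import asyncio

SLOW_UPDATE_WARNING = 10  # seconds, illustrative

async def run_update(entity: object) -> None:
    loop = asyncio.get_running_loop()
    update_warn = loop.call_later(
        SLOW_UPDATE_WARNING, lambda: print("update is taking over 10 seconds")
    )
    try:
        if hasattr(entity, "async_update"):
            await entity.async_update()
        elif hasattr(entity, "update"):
            await loop.run_in_executor(None, entity.update)
        else:
            return
    finally:
        update_warn.cancel()

class Demo:
    async def async_update(self) -> None:
        print("updated")

asyncio.run(run_update(Demo()))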


@@ -307,26 +307,6 @@ class RegistryEntry:
hass.states.async_set(self.entity_id, STATE_UNAVAILABLE, attrs)
def async_friendly_name(self, hass: HomeAssistant) -> str | None:
"""Return the friendly name.
If self.name is not None, this returns self.name
If has_entity_name is False, self.original_name
If has_entity_name is True, this returns device.name + self.original_name
"""
if not self.has_entity_name or self.name is not None:
return self.name or self.original_name
device_registry = dr.async_get(hass)
if not (device_id := self.device_id) or not (
device_entry := device_registry.async_get(device_id)
):
return self.original_name
if not (original_name := self.original_name):
return device_entry.name_by_user or device_entry.name
return f"{device_entry.name_by_user or device_entry.name} {original_name}"
class EntityRegistryStore(storage.Store[dict[str, list[dict[str, Any]]]]):
"""Store entity registry data."""


@@ -11,6 +11,7 @@ from typing import Any, TypeVar
import voluptuous as vol
from homeassistant.components.homeassistant.exposed_entities import async_should_expose
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
@@ -65,6 +66,7 @@ async def async_handle(
text_input: str | None = None,
context: Context | None = None,
language: str | None = None,
assistant: str | None = None,
) -> IntentResponse:
"""Handle an intent."""
handler: IntentHandler = hass.data.get(DATA_KEY, {}).get(intent_type)
@@ -79,7 +81,14 @@ async def async_handle(
language = hass.config.language
intent = Intent(
hass, platform, intent_type, slots or {}, text_input, context, language
hass,
platform=platform,
intent_type=intent_type,
slots=slots or {},
text_input=text_input,
context=context,
language=language,
assistant=assistant,
)
try:
@@ -208,6 +217,7 @@ def async_match_states(
entities: entity_registry.EntityRegistry | None = None,
areas: area_registry.AreaRegistry | None = None,
devices: device_registry.DeviceRegistry | None = None,
assistant: str | None = None,
) -> Iterable[State]:
"""Find states that match the constraints."""
if states is None:
@@ -258,6 +268,14 @@ def async_match_states(
states_and_entities = list(_filter_by_area(states_and_entities, area, devices))
if assistant is not None:
# Filter by exposure
states_and_entities = [
(state, entity)
for state, entity in states_and_entities
if async_should_expose(hass, assistant, state.entity_id)
]
if name is not None:
if devices is None:
devices = device_registry.async_get(hass)
@@ -387,6 +405,7 @@ class ServiceIntentHandler(IntentHandler):
area=area,
domains=domains,
device_classes=device_classes,
assistant=intent_obj.assistant,
)
)
@@ -496,6 +515,7 @@ class Intent:
"context",
"language",
"category",
"assistant",
]
def __init__(
@@ -508,6 +528,7 @@ class Intent:
context: Context,
language: str,
category: IntentCategory | None = None,
assistant: str | None = None,
) -> None:
"""Initialize an intent."""
self.hass = hass
@@ -518,6 +539,7 @@ class Intent:
self.context = context
self.language = language
self.category = category
self.assistant = assistant
@callback
def create_response(self) -> IntentResponse:
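
A standalone sketch of the exposure filter introduced above: when an assistant id is given, only entities that pass the expose check are kept. The predicate here is a stand-in; the real check is homeassistant.components.homeassistant.exposed_entities.async_should_expose.

EXPOSED = {"conversation": {"light.kitchen"}}  # illustrative exposure settings

def should_expose(assistant: str, entity_id: str) -> bool:
    """Stand-in for the real async_should_expose check."""
    return entity_id in EXPOSED.get(assistant, set())

def filter_exposed(entity_ids: list[str], assistant: str | None) -> list[str]:
    """Keep every entity when no assistant is given, otherwise filter by exposure."""
    if assistant is None:
        return entity_ids
    return [entity_id for entity_id in entity_ids if should_expose(assistant, entity_id)]

print(filter_exposed(["light.kitchen", "light.garage"], "conversation"))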


@@ -14,7 +14,7 @@ bcrypt==4.0.1
bleak-retry-connector==3.0.2
bleak==0.20.2
bluetooth-adapters==0.15.3
bluetooth-auto-recovery==1.0.3
bluetooth-auto-recovery==1.2.0
bluetooth-data-tools==0.4.0
certifi>=2021.5.30
ciso8601==2.3.0
@@ -25,7 +25,7 @@ ha-av==10.0.0
hass-nabucasa==0.66.2
hassil==1.0.6
home-assistant-bluetooth==1.10.0
home-assistant-frontend==20230501.0
home-assistant-frontend==20230503.3
home-assistant-intents==2023.4.26
httpx==0.24.0
ifaddr==0.1.7
@@ -47,7 +47,7 @@ requests==2.28.2
scapy==2.5.0
sqlalchemy==2.0.12
typing-extensions>=4.5.0,<5.0
ulid-transform==0.7.0
ulid-transform==0.7.2
voluptuous-serialize==2.6.0
voluptuous==0.13.1
webrtcvad==2.0.10


@@ -54,6 +54,20 @@ def is_region(language: str, region: str | None) -> bool:
return True
def is_language_match(lang_1: str, lang_2: str) -> bool:
"""Return true if two languages are considered the same."""
if lang_1 == lang_2:
# Exact match
return True
if {lang_1, lang_2} == {"no", "nb"}:
# no = spoken Norwegian
# nb = written Norwegian (Bokmål)
return True
return False
@dataclass
class Dialect:
"""Language with optional region and script/code."""
@@ -71,26 +85,35 @@ class Dialect:
# Regions are upper-cased
self.region = self.region.upper()
def score(self, dialect: Dialect, country: str | None = None) -> float:
def score(
self, dialect: Dialect, country: str | None = None
) -> tuple[float, float]:
"""Return score for match with another dialect where higher is better.
Score < 0 indicates a failure to match.
"""
if self.language != dialect.language:
if not is_language_match(self.language, dialect.language):
# Not a match
return -1
return (-1, 0)
is_exact_language = self.language == dialect.language
if (self.region is None) and (dialect.region is None):
# Weak match with no region constraint
return 1
# Prefer exact language match
return (2 if is_exact_language else 1, 0)
if (self.region is not None) and (dialect.region is not None):
if self.region == dialect.region:
# Exact language + region match
return math.inf
# Same language + region match
# Prefer exact language match
return (
math.inf,
1 if is_exact_language else 0,
)
# Regions are both set, but don't match
return 0
return (0, 0)
# Generate ordered list of preferred regions
pref_regions = list(
@@ -113,13 +136,13 @@ class Dialect:
# More preferred regions are at the front.
# Add 1 to boost above a weak match where no regions are set.
return 1 + (len(pref_regions) - region_idx)
return (1 + (len(pref_regions) - region_idx), 0)
except ValueError:
# Region was not in preferred list
pass
# Not a preferred region
return 0
return (0, 0)
@staticmethod
def parse(tag: str) -> Dialect:
@@ -169,4 +192,4 @@ def matches(
)
# Score < 0 is not a match
return [tag for _dialect, score, tag in scored if score >= 0]
return [tag for _dialect, score, tag in scored if score[0] >= 0]
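
Two self-contained checks of the ideas in this change: "no" and "nb" are treated as the same language, and scores become (primary, tiebreak) tuples so an exact language match outranks an aliased one without disturbing the region logic.

def is_language_match(lang_1: str, lang_2: str) -> bool:
    """Return True if two language codes are considered the same."""
    if lang_1 == lang_2:
        return True
    # no = spoken Norwegian, nb = written Norwegian (Bokmål)
    return {lang_1, lang_2} == {"no", "nb"}

assert is_language_match("no", "nb")
assert not is_language_match("no", "sv")

# Tuples compare element by element, so the tiebreak only decides ties on the
# primary score; a failed match is still any tuple whose first element is < 0.
assert (2, 0) > (1, 0)                        # exact language beats aliased language
assert (float("inf"), 1) > (float("inf"), 0)  # exact language also wins region ties
assert (-1, 0)[0] < 0                         # failure check now looks at score[0]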


@@ -73,8 +73,6 @@ def create_no_verify_ssl_context(
https://github.com/aio-libs/aiohttp/blob/33953f110e97eecc707e1402daa8d543f38a189b/aiohttp/connector.py#L911
"""
sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
sslcontext.options |= ssl.OP_NO_SSLv2
sslcontext.options |= ssl.OP_NO_SSLv3
sslcontext.check_hostname = False
sslcontext.verify_mode = ssl.CERT_NONE
with contextlib.suppress(AttributeError):
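
A minimal sketch of the unverified client context after this change, using only the stdlib ssl calls shown above: hostname checking and certificate verification are turned off, and the explicit OP_NO_SSLv2 / OP_NO_SSLv3 bits are no longer set.

import ssl

def create_no_verify_ssl_context() -> ssl.SSLContext:
    """Build a TLS client context that skips certificate verification."""
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE
    return context

ctx = create_no_verify_ssl_context()
print(ctx.verify_mode is ssl.CERT_NONE)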


@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2023.5.0b6"
version = "2023.5.3"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
@@ -50,7 +50,7 @@ dependencies = [
"pyyaml==6.0",
"requests==2.28.2",
"typing-extensions>=4.5.0,<5.0",
"ulid-transform==0.7.0",
"ulid-transform==0.7.2",
"voluptuous==0.13.1",
"voluptuous-serialize==2.6.0",
"yarl==1.9.2",


@@ -24,7 +24,7 @@ python-slugify==4.0.1
pyyaml==6.0
requests==2.28.2
typing-extensions>=4.5.0,<5.0
ulid-transform==0.7.0
ulid-transform==0.7.2
voluptuous==0.13.1
voluptuous-serialize==2.6.0
yarl==1.9.2


@@ -116,7 +116,7 @@ aio_georss_gdacs==0.8
aioairq==0.2.4
# homeassistant.components.airzone
aioairzone==0.5.2
aioairzone==0.5.5
# homeassistant.components.ambient_station
aioambient==2023.04.0
@@ -156,7 +156,7 @@ aioecowitt==2023.01.0
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==13.7.2
aioesphomeapi==13.7.4
# homeassistant.components.flo
aioflo==2021.11.0
@@ -223,7 +223,7 @@ aionanoleaf==0.2.1
aionotify==0.2.0
# homeassistant.components.notion
aionotion==2023.04.2
aionotion==2023.05.4
# homeassistant.components.oncue
aiooncue==0.3.4
@@ -300,7 +300,7 @@ aiovlc==0.1.0
aiowatttime==0.1.1
# homeassistant.components.webostv
aiowebostv==0.3.2
aiowebostv==0.3.3
# homeassistant.components.yandex_transport
aioymaps==1.2.2
@@ -383,7 +383,7 @@ async-upnp-client==0.33.1
asyncpysupla==0.0.5
# homeassistant.components.sleepiq
asyncsleepiq==1.3.4
asyncsleepiq==1.3.5
# homeassistant.components.aten_pe
# atenpdu==0.3.2
@@ -428,10 +428,10 @@ beautifulsoup4==4.11.1
# beewi_smartclim==0.0.10
# homeassistant.components.zha
bellows==0.35.2
bellows==0.35.5
# homeassistant.components.bmw_connected_drive
bimmer_connected==0.13.2
bimmer_connected==0.13.3
# homeassistant.components.bizkaibus
bizkaibus==0.1.1
@@ -465,7 +465,7 @@ bluemaestro-ble==0.2.3
bluetooth-adapters==0.15.3
# homeassistant.components.bluetooth
bluetooth-auto-recovery==1.0.3
bluetooth-auto-recovery==1.2.0
# homeassistant.components.bluetooth
# homeassistant.components.esphome
@@ -644,7 +644,7 @@ elgato==4.0.1
eliqonline==1.2.2
# homeassistant.components.elkm1
elkm1-lib==2.2.1
elkm1-lib==2.2.2
# homeassistant.components.elmax
elmax_api==0.0.4
@@ -683,7 +683,7 @@ epsonprinter==0.0.9
esphome-dashboard-api==1.2.3
# homeassistant.components.netgear_lte
eternalegypt==0.0.15
eternalegypt==0.0.16
# homeassistant.components.eufylife_ble
eufylife_ble_client==0.1.7
@@ -881,7 +881,7 @@ hass_splunk==0.1.1
hassil==1.0.6
# homeassistant.components.tasmota
hatasmota==0.6.4
hatasmota==0.6.5
# homeassistant.components.jewish_calendar
hdate==0.10.4
@@ -911,7 +911,7 @@ hole==0.8.0
holidays==0.21.13
# homeassistant.components.frontend
home-assistant-frontend==20230501.0
home-assistant-frontend==20230503.3
# homeassistant.components.conversation
home-assistant-intents==2023.4.26
@@ -1264,7 +1264,7 @@ ondilo==0.2.0
onkyo-eiscp==1.2.7
# homeassistant.components.onvif
onvif-zeep-async==1.3.1
onvif-zeep-async==2.1.1
# homeassistant.components.opengarage
open-garage==0.2.0
@@ -1533,7 +1533,7 @@ pyblackbird==0.6
pybotvac==0.0.23
# homeassistant.components.braviatv
pybravia==0.3.2
pybravia==0.3.3
# homeassistant.components.nissan_leaf
pycarwings2==2.14
@@ -1859,7 +1859,7 @@ pyotgw==2.1.3
pyotp==2.8.0
# homeassistant.components.overkiz
pyoverkiz==1.7.7
pyoverkiz==1.7.8
# homeassistant.components.openweathermap
pyowm==3.2.0
@@ -2242,7 +2242,7 @@ regenmaschine==2022.11.0
renault-api==0.1.13
# homeassistant.components.reolink
reolink-aio==0.5.13
reolink-aio==0.5.15
# homeassistant.components.python_script
restrictedpython==6.0
@@ -2390,7 +2390,7 @@ solax==0.3.0
somfy-mylink-synergy==1.0.6
# homeassistant.components.sonos
sonos-websocket==0.1.0
sonos-websocket==0.1.1
# homeassistant.components.marytts
speak2mary==1.4.0
@@ -2565,7 +2565,7 @@ unifi-discovery==1.1.7
unifiled==0.11
# homeassistant.components.upb
upb_lib==0.5.3
upb_lib==0.5.4
# homeassistant.components.upcloud
upcloud-api==2.0.0
@@ -2582,7 +2582,7 @@ uvcclient==0.11.0
vallox-websocket-api==3.2.1
# homeassistant.components.rdw
vehicle==1.0.0
vehicle==1.0.1
# homeassistant.components.velbus
velbus-aio==2023.2.0
@@ -2594,13 +2594,13 @@ venstarcolortouch==0.19
vilfo-api-client==0.3.2
# homeassistant.components.voip
voip-utils==0.0.6
voip-utils==0.0.7
# homeassistant.components.volkszaehler
volkszaehler==0.4.0
# homeassistant.components.volvooncall
volvooncall==0.10.2
volvooncall==0.10.3
# homeassistant.components.verisure
vsure==2.6.1
@@ -2718,7 +2718,7 @@ zeroconf==0.58.2
zeversolar==0.3.1
# homeassistant.components.zha
zha-quirks==0.0.98
zha-quirks==0.0.99
# homeassistant.components.zhong_hong
zhong_hong_hvac==1.0.9


@@ -106,7 +106,7 @@ aio_georss_gdacs==0.8
aioairq==0.2.4
# homeassistant.components.airzone
aioairzone==0.5.2
aioairzone==0.5.5
# homeassistant.components.ambient_station
aioambient==2023.04.0
@@ -146,7 +146,7 @@ aioecowitt==2023.01.0
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==13.7.2
aioesphomeapi==13.7.4
# homeassistant.components.flo
aioflo==2021.11.0
@@ -204,7 +204,7 @@ aiomusiccast==0.14.8
aionanoleaf==0.2.1
# homeassistant.components.notion
aionotion==2023.04.2
aionotion==2023.05.4
# homeassistant.components.oncue
aiooncue==0.3.4
@@ -281,7 +281,7 @@ aiovlc==0.1.0
aiowatttime==0.1.1
# homeassistant.components.webostv
aiowebostv==0.3.2
aiowebostv==0.3.3
# homeassistant.components.yandex_transport
aioymaps==1.2.2
@@ -340,7 +340,7 @@ arcam-fmj==1.3.0
async-upnp-client==0.33.1
# homeassistant.components.sleepiq
asyncsleepiq==1.3.4
asyncsleepiq==1.3.5
# homeassistant.components.aurora
auroranoaa==0.0.3
@@ -361,10 +361,10 @@ base36==0.1.1
beautifulsoup4==4.11.1
# homeassistant.components.zha
bellows==0.35.2
bellows==0.35.5
# homeassistant.components.bmw_connected_drive
bimmer_connected==0.13.2
bimmer_connected==0.13.3
# homeassistant.components.bluetooth
bleak-retry-connector==3.0.2
@@ -385,7 +385,7 @@ bluemaestro-ble==0.2.3
bluetooth-adapters==0.15.3
# homeassistant.components.bluetooth
bluetooth-auto-recovery==1.0.3
bluetooth-auto-recovery==1.2.0
# homeassistant.components.bluetooth
# homeassistant.components.esphome
@@ -506,7 +506,7 @@ easyenergy==0.3.0
elgato==4.0.1
# homeassistant.components.elkm1
elkm1-lib==2.2.1
elkm1-lib==2.2.2
# homeassistant.components.elmax
elmax_api==0.0.4
@@ -679,7 +679,7 @@ hass-nabucasa==0.66.2
hassil==1.0.6
# homeassistant.components.tasmota
hatasmota==0.6.4
hatasmota==0.6.5
# homeassistant.components.jewish_calendar
hdate==0.10.4
@@ -700,7 +700,7 @@ hole==0.8.0
holidays==0.21.13
# homeassistant.components.frontend
home-assistant-frontend==20230501.0
home-assistant-frontend==20230503.3
# homeassistant.components.conversation
home-assistant-intents==2023.4.26
@@ -945,7 +945,7 @@ omnilogic==0.4.5
ondilo==0.2.0
# homeassistant.components.onvif
onvif-zeep-async==1.3.1
onvif-zeep-async==2.1.1
# homeassistant.components.opengarage
open-garage==0.2.0
@@ -1130,7 +1130,7 @@ pyblackbird==0.6
pybotvac==0.0.23
# homeassistant.components.braviatv
pybravia==0.3.2
pybravia==0.3.3
# homeassistant.components.cloudflare
pycfdns==2.0.1
@@ -1357,7 +1357,7 @@ pyotgw==2.1.3
pyotp==2.8.0
# homeassistant.components.overkiz
pyoverkiz==1.7.7
pyoverkiz==1.7.8
# homeassistant.components.openweathermap
pyowm==3.2.0
@@ -1611,7 +1611,7 @@ regenmaschine==2022.11.0
renault-api==0.1.13
# homeassistant.components.reolink
reolink-aio==0.5.13
reolink-aio==0.5.15
# homeassistant.components.python_script
restrictedpython==6.0
@@ -1714,7 +1714,7 @@ solax==0.3.0
somfy-mylink-synergy==1.0.6
# homeassistant.components.sonos
sonos-websocket==0.1.0
sonos-websocket==0.1.1
# homeassistant.components.marytts
speak2mary==1.4.0
@@ -1841,7 +1841,7 @@ ultraheat-api==0.5.1
unifi-discovery==1.1.7
# homeassistant.components.upb
upb_lib==0.5.3
upb_lib==0.5.4
# homeassistant.components.upcloud
upcloud-api==2.0.0
@@ -1858,7 +1858,7 @@ uvcclient==0.11.0
vallox-websocket-api==3.2.1
# homeassistant.components.rdw
vehicle==1.0.0
vehicle==1.0.1
# homeassistant.components.velbus
velbus-aio==2023.2.0
@@ -1870,10 +1870,10 @@ venstarcolortouch==0.19
vilfo-api-client==0.3.2
# homeassistant.components.voip
voip-utils==0.0.6
voip-utils==0.0.7
# homeassistant.components.volvooncall
volvooncall==0.10.2
volvooncall==0.10.3
# homeassistant.components.verisure
vsure==2.6.1
@@ -1964,7 +1964,7 @@ zeroconf==0.58.2
zeversolar==0.3.1
# homeassistant.components.zha
zha-quirks==0.0.98
zha-quirks==0.0.99
# homeassistant.components.zha
zigpy-deconz==0.21.0


@@ -1,4 +1,6 @@
"""The tests for the Air Quality component."""
import pytest
from homeassistant.components.air_quality import ATTR_N2O, ATTR_OZONE, ATTR_PM_10
from homeassistant.const import (
ATTR_ATTRIBUTION,
@@ -9,6 +11,12 @@ from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
@pytest.fixture(autouse=True)
async def setup_homeassistant(hass: HomeAssistant):
"""Set up the homeassistant integration."""
await async_setup_component(hass, "homeassistant", {})
async def test_state(hass: HomeAssistant) -> None:
"""Test Air Quality state."""
config = {"air_quality": {"platform": "demo"}}


@@ -84,3 +84,9 @@ async def test_airzone_create_binary_sensors(hass: HomeAssistant) -> None:
state = hass.states.get("binary_sensor.airzone_2_1_problem")
assert state.state == STATE_OFF
state = hass.states.get("binary_sensor.dkn_plus_battery_low")
assert state is None
state = hass.states.get("binary_sensor.dkn_plus_problem")
assert state.state == STATE_OFF


@@ -145,6 +145,24 @@ async def test_airzone_create_climates(hass: HomeAssistant) -> None:
assert state.attributes.get(ATTR_TARGET_TEMP_STEP) == API_TEMPERATURE_STEP
assert state.attributes.get(ATTR_TEMPERATURE) == 19.0
state = hass.states.get("climate.dkn_plus")
assert state.state == HVACMode.HEAT_COOL
assert state.attributes.get(ATTR_CURRENT_HUMIDITY) is None
assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 21.7
assert state.attributes.get(ATTR_HVAC_ACTION) == HVACAction.COOLING
assert state.attributes.get(ATTR_HVAC_MODES) == [
HVACMode.FAN_ONLY,
HVACMode.COOL,
HVACMode.HEAT,
HVACMode.DRY,
HVACMode.HEAT_COOL,
HVACMode.OFF,
]
assert state.attributes.get(ATTR_MAX_TEMP) == 32.2
assert state.attributes.get(ATTR_MIN_TEMP) == 17.8
assert state.attributes.get(ATTR_TARGET_TEMP_STEP) == API_TEMPERATURE_STEP
assert state.attributes.get(ATTR_TEMPERATURE) == 22.8
async def test_airzone_climate_turn_on_off(hass: HomeAssistant) -> None:
"""Test turning on."""


@@ -52,3 +52,9 @@ async def test_airzone_create_sensors(
state = hass.states.get("sensor.airzone_2_1_humidity")
assert state.state == "62"
state = hass.states.get("sensor.dkn_plus_temperature")
assert state.state == "21.7"
state = hass.states.get("sensor.dkn_plus_humidity")
assert state is None


@@ -7,10 +7,16 @@ from aioairzone.const import (
API_COLD_ANGLE,
API_COLD_STAGE,
API_COLD_STAGES,
API_COOL_MAX_TEMP,
API_COOL_MIN_TEMP,
API_COOL_SET_POINT,
API_DATA,
API_ERRORS,
API_FLOOR_DEMAND,
API_HEAT_ANGLE,
API_HEAT_MAX_TEMP,
API_HEAT_MIN_TEMP,
API_HEAT_SET_POINT,
API_HEAT_STAGE,
API_HEAT_STAGES,
API_HUMIDITY,
@@ -25,6 +31,8 @@ from aioairzone.const import (
API_ROOM_TEMP,
API_SET_POINT,
API_SLEEP,
API_SPEED,
API_SPEEDS,
API_SYSTEM_FIRMWARE,
API_SYSTEM_ID,
API_SYSTEM_TYPE,
@@ -216,6 +224,39 @@ HVAC_MOCK = {
},
]
},
{
API_DATA: [
{
API_SYSTEM_ID: 3,
API_ZONE_ID: 1,
API_NAME: "DKN Plus",
API_ON: 1,
API_COOL_SET_POINT: 73,
API_COOL_MAX_TEMP: 90,
API_COOL_MIN_TEMP: 64,
API_HEAT_SET_POINT: 77,
API_HEAT_MAX_TEMP: 86,
API_HEAT_MIN_TEMP: 50,
API_MAX_TEMP: 90,
API_MIN_TEMP: 64,
API_SET_POINT: 73,
API_ROOM_TEMP: 71,
API_MODES: [4, 2, 3, 5, 7],
API_MODE: 7,
API_SPEEDS: 5,
API_SPEED: 2,
API_COLD_STAGES: 0,
API_COLD_STAGE: 0,
API_HEAT_STAGES: 0,
API_HEAT_STAGE: 0,
API_HUMIDITY: 0,
API_UNITS: 1,
API_ERRORS: [],
API_AIR_DEMAND: 1,
API_FLOOR_DEMAND: 0,
},
]
},
]
}


@@ -0,0 +1,21 @@
"""Test config."""
import asyncio
from unittest.mock import patch
from homeassistant.core import HomeAssistant
from .test_common import get_default_config
async def test_enable_proactive_mode_in_parallel(hass: HomeAssistant) -> None:
"""Test enabling proactive mode does not happen in parallel."""
config = get_default_config(hass)
with patch(
"homeassistant.components.alexa.config.async_enable_proactive_mode"
) as mock_enable_proactive_mode:
await asyncio.gather(
config.async_enable_proactive_mode(), config.async_enable_proactive_mode()
)
mock_enable_proactive_mode.assert_awaited_once()
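
The test above exercises the race; a hedged sketch of one common way to serialize such concurrent calls (not necessarily the exact fix used here) is to remember the in-flight task under a lock so parallel callers share it instead of starting the work twice.

import asyncio

class ProactiveConfig:
    def __init__(self) -> None:
        self._lock = asyncio.Lock()
        self._task: asyncio.Task | None = None
        self.enable_calls = 0

    async def _enable(self) -> None:
        self.enable_calls += 1

    async def async_enable_proactive_mode(self) -> None:
        # Only the first caller creates the task; everyone awaits the same one.
        async with self._lock:
            if self._task is None:
                self._task = asyncio.create_task(self._enable())
        await self._task

async def main() -> None:
    config = ProactiveConfig()
    await asyncio.gather(
        config.async_enable_proactive_mode(), config.async_enable_proactive_mode()
    )
    assert config.enable_calls == 1

asyncio.run(main())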


@@ -39,6 +39,7 @@ def events(hass: HomeAssistant) -> list[Event]:
@pytest.fixture
async def mock_camera(hass: HomeAssistant) -> None:
"""Initialize a demo camera platform."""
assert await async_setup_component(hass, "homeassistant", {})
assert await async_setup_component(
hass, "camera", {camera.DOMAIN: {"platform": "demo"}}
)


@@ -1539,6 +1539,7 @@ async def test_automation_restore_last_triggered_with_initial_state(
async def test_extraction_functions(hass: HomeAssistant) -> None:
"""Test extraction functions."""
await async_setup_component(hass, "homeassistant", {})
await async_setup_component(hass, "calendar", {"calendar": {"platform": "demo"}})
assert await async_setup_component(
hass,


@@ -0,0 +1,11 @@
"""Test fixtures for calendar sensor platforms."""
import pytest
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
@pytest.fixture(autouse=True)
async def setup_homeassistant(hass: HomeAssistant):
"""Set up the homeassistant integration."""
await async_setup_component(hass, "homeassistant", {})


@@ -1,6 +1,8 @@
"""The tests for calendar recorder."""
from datetime import timedelta
import pytest
from homeassistant.components.recorder import Recorder
from homeassistant.components.recorder.history import get_significant_states
from homeassistant.const import ATTR_FRIENDLY_NAME
@@ -12,9 +14,15 @@ from tests.common import async_fire_time_changed
from tests.components.recorder.common import async_wait_recording_done
@pytest.fixture(autouse=True)
async def setup_homeassistant():
"""Override the fixture in calendar.conftest."""
async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) -> None:
"""Test sensor attributes to be excluded."""
now = dt_util.utcnow()
await async_setup_component(hass, "homeassistant", {})
await async_setup_component(hass, "calendar", {"calendar": {"platform": "demo"}})
await hass.async_block_till_done()


@@ -5,11 +5,18 @@ import pytest
from homeassistant.components import camera
from homeassistant.components.camera.const import StreamType
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from .common import WEBRTC_ANSWER
@pytest.fixture(autouse=True)
async def setup_homeassistant(hass: HomeAssistant):
"""Set up the homeassistant integration."""
await async_setup_component(hass, "homeassistant", {})
@pytest.fixture(name="mock_camera")
async def mock_camera_fixture(hass):
"""Initialize a demo camera platform."""


@@ -370,6 +370,7 @@ async def test_websocket_update_orientation_prefs(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, mock_camera
) -> None:
"""Test updating camera preferences."""
await async_setup_component(hass, "homeassistant", {})
client = await hass_ws_client(hass)


@@ -3,6 +3,8 @@ from __future__ import annotations
from datetime import timedelta
import pytest
from homeassistant.components import camera
from homeassistant.components.recorder import Recorder
from homeassistant.components.recorder.history import get_significant_states
@@ -20,9 +22,15 @@ from tests.common import async_fire_time_changed
from tests.components.recorder.common import async_wait_recording_done
@pytest.fixture(autouse=True)
async def setup_homeassistant():
"""Override the fixture in calendar.conftest."""
async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) -> None:
"""Test camera registered attributes to be excluded."""
now = dt_util.utcnow()
await async_setup_component(hass, "homeassistant", {})
await async_setup_component(
hass, camera.DOMAIN, {camera.DOMAIN: {"platform": "demo"}}
)


@@ -1888,6 +1888,7 @@ async def test_failed_cast_other_url(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
"""Test warning when casting from internal_url fails."""
await async_setup_component(hass, "homeassistant", {})
with assert_setup_component(1, tts.DOMAIN):
assert await async_setup_component(
hass,
@@ -1911,6 +1912,7 @@ async def test_failed_cast_internal_url(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
"""Test warning when casting from internal_url fails."""
await async_setup_component(hass, "homeassistant", {})
await async_process_ha_core_config(
hass,
{"internal_url": "http://example.local:8123"},
@@ -1939,6 +1941,7 @@ async def test_failed_cast_external_url(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
"""Test warning when casting from external_url fails."""
await async_setup_component(hass, "homeassistant", {})
await async_process_ha_core_config(
hass,
{"external_url": "http://example.com:8123"},
@@ -1969,6 +1972,7 @@ async def test_failed_cast_tts_base_url(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
"""Test warning when casting from tts.base_url fails."""
await async_setup_component(hass, "homeassistant", {})
with assert_setup_component(1, tts.DOMAIN):
assert await async_setup_component(
hass,


@@ -29,6 +29,7 @@ from tests.components.recorder.common import async_wait_recording_done
async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) -> None:
"""Test climate registered attributes to be excluded."""
now = dt_util.utcnow()
await async_setup_component(hass, "homeassistant", {})
await async_setup_component(
hass, climate.DOMAIN, {climate.DOMAIN: {"platform": "demo"}}
)

Some files were not shown because too many files have changed in this diff.