Compare commits

..

69 Commits

Author SHA1 Message Date
Paulus Schoutsen ced9248051 Bumped version to 2022.12.0b6 2022-12-06 13:22:52 -05:00
Bram Kragten 52ed121970 Update frontend to 20221206.0 (#83415) 2022-12-06 13:22:45 -05:00
Allen Porter a2ba126be1 Tighten validation on calendar create event websocket (#83413) 2022-12-06 13:22:44 -05:00
Martin Hjelmare 1cfd292075 Bypass zwave_js config validation if driver not ready (#83410) 2022-12-06 13:22:43 -05:00
Franck Nijhof 048553cd02 Fix Goalzero time to empty sensor device class (#83389)
* Fix Goalzero time to empty sensor device class

* Fix tests
2022-12-06 13:22:43 -05:00
epenet 3a7efddb4b Fix sensor schema for device classes (#83378) 2022-12-06 13:22:42 -05:00
Raman Gupta 861a8ee3c7 Fix zwave_js_value_updated event (#83358)
* Fix zwave_js_value_updated event

* Fix zwave_js_value_updated event
2022-12-06 13:22:41 -05:00
Marcel van der Veldt 2b0c0cc6d2 Replace fixtures for Matter tests (#83328)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2022-12-06 13:22:40 -05:00
Paulus Schoutsen d14655d891 Bumped version to 2022.12.0b5 2022-12-05 22:43:59 -05:00
Erik Montnemery 1f31e621c8 Fix repairing datetime precision for PostgreSQL (#83351) 2022-12-05 22:43:55 -05:00
puddly afd27792da Bump ZHA dependencies (#83350) 2022-12-05 22:43:55 -05:00
Bram Kragten 7fc3708f76 Update frontend to 20221205.0 (#83349) 2022-12-05 22:43:54 -05:00
Bouwe Westerdijk 351bdff531 Update plugwise to v0.25.14, improve number detection (#83345)
fixes undefined
2022-12-05 22:43:53 -05:00
J. Nick Koston 604c4588cc Bump aiohomekit to 2.4.1 (#83341) 2022-12-05 22:43:52 -05:00
Martin Hjelmare 218046bfa5 Deprecate the Xbox Live integration (#83339) 2022-12-05 22:43:52 -05:00
Michał Ajduk fed08a9d80 Fix Tuya number native value raw return (#83333)
fixes undefined
2022-12-05 22:43:51 -05:00
J. Nick Koston f951808863 Fix bluetooth device connection failure when device is seen by dbus but not bleak (#83281) 2022-12-05 22:43:50 -05:00
Maikel Punie c34f8dc246 Small fix for the velbus clear-cache service (#83279) 2022-12-05 22:43:50 -05:00
Christopher Bailey 8985a3cc06 Update UniFi Protect ring entity to use event entity (#83270) 2022-12-05 22:43:49 -05:00
Garrett b30c98c525 Bump subarulink to v0.7.0 (#83213)
fixes undefined
2022-12-05 22:43:48 -05:00
Stephan Singer 8c02c778d4 Add IPv6 sensor to fritz component (#75708)
* Add IPv6 sensor to fritz component

* Cast return type to string

* Make ipv6 sensor suitable

* simplify cast to str

* use existing property

Co-authored-by: chemelli74 <simone.chemelli@gmail.com>
Co-authored-by: mib1185 <mail@mib85.de>
2022-12-05 22:43:47 -05:00
Paulus Schoutsen e862caa704 Bumped version to 2022.12.0b4 2022-12-04 21:17:16 -05:00
Joris Pelgröm 8d433aa481 Fix mobile_app returning cloud URL when not subscribed (#83257)
fixes undefined
2022-12-04 21:17:09 -05:00
Allen Porter d89d2d3b33 Bump ical to 4.2.1 (#83254) 2022-12-04 21:17:08 -05:00
jjlawren 8c92f99cda Do not discard zero positions for Sonos media players (#83249)
fixes undefined
2022-12-04 21:17:08 -05:00
J. Nick Koston 17cd7d0a85 Avoid regex overhead in processing esphome bluetooth advertisements (#83246) 2022-12-04 21:17:07 -05:00
Allen Porter 7da31f6ee4 Bump gcal_sync to 4.0.4 (#83245) 2022-12-04 21:17:06 -05:00
Allen Porter 5d8650484b Bump ical to 4.2.0 (#83244) 2022-12-04 21:17:05 -05:00
J. Nick Koston 1aa2820c63 Bump pySwitchbot to 0.22.0 (#83243)
fixes https://github.com/home-assistant/core/issues/82961
2022-12-04 21:17:04 -05:00
Simone Chemelli c1e808cc84 Fix missing title placeholders in Synology DSM reauth flow (#83238)
* Fix missing title placeholders in reauth flow

* fix tests

Co-authored-by: mib1185 <mail@mib85.de>
2022-12-04 21:17:03 -05:00
J. Nick Koston d6b691e3e1 Bump aioesphomeapi to 13.0.1 (#83223)
fixes https://github.com/home-assistant/core/issues/83212
2022-12-04 21:17:02 -05:00
Joe Rogers eb0450ad0c Handle numeric versions in mqtt update (#83218)
* Handle numeric versions in mqtt update

* Remove need for type:ignore

Co-authored-by: Jan Bouwhuis <jbouwh@users.noreply.github.com>

Co-authored-by: Jan Bouwhuis <jbouwh@users.noreply.github.com>
2022-12-04 21:17:01 -05:00
Michael Chisholm d14324c792 Update async-upnp-client to 0.32.3 (#83215) 2022-12-04 21:17:01 -05:00
J. Nick Koston d32c32608b Raise an exception when an esp proxy gets empty services during connection so callers know to retry (#83211) 2022-12-04 21:17:00 -05:00
J. Nick Koston be94e67ecc Restore HomeKit Controller BLE GSN at startup (#83206) 2022-12-04 21:16:59 -05:00
Olen fe170ccd44 Switch Twinkly to awesomeversion (#83205)
Switch to awesomeversion
2022-12-04 21:16:58 -05:00
Aidan Timson f396d66386 Update aiolyric to 1.0.9 (#83190) 2022-12-04 21:16:58 -05:00
Paulus Schoutsen 7f5ae00d58 Bumped version to 2022.12.0b3 2022-12-03 14:56:41 -05:00
Aaron Bach ccd5783daf Fix SimpliSafe service calls that require a device selector (#83200)
fixes undefined
2022-12-03 14:56:36 -05:00
J. Nick Koston f19322b1f3 Bump yalexs-ble to 1.10.2 (#83199) 2022-12-03 14:56:36 -05:00
J. Nick Koston c63bb0e1d5 Bump pySwitchbot to 0.20.8 (#83197)
fixes https://github.com/home-assistant/core/issues/80491
2022-12-03 14:56:35 -05:00
J. Nick Koston 284c226cc0 Bump aiohomekit to 2.3.6 (#83196) 2022-12-03 14:56:34 -05:00
Allen Porter 5c918e9053 Bump ical to 4.1.2 to fix emoji in local calendar (#83193)
Bump ical to 4.1.2
2022-12-03 14:56:33 -05:00
J. Nick Koston 1a2f23f110 Fix missing services with esp32 proxies (#83192) 2022-12-03 14:56:33 -05:00
Joakim Sørensen b015c5ab0c Add CLOSED as an expected type (#83180) 2022-12-03 14:56:32 -05:00
Daniel Hjelseth Høyer 31725df4d5 Update pyTibber to 0.26.3 (#83175)
Update Tibber lib 0.26.3

Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>

Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2022-12-03 14:56:31 -05:00
J. Nick Koston 968a3c4eb2 Bump pySwitchbot to 0.20.7 (#83170)
changelog: https://github.com/Danielhiversen/pySwitchbot/compare/0.20.5...0.20.7
2022-12-03 14:56:30 -05:00
Kevin Stillhammer 75038d420c Raise UpdateFailed when here_travel_time cannot find_location (#83157)
Fixes https://github.com/home-assistant/core/issues/83100
fixes undefined
2022-12-03 14:56:30 -05:00
Olen bb827a60ed Support older twinkly devices without effects (#83145)
fixes undefined
2022-12-03 14:56:29 -05:00
Joakim Sørensen a7993e0640 Bump hass-nabucasa from 0.56.0 to 0.59.0 (#82987)
* Bump hass-nabucasa from 0.56.0 to 0.58.0

* 0.59.0
2022-12-03 14:56:28 -05:00
Matthias Alphart fcb3445b8e KNX Config/OptionsFlow: Test connection to manually configured tunnel (#82872) 2022-12-03 14:56:27 -05:00
Lars 96cb856308 Add integration_type to AVM FRITZ!SmartHome (#81096) 2022-12-03 14:56:27 -05:00
Paulus Schoutsen 627f337e1e Bumped version to 2022.12.0b2 2022-12-02 21:01:38 -05:00
J. Nick Koston 5c8ccc89b1 Bump aiohomekit to 2.3.5 (#83168) 2022-12-02 21:01:31 -05:00
J. Nick Koston 2b4587a7a8 Fix race setting up homekit controller triggers (#83166)
fixes https://github.com/home-assistant/core/issues/83165
2022-12-02 21:01:30 -05:00
J. Nick Koston f5de016f25 Fix reauth with esphome when adding noise encryption (#83164)
* Fix reauth with esphome when adding noise encryption

fixes #80813

* fix with unique id
2022-12-02 21:01:29 -05:00
J. Nick Koston 81c7a24133 Fix logging the wrong bluetooth adapter while connecting and out of slots (#83158) 2022-12-02 21:01:28 -05:00
J. Nick Koston e13413ee09 Bump bluetooth-auto-recovery to 0.5.4 (#83155)
changelog: https://github.com/Bluetooth-Devices/bluetooth-auto-recovery/compare/v0.5.3...v0.5.4
2022-12-02 21:01:28 -05:00
J. Nick Koston 0be9391d79 Fix esphome ble client leaking notify on disconnect (#83106)
* Fix esphome ble client leaking notify on disconnect

needs: https://github.com/esphome/aioesphomeapi/pull/329

* leak

* more cleanup

* more cleanup

* bump
2022-12-02 21:01:27 -05:00
Olen daad93dd31 Fix twinkly effects (#83104)
Adding additional checks
2022-12-02 21:01:26 -05:00
J. Nick Koston f635751020 Bump bleak-retry-connector to 2.8.7 (#83095)
changelog: https://github.com/Bluetooth-Devices/bleak-retry-connector/compare/v2.8.6...v2.8.7
2022-12-02 21:01:25 -05:00
J. Nick Koston 67d03031d5 Bump aiohomekit to 2.3.4 (#83094) 2022-12-02 21:01:25 -05:00
Raman Gupta 83af8434cd Bump zwave-js-server-python to 0.43.1 (#83093) 2022-12-02 21:01:24 -05:00
J. Nick Koston f13f165d64 Fix wrong ble device being logged (#83091)
This code can be simplified a bit now that we
have the fast bleak lookup. We do connect
via the correct path, but we logged the wrong one
2022-12-02 21:01:23 -05:00
Christopher Bailey 1f804e2eed Bump pyunifiprotect to 4.5.2 (#83090) 2022-12-02 21:01:22 -05:00
Aaron Bach 52b3a309f8 Return empty data when OpenUV API call fails (#83089)
Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>
fixes undefined
2022-12-02 21:01:22 -05:00
Marc Mueller ee467e0f3a Fix prettier [ci] (#83077) 2022-12-02 21:01:21 -05:00
majuss 305dfda9ac Bump lupupy to 0.2.1 (#83071) 2022-12-02 21:01:20 -05:00
Alex Yao e6b0d93c1d Fix html5 Firefox Notifications (#82556)
Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>
fixes undefined
2022-12-02 21:01:19 -05:00
130 changed files with 14995 additions and 1699 deletions
@@ -2,7 +2,7 @@
"domain": "august",
"name": "August",
"documentation": "https://www.home-assistant.io/integrations/august",
"requirements": ["yalexs==1.2.6", "yalexs_ble==1.10.0"],
"requirements": ["yalexs==1.2.6", "yalexs_ble==1.10.2"],
"codeowners": ["@bdraco"],
"dhcp": [
{
@@ -7,9 +7,9 @@
"quality_scale": "internal",
"requirements": [
"bleak==0.19.2",
"bleak-retry-connector==2.8.6",
"bluetooth-adapters==0.11.0",
"bluetooth-auto-recovery==0.5.3",
"bleak-retry-connector==2.10.1",
"bluetooth-adapters==0.12.0",
"bluetooth-auto-recovery==0.5.4",
"bluetooth-data-tools==0.3.0",
"dbus-fast==1.75.0"
],
@@ -16,6 +16,7 @@ from bleak.backends.bluezdbus.advertisement_monitor import OrPattern
from bleak.backends.bluezdbus.scanner import BlueZScannerArgs
from bleak.backends.device import BLEDevice
from bleak.backends.scanner import AdvertisementData, AdvertisementDataCallback
from bleak_retry_connector import restore_discoveries
from bluetooth_adapters import DEFAULT_ADDRESS
from dbus_fast import InvalidMessageError
@@ -314,6 +315,7 @@ class HaScanner(BaseHaScanner):
self.scanning = True
self._async_setup_scanner_watchdog()
await restore_discoveries(self.scanner, self.adapter)
@hass_callback
def _async_setup_scanner_watchdog(self) -> None:
+23 -12
View File
@@ -12,7 +12,7 @@ from bleak import BleakClient, BleakError
from bleak.backends.client import BaseBleakClient, get_platform_client_backend_type
from bleak.backends.device import BLEDevice
from bleak.backends.scanner import AdvertisementDataCallback, BaseBleakScanner
from bleak_retry_connector import NO_RSSI_VALUE
from bleak_retry_connector import NO_RSSI_VALUE, ble_device_description, clear_cache
from homeassistant.core import CALLBACK_TYPE, callback as hass_callback
from homeassistant.helpers.frame import report
@@ -162,7 +162,6 @@ class HaBleakClientWrapper(BleakClient):
self.__address = address_or_ble_device
self.__disconnected_callback = disconnected_callback
self.__timeout = timeout
self.__ble_device: BLEDevice | None = None
self._backend: BaseBleakClient | None = None # type: ignore[assignment]
@property
@@ -170,6 +169,12 @@ class HaBleakClientWrapper(BleakClient):
"""Return True if the client is connected to a device."""
return self._backend is not None and self._backend.is_connected
async def clear_cache(self) -> bool:
"""Clear the GATT cache."""
if self._backend is not None and hasattr(self._backend, "clear_cache"):
return await self._backend.clear_cache() # type: ignore[no-any-return]
return await clear_cache(self.__address)
def set_disconnected_callback(
self,
callback: Callable[[BleakClient], None] | None,
@@ -183,17 +188,22 @@ class HaBleakClientWrapper(BleakClient):
async def connect(self, **kwargs: Any) -> bool:
"""Connect to the specified GATT server."""
assert models.MANAGER is not None
(
wrapped_backend,
self.__ble_device,
) = self._async_get_best_available_backend_and_device()
wrapped_backend = self._async_get_best_available_backend_and_device()
self._backend = wrapped_backend.client(
self.__ble_device,
wrapped_backend.device,
disconnected_callback=self.__disconnected_callback,
timeout=self.__timeout,
hass=models.MANAGER.hass,
)
return await super().connect(**kwargs)
if debug_logging := _LOGGER.isEnabledFor(logging.DEBUG):
# Only lookup the description if we are going to log it
description = ble_device_description(wrapped_backend.device)
rssi = wrapped_backend.device.rssi
_LOGGER.debug("%s: Connecting (last rssi: %s)", description, rssi)
connected = await super().connect(**kwargs)
if debug_logging:
_LOGGER.debug("%s: Connected (last rssi: %s)", description, rssi)
return connected
@hass_callback
def _async_get_backend_for_ble_device(
@@ -218,7 +228,7 @@ class HaBleakClientWrapper(BleakClient):
@hass_callback
def _async_get_best_available_backend_and_device(
self,
) -> tuple[_HaWrappedBleakBackend, BLEDevice]:
) -> _HaWrappedBleakBackend:
"""Get a best available backend and device for the given address.
This method will return the backend with the best rssi
@@ -235,9 +245,10 @@ class HaBleakClientWrapper(BleakClient):
or NO_RSSI_VALUE,
reverse=True,
):
ble_device = device_advertisement_data[0]
if backend := self._async_get_backend_for_ble_device(ble_device):
return backend, ble_device
if backend := self._async_get_backend_for_ble_device(
device_advertisement_data[0]
):
return backend
raise BleakError(
f"No backend with an available connection slot that can reach address {address} was found"
+59 -8
View File
@@ -1,10 +1,11 @@
"""Support for Google Calendar event device sensors."""
from __future__ import annotations
from collections.abc import Iterable
from collections.abc import Callable, Iterable
import dataclasses
import datetime
from http import HTTPStatus
from itertools import groupby
import logging
import re
from typing import Any, cast, final
@@ -365,17 +366,67 @@ class CalendarListView(http.HomeAssistantView):
return self.json(sorted(calendar_list, key=lambda x: cast(str, x["name"])))
def _has_same_type(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that all values are of the same type."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Test that all keys in the dict have values of the same type."""
uniq_values = groupby(type(obj[k]) for k in keys)
if len(list(uniq_values)) > 1:
raise vol.Invalid(f"Expected all values to be the same type: {keys}")
return obj
return validate
def _has_consistent_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that all datetime values have a consistent timezone."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Test that all keys that are datetime values have the same timezone."""
values = [obj[k] for k in keys]
if all(isinstance(value, datetime.datetime) for value in values):
uniq_values = groupby(value.tzinfo for value in values)
if len(list(uniq_values)) > 1:
raise vol.Invalid(
f"Expected all values to have the same timezone: {values}"
)
return obj
return validate
def _is_sorted(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that the specified values are sequential."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Test that all keys in the dict are in order."""
values = [obj[k] for k in keys]
if values != sorted(values):
raise vol.Invalid(f"Values were not in order: {values}")
return obj
return validate
@websocket_api.websocket_command(
{
vol.Required("type"): "calendar/event/create",
vol.Required("entity_id"): cv.entity_id,
vol.Required(CONF_EVENT): {
vol.Required(EVENT_START): vol.Any(cv.date, cv.datetime),
vol.Required(EVENT_END): vol.Any(cv.date, cv.datetime),
vol.Required(EVENT_SUMMARY): cv.string,
vol.Optional(EVENT_DESCRIPTION): cv.string,
vol.Optional(EVENT_RRULE): _validate_rrule,
},
CONF_EVENT: vol.Schema(
vol.All(
{
vol.Required(EVENT_START): vol.Any(cv.date, cv.datetime),
vol.Required(EVENT_END): vol.Any(cv.date, cv.datetime),
vol.Required(EVENT_SUMMARY): cv.string,
vol.Optional(EVENT_DESCRIPTION): cv.string,
vol.Optional(EVENT_RRULE): _validate_rrule,
},
_has_same_type(EVENT_START, EVENT_END),
_has_consistent_timezone(EVENT_START, EVENT_END),
_is_sorted(EVENT_START, EVENT_END),
)
),
}
)
@websocket_api.async_response
+20 -18
View File
@@ -36,22 +36,23 @@ from homeassistant.util.aiohttp import MockRequest
from . import account_link, http_api
from .client import CloudClient
from .const import (
CONF_ACCOUNT_LINK_URL,
CONF_ACME_DIRECTORY_SERVER,
CONF_ACCOUNT_LINK_SERVER,
CONF_ACCOUNTS_SERVER,
CONF_ACME_SERVER,
CONF_ALEXA,
CONF_ALEXA_ACCESS_TOKEN_URL,
CONF_ALEXA_SERVER,
CONF_ALIASES,
CONF_CLOUDHOOK_CREATE_URL,
CONF_CLOUDHOOK_SERVER,
CONF_COGNITO_CLIENT_ID,
CONF_ENTITY_CONFIG,
CONF_FILTER,
CONF_GOOGLE_ACTIONS,
CONF_GOOGLE_ACTIONS_REPORT_STATE_URL,
CONF_RELAYER,
CONF_REMOTE_API_URL,
CONF_SUBSCRIPTION_INFO_URL,
CONF_RELAYER_SERVER,
CONF_REMOTE_SNI_SERVER,
CONF_REMOTESTATE_SERVER,
CONF_THINGTALK_SERVER,
CONF_USER_POOL_ID,
CONF_VOICE_API_URL,
CONF_VOICE_SERVER,
DOMAIN,
MODE_DEV,
MODE_PROD,
@@ -107,17 +108,18 @@ CONFIG_SCHEMA = vol.Schema(
vol.Optional(CONF_COGNITO_CLIENT_ID): str,
vol.Optional(CONF_USER_POOL_ID): str,
vol.Optional(CONF_REGION): str,
vol.Optional(CONF_RELAYER): str,
vol.Optional(CONF_SUBSCRIPTION_INFO_URL): vol.Url(),
vol.Optional(CONF_CLOUDHOOK_CREATE_URL): vol.Url(),
vol.Optional(CONF_REMOTE_API_URL): vol.Url(),
vol.Optional(CONF_ACME_DIRECTORY_SERVER): vol.Url(),
vol.Optional(CONF_ALEXA): ALEXA_SCHEMA,
vol.Optional(CONF_GOOGLE_ACTIONS): GACTIONS_SCHEMA,
vol.Optional(CONF_ALEXA_ACCESS_TOKEN_URL): vol.Url(),
vol.Optional(CONF_GOOGLE_ACTIONS_REPORT_STATE_URL): vol.Url(),
vol.Optional(CONF_ACCOUNT_LINK_URL): vol.Url(),
vol.Optional(CONF_VOICE_API_URL): vol.Url(),
vol.Optional(CONF_ACCOUNT_LINK_SERVER): str,
vol.Optional(CONF_ACCOUNTS_SERVER): str,
vol.Optional(CONF_ACME_SERVER): str,
vol.Optional(CONF_ALEXA_SERVER): str,
vol.Optional(CONF_CLOUDHOOK_SERVER): str,
vol.Optional(CONF_RELAYER_SERVER): str,
vol.Optional(CONF_REMOTE_SNI_SERVER): str,
vol.Optional(CONF_REMOTESTATE_SERVER): str,
vol.Optional(CONF_THINGTALK_SERVER): str,
vol.Optional(CONF_VOICE_SERVER): str,
}
)
},
+11 -9
View File
@@ -47,16 +47,18 @@ CONF_COGNITO_CLIENT_ID = "cognito_client_id"
CONF_ENTITY_CONFIG = "entity_config"
CONF_FILTER = "filter"
CONF_GOOGLE_ACTIONS = "google_actions"
CONF_RELAYER = "relayer"
CONF_USER_POOL_ID = "user_pool_id"
CONF_SUBSCRIPTION_INFO_URL = "subscription_info_url"
CONF_CLOUDHOOK_CREATE_URL = "cloudhook_create_url"
CONF_REMOTE_API_URL = "remote_api_url"
CONF_ACME_DIRECTORY_SERVER = "acme_directory_server"
CONF_ALEXA_ACCESS_TOKEN_URL = "alexa_access_token_url"
CONF_GOOGLE_ACTIONS_REPORT_STATE_URL = "google_actions_report_state_url"
CONF_ACCOUNT_LINK_URL = "account_link_url"
CONF_VOICE_API_URL = "voice_api_url"
CONF_ACCOUNT_LINK_SERVER = "account_link_server"
CONF_ACCOUNTS_SERVER = "accounts_server"
CONF_ACME_SERVER = "acme_server"
CONF_ALEXA_SERVER = "alexa_server"
CONF_CLOUDHOOK_SERVER = "cloudhook_server"
CONF_RELAYER_SERVER = "relayer_server"
CONF_REMOTE_SNI_SERVER = "remote_sni_server"
CONF_REMOTESTATE_SERVER = "remotestate_server"
CONF_THINGTALK_SERVER = "thingtalk_server"
CONF_VOICE_SERVER = "voice_server"
MODE_DEV = "development"
MODE_PROD = "production"
+1 -1
View File
@@ -2,7 +2,7 @@
"domain": "cloud",
"name": "Home Assistant Cloud",
"documentation": "https://www.home-assistant.io/integrations/cloud",
"requirements": ["hass-nabucasa==0.56.0"],
"requirements": ["hass-nabucasa==0.59.0"],
"dependencies": ["http", "webhook"],
"after_dependencies": ["google_assistant", "alexa"],
"codeowners": ["@home-assistant/cloud"],
@@ -1,6 +1,5 @@
"""Provide info to system health."""
from hass_nabucasa import Cloud
from yarl import URL
from homeassistant.components import system_health
from homeassistant.core import HomeAssistant, callback
@@ -36,14 +35,14 @@ async def system_health_info(hass):
data["remote_server"] = cloud.remote.snitun_server
data["can_reach_cert_server"] = system_health.async_check_can_reach_url(
hass, cloud.acme_directory_server
hass, f"https://{cloud.acme_server}/directory"
)
data["can_reach_cloud_auth"] = system_health.async_check_can_reach_url(
hass,
f"https://cognito-idp.{cloud.region}.amazonaws.com/{cloud.user_pool_id}/.well-known/jwks.json",
)
data["can_reach_cloud"] = system_health.async_check_can_reach_url(
hass, URL(cloud.relayer).with_scheme("https").with_path("/status")
hass, f"https://{cloud.relayer_server}/status"
)
return data
@@ -3,7 +3,7 @@
"name": "DLNA Digital Media Renderer",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/dlna_dmr",
"requirements": ["async-upnp-client==0.32.2"],
"requirements": ["async-upnp-client==0.32.3"],
"dependencies": ["ssdp"],
"after_dependencies": ["media_source"],
"ssdp": [
@@ -3,7 +3,7 @@
"name": "DLNA Digital Media Server",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/dlna_dms",
"requirements": ["async-upnp-client==0.32.2"],
"requirements": ["async-upnp-client==0.32.3"],
"dependencies": ["ssdp"],
"after_dependencies": ["media_source"],
"ssdp": [
@@ -142,7 +142,9 @@ class ESPHomeClient(BaseBleakClient):
self._is_connected = False
self._mtu: int | None = None
self._cancel_connection_state: CALLBACK_TYPE | None = None
self._notify_cancels: dict[int, Callable[[], Coroutine[Any, Any, None]]] = {}
self._notify_cancels: dict[
int, tuple[Callable[[], Coroutine[Any, Any, None]], Callable[[], None]]
] = {}
self._disconnected_event: asyncio.Event | None = None
device_info = self.entry_data.device_info
assert device_info is not None
@@ -169,15 +171,22 @@ class ESPHomeClient(BaseBleakClient):
)
self._cancel_connection_state = None
def _async_ble_device_disconnected(self) -> None:
"""Handle the BLE device disconnecting from the ESP."""
was_connected = self._is_connected
def _async_disconnected_cleanup(self) -> None:
"""Clean up on disconnect."""
self.services = BleakGATTServiceCollection() # type: ignore[no-untyped-call]
self._is_connected = False
for _, notify_abort in self._notify_cancels.values():
notify_abort()
self._notify_cancels.clear()
if self._disconnected_event:
self._disconnected_event.set()
self._disconnected_event = None
self._unsubscribe_connection_state()
def _async_ble_device_disconnected(self) -> None:
"""Handle the BLE device disconnecting from the ESP."""
was_connected = self._is_connected
self._async_disconnected_cleanup()
if was_connected:
_LOGGER.debug(
"%s: %s - %s: BLE device disconnected",
@@ -186,7 +195,6 @@ class ESPHomeClient(BaseBleakClient):
self._ble_device.address,
)
self._async_call_bleak_disconnected_callback()
self._unsubscribe_connection_state()
def _async_esp_disconnected(self) -> None:
"""Handle the esp32 client disconnecting from hass."""
@@ -309,14 +317,27 @@ class ESPHomeClient(BaseBleakClient):
connected_future.cancel()
raise
await connected_future
await self.get_services(dangerous_use_bleak_cache=dangerous_use_bleak_cache)
try:
await self.get_services(dangerous_use_bleak_cache=dangerous_use_bleak_cache)
except asyncio.CancelledError:
# On cancel we must still raise cancelled error
# to avoid blocking the cancellation even if the
# disconnect call fails.
with contextlib.suppress(Exception):
await self.disconnect()
raise
except Exception:
await self.disconnect()
raise
self._disconnected_event = asyncio.Event()
return True
@api_error_as_bleak_error
async def disconnect(self) -> bool:
"""Disconnect from the peripheral device."""
self._unsubscribe_connection_state()
self._async_disconnected_cleanup()
await self._client.bluetooth_device_disconnect(self._address_as_int)
await self._wait_for_free_connection_slot(DISCONNECT_TIMEOUT)
return True
@@ -419,6 +440,12 @@ class ESPHomeClient(BaseBleakClient):
characteristic.handle,
)
)
if not esphome_services.services:
# If we got no services, we must have disconnected
# or something went wrong on the ESP32's BLE stack.
raise BleakError("Failed to get services from remote esp")
self.services = services
_LOGGER.debug(
"%s: %s - %s: Cached services saved",
@@ -441,6 +468,11 @@ class ESPHomeClient(BaseBleakClient):
raise BleakError(f"Characteristic {char_specifier} was not found!")
return characteristic
async def clear_cache(self) -> None:
"""Clear the GATT cache."""
self.entry_data.clear_gatt_services_cache(self._address_as_int)
self.entry_data.clear_gatt_mtu_cache(self._address_as_int)
@verify_connected
@api_error_as_bleak_error
async def read_gatt_char(
@@ -551,12 +583,13 @@ class ESPHomeClient(BaseBleakClient):
f"Characteristic {characteristic.uuid} does not have notify or indicate property set."
)
cancel_coro = await self._client.bluetooth_gatt_start_notify(
self._notify_cancels[
ble_handle
] = await self._client.bluetooth_gatt_start_notify(
self._address_as_int,
ble_handle,
lambda handle, data: callback(data),
)
self._notify_cancels[ble_handle] = cancel_coro
if self._connection_version < MIN_BLUETOOTH_PROXY_VERSION_HAS_CACHE:
return
@@ -604,8 +637,9 @@ class ESPHomeClient(BaseBleakClient):
characteristic = self._resolve_characteristic(char_specifier)
# Do not raise KeyError if notifications are not enabled on this characteristic
# to be consistent with the behavior of the BlueZ backend
if coro := self._notify_cancels.pop(characteristic.handle, None):
await coro()
if notify_cancel := self._notify_cancels.pop(characteristic.handle, None):
notify_stop, _ = notify_cancel
await notify_stop()
def __del__(self) -> None:
"""Destructor to make sure the connection state is unsubscribed."""
@@ -617,4 +651,4 @@ class ESPHomeClient(BaseBleakClient):
self._ble_device.address,
)
if not self._hass.loop.is_closed():
self._hass.loop.call_soon_threadsafe(self._unsubscribe_connection_state)
self._hass.loop.call_soon_threadsafe(self._async_disconnected_cleanup)
@@ -1,7 +1,6 @@
"""Bluetooth scanner for esphome."""
from __future__ import annotations
import re
from typing import Any
from aioesphomeapi import BluetoothLEAdvertisement
@@ -9,8 +8,6 @@ from aioesphomeapi import BluetoothLEAdvertisement
from homeassistant.components.bluetooth import BaseHaRemoteScanner
from homeassistant.core import callback
TWO_CHAR = re.compile("..")
class ESPHomeScanner(BaseHaRemoteScanner):
"""Scanner for esphome."""
@@ -18,9 +15,10 @@ class ESPHomeScanner(BaseHaRemoteScanner):
@callback
def async_on_advertisement(self, adv: BluetoothLEAdvertisement) -> None:
"""Call the registered callback."""
address = ":".join(TWO_CHAR.findall("%012X" % adv.address)) # must be upper
# The mac address is a uint64, but we need a string
mac_hex = f"{adv.address:012X}"
self._async_on_advertisement(
address,
f"{mac_hex[0:2]}:{mac_hex[2:4]}:{mac_hex[4:6]}:{mac_hex[6:8]}:{mac_hex[8:10]}:{mac_hex[10:12]}",
adv.rssi,
adv.name,
adv.service_uuids,
@@ -17,7 +17,7 @@ from aioesphomeapi import (
import voluptuous as vol
from homeassistant.components import dhcp, zeroconf
from homeassistant.config_entries import ConfigFlow
from homeassistant.config_entries import ConfigEntry, ConfigFlow
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
@@ -40,6 +40,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
self._password: str | None = None
self._noise_psk: str | None = None
self._device_info: DeviceInfo | None = None
self._reauth_entry: ConfigEntry | None = None
async def _async_step_user_base(
self, user_input: dict[str, Any] | None = None, error: str | None = None
@@ -72,6 +73,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a flow initialized by a reauth event."""
entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
assert entry is not None
self._reauth_entry = entry
self._host = entry.data[CONF_HOST]
self._port = entry.data[CONF_PORT]
self._password = entry.data[CONF_PASSWORD]
@@ -245,10 +247,11 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
CONF_PASSWORD: self._password or "",
CONF_NOISE_PSK: self._noise_psk or "",
}
if "entry_id" in self.context:
entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
assert entry is not None
self.hass.config_entries.async_update_entry(entry, data=config_data)
if self._reauth_entry:
entry = self._reauth_entry
self.hass.config_entries.async_update_entry(
entry, data=self._reauth_entry.data | config_data
)
# Reload the config entry to notify of updated config
self.hass.async_create_task(
self.hass.config_entries.async_reload(entry.entry_id)
@@ -332,7 +335,8 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
self._name = self._device_info.name
await self.async_set_unique_id(self._name, raise_on_progress=False)
self._abort_if_unique_id_configured(updates={CONF_HOST: self._host})
if not self._reauth_entry:
self._abort_if_unique_id_configured(updates={CONF_HOST: self._host})
return None
@@ -119,6 +119,10 @@ class RuntimeEntryData:
"""Set the BleakGATTServiceCollection for the given address."""
self._gatt_services_cache[address] = services
def clear_gatt_services_cache(self, address: int) -> None:
"""Clear the BleakGATTServiceCollection for the given address."""
self._gatt_services_cache.pop(address, None)
def get_gatt_mtu_cache(self, address: int) -> int | None:
"""Get the mtu cache for the given address."""
return self._gatt_mtu_cache.get(address)
@@ -127,6 +131,10 @@ class RuntimeEntryData:
"""Set the mtu cache for the given address."""
self._gatt_mtu_cache[address] = mtu
def clear_gatt_mtu_cache(self, address: int) -> None:
"""Clear the mtu cache for the given address."""
self._gatt_mtu_cache.pop(address, None)
@callback
def async_update_ble_connection_limits(self, free: int, limit: int) -> None:
"""Update the BLE connection limits."""
@@ -3,7 +3,7 @@
"name": "ESPHome",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/esphome",
"requirements": ["aioesphomeapi==12.2.1"],
"requirements": ["aioesphomeapi==13.0.1"],
"zeroconf": ["_esphomelib._tcp.local."],
"dhcp": [{ "registered_devices": true }],
"codeowners": ["@OttoWinter", "@jesserockz"],
+10
View File
@@ -663,6 +663,14 @@ class AvmWrapper(FritzBoxTools):
partial(self.get_wan_link_properties)
)
async def async_ipv6_active(self) -> bool:
"""Check if an IPv6 address is active on the WAN interface."""
def wrap_external_ipv6() -> str:
return str(self.fritz_status.external_ipv6)
return bool(await self.hass.async_add_executor_job(wrap_external_ipv6))
async def async_get_connection_info(self) -> ConnectionInfo:
"""Return ConnectionInfo data."""
@@ -671,6 +679,7 @@ class AvmWrapper(FritzBoxTools):
connection=link_properties.get("NewWANAccessType", "").lower(),
mesh_role=self.mesh_role,
wan_enabled=self.device_is_router,
ipv6_active=await self.async_ipv6_active(),
)
_LOGGER.debug(
"ConnectionInfo for FritzBox %s: %s",
@@ -1011,3 +1020,4 @@ class ConnectionInfo:
connection: str
mesh_role: MeshRoles
wan_enabled: bool
ipv6_active: bool
+12
View File
@@ -66,6 +66,11 @@ def _retrieve_external_ip_state(status: FritzStatus, last_value: str) -> str:
return status.external_ip # type: ignore[no-any-return]
def _retrieve_external_ipv6_state(status: FritzStatus, last_value: str) -> str:
"""Return external ipv6 from device."""
return str(status.external_ipv6)
def _retrieve_kb_s_sent_state(status: FritzStatus, last_value: str) -> float:
"""Return upload transmission rate."""
return round(status.transmission_rate[0] / 1000, 1) # type: ignore[no-any-return]
@@ -155,6 +160,13 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
icon="mdi:earth",
value_fn=_retrieve_external_ip_state,
),
FritzSensorEntityDescription(
key="external_ipv6",
name="External IPv6",
icon="mdi:earth",
value_fn=_retrieve_external_ipv6_state,
is_suitable=lambda info: info.ipv6_active,
),
FritzSensorEntityDescription(
key="device_uptime",
name="Device Uptime",
@@ -1,6 +1,7 @@
{
"domain": "fritzbox",
"name": "AVM FRITZ!SmartHome",
"integration_type": "hub",
"documentation": "https://www.home-assistant.io/integrations/fritzbox",
"requirements": ["pyfritzhome==0.6.7"],
"ssdp": [
@@ -2,7 +2,7 @@
"domain": "frontend",
"name": "Home Assistant Frontend",
"documentation": "https://www.home-assistant.io/integrations/frontend",
"requirements": ["home-assistant-frontend==20221201.1"],
"requirements": ["home-assistant-frontend==20221206.0"],
"dependencies": [
"api",
"auth",
+1 -1
View File
@@ -91,7 +91,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="timeToEmptyFull",
name="Time to empty/full",
device_class=TIME_MINUTES,
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=TIME_MINUTES,
),
SensorEntityDescription(
@@ -4,7 +4,7 @@
"config_flow": true,
"dependencies": ["application_credentials"],
"documentation": "https://www.home-assistant.io/integrations/calendar.google/",
"requirements": ["gcal-sync==4.0.3", "oauth2client==4.1.3"],
"requirements": ["gcal-sync==4.0.4", "oauth2client==4.1.3"],
"codeowners": ["@allenporter"],
"iot_class": "cloud_polling",
"loggers": ["googleapiclient"]
@@ -14,7 +14,7 @@ from homeassistant.const import ATTR_ATTRIBUTION, UnitOfLength
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.location import find_coordinates
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt
from homeassistant.util.unit_conversion import DistanceConverter
@@ -215,13 +215,15 @@ def prepare_parameters(
def _from_entity_id(entity_id: str) -> list[str]:
coordinates = find_coordinates(hass, entity_id)
if coordinates is None:
raise InvalidCoordinatesException(f"No coordinates found for {entity_id}")
raise UpdateFailed(f"No coordinates found for {entity_id}")
if coordinates is entity_id:
raise UpdateFailed(f"Could not find entity {entity_id}")
try:
formatted_coordinates = coordinates.split(",")
vol.Schema(cv.gps(formatted_coordinates))
except (AttributeError, vol.ExactSequenceInvalid) as ex:
raise InvalidCoordinatesException(
f"{coordinates} are not valid coordinates"
raise UpdateFailed(
f"{entity_id} does not have valid coordinates: {coordinates}"
) from ex
return formatted_coordinates
@@ -275,7 +277,3 @@ def next_datetime(simple_time: time) -> datetime:
if combined < datetime.now():
combined = combined + timedelta(days=1)
return combined
class InvalidCoordinatesException(Exception):
"""Coordinates for origin or destination are malformed."""
@@ -25,7 +25,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.start import async_at_start
from homeassistant.helpers.start import async_at_started
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
@@ -134,7 +134,7 @@ class HERETravelTimeSensor(CoordinatorEntity, RestoreSensor):
async def _update_at_start(_):
await self.async_update()
self.async_on_remove(async_at_start(self.hass, _update_at_start))
self.async_on_remove(async_at_started(self.hass, _update_at_start))
@callback
def _handle_coordinator_update(self) -> None:
@@ -21,7 +21,7 @@ from homeassistant.helpers.typing import ConfigType
from .config_flow import normalize_hkid
from .connection import HKDevice
from .const import KNOWN_DEVICES, TRIGGERS
from .const import KNOWN_DEVICES
from .utils import async_get_controller
_LOGGER = logging.getLogger(__name__)
@@ -59,7 +59,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
await async_get_controller(hass)
hass.data[KNOWN_DEVICES] = {}
hass.data[TRIGGERS] = {}
async def _async_stop_homekit_controller(event: Event) -> None:
await asyncio.gather(
@@ -84,7 +84,7 @@ async def async_setup_entry(
entity.old_unique_id, entity.unique_id, Platform.BUTTON
)
async_add_entities(entities, True)
async_add_entities(entities)
return True
conn.add_char_factory(async_add_characteristic)
@@ -583,6 +583,7 @@ class HomekitControllerFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
accessories_state.config_num,
accessories_state.accessories.serialize(),
serialize_broadcast_key(accessories_state.broadcast_key),
accessories_state.state_num,
)
return self.async_create_entry(title=name, data=pairing_data)
@@ -20,7 +20,7 @@ from aiohomekit.model.services import Service
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_VIA_DEVICE, EVENT_HOMEASSISTANT_STARTED
from homeassistant.core import CALLBACK_TYPE, CoreState, Event, HomeAssistant, callback
from homeassistant.core import CoreState, Event, HomeAssistant, callback
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.dispatcher import async_dispatcher_send
@@ -116,11 +116,6 @@ class HKDevice:
self.pollable_characteristics: list[tuple[int, int]] = []
# If this is set polling is active and can be disabled by calling
# this method.
self._polling_interval_remover: CALLBACK_TYPE | None = None
self._ble_available_interval_remover: CALLBACK_TYPE | None = None
# Never allow concurrent polling of the same accessory or bridge
self._polling_lock = asyncio.Lock()
self._polling_lock_warned = False
@@ -185,8 +180,8 @@ class HKDevice:
self.available = available
async_dispatcher_send(self.hass, self.signal_state_updated)
async def _async_retry_populate_ble_accessory_state(self, event: Event) -> None:
"""Try again to populate the BLE accessory state.
async def _async_populate_ble_accessory_state(self, event: Event) -> None:
"""Populate the BLE accessory state without blocking startup.
If the accessory was asleep at startup we need to retry
since we continued on to allow startup to proceed.
@@ -194,6 +189,7 @@ class HKDevice:
If this fails the state may be inconsistent, but will
get corrected as soon as the accessory advertises again.
"""
self._async_start_polling()
try:
await self.pairing.async_populate_accessories_state(force_update=True)
except STARTUP_EXCEPTIONS as ex:
@@ -221,20 +217,28 @@ class HKDevice:
# so we only poll those chars but that is not possible
# yet.
attempts = None if self.hass.state == CoreState.running else 1
try:
await self.pairing.async_populate_accessories_state(
force_update=True, attempts=attempts
)
except AccessoryNotFoundError:
if transport != Transport.BLE or not pairing.accessories:
# BLE devices may sleep and we can't force a connection
raise
if (
transport == Transport.BLE
and pairing.accessories
and pairing.accessories.has_aid(1)
):
# The GSN gets restored and a catch up poll will be
# triggered via disconnected events automatically
# if we are out of sync. To be sure we are in sync;
# If for some reason the BLE connection failed
# previously we force an update after startup
# is complete.
entry.async_on_unload(
self.hass.bus.async_listen(
EVENT_HOMEASSISTANT_STARTED,
self._async_retry_populate_ble_accessory_state,
self._async_populate_ble_accessory_state,
)
)
else:
await self.pairing.async_populate_accessories_state(
force_update=True, attempts=attempts
)
self._async_start_polling()
entry.async_on_unload(pairing.dispatcher_connect(self.process_new_events))
entry.async_on_unload(
@@ -252,27 +256,34 @@ class HKDevice:
self.async_set_available_state(self.pairing.is_available)
# We use async_request_update to avoid multiple updates
# at the same time which would generate a spurious warning
# in the log about concurrent polling.
self._polling_interval_remover = async_track_time_interval(
self.hass, self.async_request_update, self.pairing.poll_interval
)
if transport == Transport.BLE:
# If we are using BLE, we need to periodically check of the
# BLE device is available since we won't get callbacks
# when it goes away since we HomeKit supports disconnected
# notifications and we cannot treat a disconnect as unavailability.
self._ble_available_interval_remover = async_track_time_interval(
self.hass,
self.async_update_available_state,
timedelta(seconds=BLE_AVAILABILITY_CHECK_INTERVAL),
entry.async_on_unload(
async_track_time_interval(
self.hass,
self.async_update_available_state,
timedelta(seconds=BLE_AVAILABILITY_CHECK_INTERVAL),
)
)
# BLE devices always get an RSSI sensor as well
if "sensor" not in self.platforms:
await self.async_load_platform("sensor")
@callback
def _async_start_polling(self) -> None:
"""Start polling for updates."""
# We use async_request_update to avoid multiple updates
# at the same time which would generate a spurious warning
# in the log about concurrent polling.
self.config_entry.async_on_unload(
async_track_time_interval(
self.hass, self.async_request_update, self.pairing.poll_interval
)
)
async def async_add_new_entities(self) -> None:
"""Add new entities to Home Assistant."""
await self.async_load_platforms()
@@ -529,9 +540,6 @@ class HKDevice:
async def async_unload(self) -> None:
"""Stop interacting with device and prepare for removal from hass."""
if self._polling_interval_remover:
self._polling_interval_remover()
await self.pairing.shutdown()
await self.hass.config_entries.async_unload_platforms(
@@ -224,7 +224,7 @@ async def async_setup_triggers_for_entry(
# They have to be different accessories (they can be on the same bridge)
# In practice, this is inline with what iOS actually supports AFAWCT.
device_id = conn.devices[aid]
if device_id in hass.data[TRIGGERS]:
if TRIGGERS in hass.data and device_id in hass.data[TRIGGERS]:
return False
# Just because we recognize the service type doesn't mean we can actually
@@ -246,15 +246,18 @@ def async_get_or_create_trigger_source(
hass: HomeAssistant, device_id: str
) -> TriggerSource:
"""Get or create a trigger source for a device id."""
if not (source := hass.data[TRIGGERS].get(device_id)):
trigger_sources: dict[str, TriggerSource] = hass.data.setdefault(TRIGGERS, {})
if not (source := trigger_sources.get(device_id)):
source = TriggerSource(hass)
hass.data[TRIGGERS][device_id] = source
trigger_sources[device_id] = source
return source
def async_fire_triggers(conn: HKDevice, events: dict[tuple[int, int], dict[str, Any]]):
"""Process events generated by a HomeKit accessory into automation triggers."""
trigger_sources: dict[str, TriggerSource] = conn.hass.data[TRIGGERS]
trigger_sources: dict[str, TriggerSource] = conn.hass.data.get(TRIGGERS, {})
if not trigger_sources:
return
for (aid, iid), ev in events.items():
if aid in conn.devices:
device_id = conn.devices[aid]
@@ -3,7 +3,7 @@
"name": "HomeKit Controller",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
"requirements": ["aiohomekit==2.3.3"],
"requirements": ["aiohomekit==2.4.1"],
"zeroconf": ["_hap._tcp.local.", "_hap._udp.local."],
"bluetooth": [{ "manufacturer_id": 76, "manufacturer_data_start": [6] }],
"dependencies": ["bluetooth", "zeroconf"],
@@ -78,7 +78,7 @@ async def async_setup_entry(
entity.old_unique_id, entity.unique_id, Platform.NUMBER
)
async_add_entities(entities, True)
async_add_entities(entities)
return True
conn.add_char_factory(async_add_characteristic)
@@ -61,11 +61,15 @@ class EntityMapStorage:
config_num: int,
accessories: list[Any],
broadcast_key: str | None = None,
state_num: int | None = None,
) -> Pairing:
"""Create a new pairing cache."""
_LOGGER.debug("Creating or updating entity map for %s", homekit_id)
data = Pairing(
config_num=config_num, accessories=accessories, broadcast_key=broadcast_key
config_num=config_num,
accessories=accessories,
broadcast_key=broadcast_key,
state_num=state_num,
)
self.storage_data[homekit_id] = data
self._async_schedule_save()
+22 -3
View File
@@ -99,6 +99,7 @@ SCHEMA_WS_APPKEY = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
# The number of days after the moment a notification is sent that a JWT
# is valid.
JWT_VALID_DAYS = 7
VAPID_CLAIM_VALID_HOURS = 12
KEYS_SCHEMA = vol.All(
dict,
@@ -514,7 +515,10 @@ class HTML5NotificationService(BaseNotificationService):
webpusher = WebPusher(info[ATTR_SUBSCRIPTION])
if self._vapid_prv and self._vapid_email:
vapid_headers = create_vapid_headers(
self._vapid_email, info[ATTR_SUBSCRIPTION], self._vapid_prv
self._vapid_email,
info[ATTR_SUBSCRIPTION],
self._vapid_prv,
timestamp,
)
vapid_headers.update({"urgency": priority, "priority": priority})
response = webpusher.send(
@@ -540,6 +544,12 @@ class HTML5NotificationService(BaseNotificationService):
_LOGGER.error("Error saving registration")
else:
_LOGGER.info("Configuration saved")
elif response.status_code > 399:
_LOGGER.error(
"There was an issue sending the notification %s: %s",
response.status,
response.text,
)
def add_jwt(timestamp, target, tag, jwt_secret):
@@ -556,14 +566,23 @@ def add_jwt(timestamp, target, tag, jwt_secret):
return jwt.encode(jwt_claims, jwt_secret)
def create_vapid_headers(vapid_email, subscription_info, vapid_private_key):
def create_vapid_headers(vapid_email, subscription_info, vapid_private_key, timestamp):
"""Create encrypted headers to send to WebPusher."""
if vapid_email and vapid_private_key and ATTR_ENDPOINT in subscription_info:
if (
vapid_email
and vapid_private_key
and ATTR_ENDPOINT in subscription_info
and timestamp
):
vapid_exp = datetime.fromtimestamp(timestamp) + timedelta(
hours=VAPID_CLAIM_VALID_HOURS
)
url = urlparse(subscription_info.get(ATTR_ENDPOINT))
vapid_claims = {
"sub": f"mailto:{vapid_email}",
"aud": f"{url.scheme}://{url.netloc}",
"exp": int(vapid_exp.timestamp()),
}
vapid = Vapid.from_string(private_key=vapid_private_key)
return vapid.sign(vapid_claims)
+32 -7
View File
@@ -7,9 +7,10 @@ from typing import Any, Final
import voluptuous as vol
from xknx import XKNX
from xknx.exceptions.exception import InvalidSecureConfiguration
from xknx.exceptions.exception import CommunicationError, InvalidSecureConfiguration
from xknx.io import DEFAULT_MCAST_GRP, DEFAULT_MCAST_PORT
from xknx.io.gateway_scanner import GatewayDescriptor, GatewayScanner
from xknx.io.self_description import request_description
from xknx.secure import load_keyring
from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow
@@ -204,8 +205,11 @@ class KNXCommonFlow(ABC, FlowHandler):
return await self.async_step_manual_tunnel()
errors: dict = {}
tunnel_options = [str(tunnel) for tunnel in self._found_tunnels]
tunnel_options.append(OPTION_MANUAL_TUNNEL)
tunnel_options = {
str(tunnel): f"{tunnel}{' 🔐' if tunnel.tunnelling_requires_secure else ''}"
for tunnel in self._found_tunnels
}
tunnel_options |= {OPTION_MANUAL_TUNNEL: OPTION_MANUAL_TUNNEL}
fields = {vol.Required(CONF_KNX_GATEWAY): vol.In(tunnel_options)}
return self.async_show_form(
@@ -230,17 +234,38 @@ class KNXCommonFlow(ABC, FlowHandler):
except vol.Invalid:
errors[CONF_KNX_LOCAL_IP] = "invalid_ip_address"
selected_tunnelling_type = user_input[CONF_KNX_TUNNELING_TYPE]
if not errors:
try:
self._selected_tunnel = await request_description(
gateway_ip=_host,
gateway_port=user_input[CONF_PORT],
local_ip=_local_ip,
route_back=user_input[CONF_KNX_ROUTE_BACK],
)
except CommunicationError:
errors["base"] = "cannot_connect"
else:
if bool(self._selected_tunnel.tunnelling_requires_secure) is not (
selected_tunnelling_type == CONF_KNX_TUNNELING_TCP_SECURE
):
errors[CONF_KNX_TUNNELING_TYPE] = "unsupported_tunnel_type"
elif (
selected_tunnelling_type == CONF_KNX_TUNNELING_TCP
and not self._selected_tunnel.supports_tunnelling_tcp
):
errors[CONF_KNX_TUNNELING_TYPE] = "unsupported_tunnel_type"
if not errors:
connection_type = user_input[CONF_KNX_TUNNELING_TYPE]
self.new_entry_data = KNXConfigEntryData(
connection_type=selected_tunnelling_type,
host=_host,
port=user_input[CONF_PORT],
route_back=user_input[CONF_KNX_ROUTE_BACK],
local_ip=_local_ip,
connection_type=connection_type,
)
if connection_type == CONF_KNX_TUNNELING_TCP_SECURE:
if selected_tunnelling_type == CONF_KNX_TUNNELING_TCP_SECURE:
return self.async_show_menu(
step_id="secure_key_source",
menu_options=["secure_knxkeys", "secure_routing_manual"],
@@ -299,7 +324,7 @@ class KNXCommonFlow(ABC, FlowHandler):
if self.show_advanced_options:
fields[vol.Optional(CONF_KNX_LOCAL_IP)] = _IP_SELECTOR
if not self._found_tunnels:
if not self._found_tunnels and not errors.get("base"):
errors["base"] = "no_tunnel_discovered"
return self.async_show_form(
step_id="manual_tunnel", data_schema=vol.Schema(fields), errors=errors
+4 -2
View File
@@ -99,7 +99,8 @@
"invalid_signature": "The password to decrypt the `.knxkeys` file is wrong.",
"file_not_found": "The specified `.knxkeys` file was not found in the path config/.storage/knx/",
"no_router_discovered": "No KNXnet/IP router was discovered on the network.",
"no_tunnel_discovered": "Could not find a KNX tunneling server on your network."
"no_tunnel_discovered": "Could not find a KNX tunneling server on your network.",
"unsupported_tunnel_type": "Selected tunnelling type not supported by gateway."
}
},
"options": {
@@ -214,7 +215,8 @@
"invalid_signature": "[%key:component::knx::config::error::invalid_signature%]",
"file_not_found": "[%key:component::knx::config::error::file_not_found%]",
"no_router_discovered": "[%key:component::knx::config::error::no_router_discovered%]",
"no_tunnel_discovered": "[%key:component::knx::config::error::no_tunnel_discovered%]"
"no_tunnel_discovered": "[%key:component::knx::config::error::no_tunnel_discovered%]",
"unsupported_tunnel_type": "[%key:component::knx::config::error::unsupported_tunnel_type%]"
}
}
}
@@ -3,7 +3,7 @@
"name": "Local Calendar",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/local_calendar",
"requirements": ["ical==4.1.1"],
"requirements": ["ical==4.2.1"],
"codeowners": ["@allenporter"],
"iot_class": "local_polling",
"loggers": ["ical"]
@@ -2,7 +2,7 @@
"domain": "lupusec",
"name": "Lupus Electronics LUPUSEC",
"documentation": "https://www.home-assistant.io/integrations/lupusec",
"requirements": ["lupupy==0.1.9"],
"requirements": ["lupupy==0.2.1"],
"codeowners": ["@majuss"],
"iot_class": "local_polling",
"loggers": ["lupupy"]
+1 -1
View File
@@ -4,7 +4,7 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/lyric",
"dependencies": ["application_credentials"],
"requirements": ["aiolyric==1.0.8"],
"requirements": ["aiolyric==1.0.9"],
"codeowners": ["@timmo001"],
"quality_scale": "silver",
"dhcp": [
@@ -91,8 +91,9 @@ class RegistrationsView(HomeAssistantView):
)
remote_ui_url = None
with suppress(hass.components.cloud.CloudNotAvailable):
remote_ui_url = cloud.async_remote_ui_url(hass)
if cloud.async_active_subscription(hass):
with suppress(hass.components.cloud.CloudNotAvailable):
remote_ui_url = cloud.async_remote_ui_url(hass)
return self.json(
{
@@ -693,8 +693,9 @@ async def webhook_get_config(
if CONF_CLOUDHOOK_URL in config_entry.data:
resp[CONF_CLOUDHOOK_URL] = config_entry.data[CONF_CLOUDHOOK_URL]
with suppress(hass.components.cloud.CloudNotAvailable):
resp[CONF_REMOTE_UI_URL] = cloud.async_remote_ui_url(hass)
if cloud.async_active_subscription(hass):
with suppress(hass.components.cloud.CloudNotAvailable):
resp[CONF_REMOTE_UI_URL] = cloud.async_remote_ui_url(hass)
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
+14 -6
View File
@@ -172,14 +172,22 @@ class MqttUpdate(MqttEntity, UpdateEntity, RestoreEntity):
)
return
json_payload = {}
json_payload: Any | dict = {}
try:
json_payload = json_loads(payload)
_LOGGER.debug(
"JSON payload detected after processing payload '%s' on topic %s",
json_payload,
msg.topic,
)
if isinstance(json_payload, dict):
_LOGGER.debug(
"JSON payload detected after processing payload '%s' on topic %s",
json_payload,
msg.topic,
)
else:
_LOGGER.debug(
"Non-dictionary JSON payload detected after processing payload '%s' on topic %s",
payload,
msg.topic,
)
json_payload = {"installed_version": payload}
except JSON_DECODE_EXCEPTIONS:
_LOGGER.debug(
"No valid (JSON) payload detected after processing payload '%s' on topic %s",
@@ -32,10 +32,10 @@ class InvalidApiKeyMonitor:
async def async_increment(self) -> None:
"""Increment the counter."""
LOGGER.debug("Invalid API key response detected (number %s)", self._count)
async with self._lock:
self._count += 1
if self._count > self.DEFAULT_FAILED_API_CALL_THRESHOLD:
LOGGER.info("Starting reauth after multiple failed API calls")
self._reauth_flow_manager.start_reauth()
async def async_reset(self) -> None:
@@ -114,10 +114,11 @@ class OpenUvCoordinator(DataUpdateCoordinator):
"""Fetch data from OpenUV."""
try:
data = await self.update_method()
except InvalidApiKeyError:
except InvalidApiKeyError as err:
await self._invalid_api_key_monitor.async_increment()
raise UpdateFailed(str(err)) from err
except OpenUvError as err:
raise UpdateFailed(f"Error during protection data update: {err}") from err
raise UpdateFailed(str(err)) from err
await self._invalid_api_key_monitor.async_reset()
return cast(dict[str, Any], data["result"])
@@ -2,7 +2,7 @@
"domain": "plugwise",
"name": "Plugwise",
"documentation": "https://www.home-assistant.io/integrations/plugwise",
"requirements": ["plugwise==0.25.12"],
"requirements": ["plugwise==0.25.14"],
"codeowners": ["@CoMPaTech", "@bouwew", "@brefra", "@frenck"],
"zeroconf": ["_plugwise._tcp.local."],
"config_flow": true,
+1 -1
View File
@@ -71,7 +71,7 @@ async def async_setup_entry(
entities: list[PlugwiseNumberEntity] = []
for device_id, device in coordinator.data.devices.items():
for description in NUMBER_TYPES:
if description.key in device:
if description.key in device and "setpoint" in device[description.key]:
entities.append(
PlugwiseNumberEntity(coordinator, device_id, description)
)
@@ -301,6 +301,22 @@ SENSORS: tuple[SensorEntityDescription, ...] = (
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="domestic_hot_water_setpoint",
name="DHW setpoint",
native_unit_of_measurement=TEMP_CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="maximum_boiler_temperature",
name="Maximum boiler temperature",
native_unit_of_measurement=TEMP_CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
)
@@ -161,7 +161,7 @@ def migrate_schema(
"Database is about to correct DB schema errors: %s",
", ".join(sorted(schema_errors)),
)
statistics_correct_db_schema(engine, session_maker, schema_errors)
statistics_correct_db_schema(instance, engine, session_maker, schema_errors)
def _create_index(
@@ -2404,7 +2404,10 @@ def validate_db_schema(
def correct_db_schema(
engine: Engine, session_maker: Callable[[], Session], schema_errors: set[str]
instance: Recorder,
engine: Engine,
session_maker: Callable[[], Session],
schema_errors: set[str],
) -> None:
"""Correct issues detected by validate_db_schema."""
from .migration import _modify_columns # pylint: disable=import-outside-toplevel
@@ -2450,12 +2453,16 @@ def correct_db_schema(
)
if f"{table.__tablename__}.µs precision" in schema_errors:
# Attempt to convert datetime columns to µs precision
if instance.dialect_name == SupportedDialect.MYSQL:
datetime_type = "DATETIME(6)"
else:
datetime_type = "TIMESTAMP(6) WITH TIME ZONE"
_modify_columns(
session_maker,
engine,
table.__tablename__,
[
"last_reset DATETIME(6)",
"start DATETIME(6)",
f"last_reset {datetime_type}",
f"start {datetime_type}",
],
)
@@ -8,7 +8,7 @@
"samsungctl[websocket]==0.7.1",
"samsungtvws[async,encrypted]==2.5.0",
"wakeonlan==2.1.0",
"async-upnp-client==0.32.2"
"async-upnp-client==0.32.3"
],
"ssdp": [
{
@@ -128,16 +128,18 @@ CONDITION_SCHEMA = vol.All(
CONF_IS_GAS,
CONF_IS_HUMIDITY,
CONF_IS_ILLUMINANCE,
CONF_IS_OZONE,
CONF_IS_MOISTURE,
CONF_IS_NITROGEN_DIOXIDE,
CONF_IS_NITROGEN_MONOXIDE,
CONF_IS_NITROUS_OXIDE,
CONF_IS_OZONE,
CONF_IS_POWER,
CONF_IS_POWER_FACTOR,
CONF_IS_PM1,
CONF_IS_PM10,
CONF_IS_PM25,
CONF_IS_PRECIPITATION,
CONF_IS_PRECIPITATION_INTENSITY,
CONF_IS_PRESSURE,
CONF_IS_REACTIVE_POWER,
CONF_IS_SIGNAL_STRENGTH,
@@ -145,6 +147,10 @@ CONDITION_SCHEMA = vol.All(
CONF_IS_TEMPERATURE,
CONF_IS_VOLATILE_ORGANIC_COMPOUNDS,
CONF_IS_VOLTAGE,
CONF_IS_VOLUME,
CONF_IS_WATER,
CONF_IS_WEIGHT,
CONF_IS_WIND_SPEED,
CONF_IS_VALUE,
]
),
@@ -138,6 +138,8 @@ TRIGGER_SCHEMA = vol.All(
CONF_PM25,
CONF_POWER,
CONF_POWER_FACTOR,
CONF_PRECIPITATION,
CONF_PRECIPITATION_INTENSITY,
CONF_PRESSURE,
CONF_REACTIVE_POWER,
CONF_SIGNAL_STRENGTH,
@@ -145,6 +147,10 @@ TRIGGER_SCHEMA = vol.All(
CONF_TEMPERATURE,
CONF_VOLATILE_ORGANIC_COMPOUNDS,
CONF_VOLTAGE,
CONF_VOLUME,
CONF_WATER,
CONF_WEIGHT,
CONF_WIND_SPEED,
CONF_VALUE,
]
),
@@ -237,11 +237,12 @@ def _async_get_system_for_service_call(
) is None:
raise ValueError("No base station registered for alarm control panel")
[system_id] = [
[system_id_str] = [
identity[1]
for identity in base_station_device_entry.identifiers
if identity[0] == DOMAIN
]
system_id = int(system_id_str)
for entry_id in base_station_device_entry.config_entries:
if (simplisafe := hass.data[DOMAIN].get(entry_id)) is None:
@@ -10,7 +10,8 @@ remove_pin:
selector:
device:
integration: simplisafe
model: alarm_control_panel
entity:
domain: alarm_control_panel
label_or_pin:
name: Label/PIN
description: The label/value to remove.
@@ -29,7 +30,8 @@ set_pin:
selector:
device:
integration: simplisafe
model: alarm_control_panel
entity:
domain: alarm_control_panel
label:
name: Label
description: The label of the PIN
@@ -55,7 +57,8 @@ set_system_properties:
selector:
device:
integration: simplisafe
model: alarm_control_panel
entity:
domain: alarm_control_panel
alarm_duration:
name: Alarm duration
description: The length of a triggered alarm
+3 -3
View File
@@ -43,11 +43,11 @@ DURATION_SECONDS = "duration_in_s"
POSITION_SECONDS = "position_in_s"
def _timespan_secs(timespan: str | None) -> None | float:
def _timespan_secs(timespan: str | None) -> None | int:
"""Parse a time-span into number of seconds."""
if timespan in UNAVAILABLE_VALUES:
return None
return time_period_str(timespan).total_seconds() # type: ignore[arg-type]
return int(time_period_str(timespan).total_seconds()) # type: ignore[arg-type]
class SonosMedia:
@@ -73,7 +73,7 @@ class SonosMedia:
self.title: str | None = None
self.uri: str | None = None
self.position: float | None = None
self.position: int | None = None
self.position_updated_at: datetime.datetime | None = None
def clear(self) -> None:
@@ -323,7 +323,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
@property
def media_position(self) -> int | None:
"""Position of current playing media in seconds."""
return int(self.media.position) if self.media.position else None
return self.media.position
@property
def media_position_updated_at(self) -> datetime.datetime | None:
+1 -1
View File
@@ -2,7 +2,7 @@
"domain": "ssdp",
"name": "Simple Service Discovery Protocol (SSDP)",
"documentation": "https://www.home-assistant.io/integrations/ssdp",
"requirements": ["async-upnp-client==0.32.2"],
"requirements": ["async-upnp-client==0.32.3"],
"dependencies": ["network"],
"after_dependencies": ["zeroconf"],
"codeowners": [],
@@ -3,7 +3,7 @@
"name": "Subaru",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/subaru",
"requirements": ["subarulink==0.6.1"],
"requirements": ["subarulink==0.7.0"],
"codeowners": ["@G-Two"],
"iot_class": "cloud_polling",
"loggers": ["stdiomask", "subarulink"]
-16
View File
@@ -14,12 +14,10 @@ from homeassistant.components.sensor import (
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ELECTRIC_POTENTIAL_VOLT,
LENGTH_KILOMETERS,
LENGTH_MILES,
PERCENTAGE,
PRESSURE_HPA,
TEMP_CELSIUS,
VOLUME_GALLONS,
VOLUME_LITERS,
)
@@ -117,20 +115,6 @@ API_GEN_2_SENSORS = [
native_unit_of_measurement=PRESSURE_HPA,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key=sc.EXTERNAL_TEMP,
device_class=SensorDeviceClass.TEMPERATURE,
name="External temp",
native_unit_of_measurement=TEMP_CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key=sc.BATTERY_VOLTAGE,
device_class=SensorDeviceClass.VOLTAGE,
name="12V battery voltage",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
state_class=SensorStateClass.MEASUREMENT,
),
]
# Sensors available to "Subaru Safety Plus" subscribers with PHEV vehicles
@@ -2,7 +2,7 @@
"domain": "switchbot",
"name": "SwitchBot",
"documentation": "https://www.home-assistant.io/integrations/switchbot",
"requirements": ["PySwitchbot==0.20.5"],
"requirements": ["PySwitchbot==0.22.0"],
"config_flow": true,
"dependencies": ["bluetooth"],
"codeowners": [
@@ -164,6 +164,7 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN):
use_ssl = user_input.get(CONF_SSL, DEFAULT_USE_SSL)
verify_ssl = user_input.get(CONF_VERIFY_SSL, DEFAULT_VERIFY_SSL)
otp_code = user_input.get(CONF_OTP_CODE)
friendly_name = user_input.get(CONF_NAME)
if not port:
if use_ssl is True:
@@ -229,7 +230,7 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN):
return self.async_abort(reason="reauth_successful")
return self.async_abort(reason="reconfigure_successful")
return self.async_create_entry(title=host, data=config_data)
return self.async_create_entry(title=friendly_name or host, data=config_data)
async def async_step_user(
self, user_input: dict[str, Any] | None = None
@@ -303,6 +304,8 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN):
async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> FlowResult:
"""Perform reauth upon an API authentication error."""
self.reauth_conf = entry_data
self.context["title_placeholders"][CONF_HOST] = entry_data[CONF_HOST]
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
@@ -3,7 +3,7 @@
"domain": "tibber",
"name": "Tibber",
"documentation": "https://www.home-assistant.io/integrations/tibber",
"requirements": ["pyTibber==0.26.1"],
"requirements": ["pyTibber==0.26.3"],
"codeowners": ["@danielhiversen"],
"quality_scale": "silver",
"config_flow": true,
+1 -1
View File
@@ -410,7 +410,7 @@ class TuyaNumberEntity(TuyaEntity, NumberEntity):
return None
# Raw value
if not (value := self.device.status.get(self.entity_description.key)):
if (value := self.device.status.get(self.entity_description.key)) is None:
return None
return self._number.scale_value(value)
@@ -9,6 +9,7 @@ CONF_NAME = "name"
# Strongly named HA attributes keys
ATTR_HOST = "host"
ATTR_VERSION = "version"
# Keys of attributes read from the get_device_info
DEV_ID = "uuid"
@@ -27,3 +28,6 @@ HIDDEN_DEV_VALUES = (
"copyright", # We should not display a copyright "LEDWORKS 2018" in the Home-Assistant UI
"mac", # Does not report the actual device mac address
)
# Minimum version required to support effects
MIN_EFFECT_VERSION = "2.7.1"
+69 -33
View File
@@ -7,6 +7,7 @@ import logging
from typing import Any
from aiohttp import ClientError
from awesomeversion import AwesomeVersion
from ttls.client import Twinkly
from homeassistant.components.light import (
@@ -25,6 +26,7 @@ from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import (
ATTR_VERSION,
CONF_HOST,
CONF_ID,
CONF_NAME,
@@ -37,6 +39,7 @@ from .const import (
DEV_PROFILE_RGBW,
DOMAIN,
HIDDEN_DEV_VALUES,
MIN_EFFECT_VERSION,
)
_LOGGER = logging.getLogger(__name__)
@@ -96,6 +99,9 @@ class TwinklyLight(LightEntity):
self._attributes: dict[Any, Any] = {}
self._current_movie: dict[Any, Any] = {}
self._movies: list[Any] = []
self._software_version = ""
# We guess that most devices are "new" and support effects
self._attr_supported_features = LightEntityFeature.EFFECT
@property
def available(self) -> bool:
@@ -130,13 +136,9 @@ class TwinklyLight(LightEntity):
manufacturer="LEDWORKS",
model=self.model,
name=self.name,
sw_version=self._software_version,
)
@property
def supported_features(self) -> LightEntityFeature:
"""Return supported features."""
return LightEntityFeature.EFFECT
@property
def is_on(self) -> bool:
"""Return true if light is on."""
@@ -165,6 +167,19 @@ class TwinklyLight(LightEntity):
effect_list.append(f"{movie['id']} {movie['name']}")
return effect_list
async def async_added_to_hass(self) -> None:
"""Device is added to hass."""
software_version = await self._client.get_firmware_version()
if ATTR_VERSION in software_version:
self._software_version = software_version[ATTR_VERSION]
if AwesomeVersion(self._software_version) < AwesomeVersion(
MIN_EFFECT_VERSION
):
self._attr_supported_features = (
self.supported_features & ~LightEntityFeature.EFFECT
)
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn device on."""
if ATTR_BRIGHTNESS in kwargs:
@@ -178,36 +193,54 @@ class TwinklyLight(LightEntity):
await self._client.set_brightness(brightness)
if ATTR_RGBW_COLOR in kwargs:
if kwargs[ATTR_RGBW_COLOR] != self._attr_rgbw_color:
self._attr_rgbw_color = kwargs[ATTR_RGBW_COLOR]
if (
ATTR_RGBW_COLOR in kwargs
and kwargs[ATTR_RGBW_COLOR] != self._attr_rgbw_color
):
if isinstance(self._attr_rgbw_color, tuple):
await self._client.interview()
# Static color only supports rgb
await self._client.set_static_colour(
(
self._attr_rgbw_color[0],
self._attr_rgbw_color[1],
self._attr_rgbw_color[2],
)
await self._client.interview()
if LightEntityFeature.EFFECT & self.supported_features:
# Static color only supports rgb
await self._client.set_static_colour(
(
kwargs[ATTR_RGBW_COLOR][0],
kwargs[ATTR_RGBW_COLOR][1],
kwargs[ATTR_RGBW_COLOR][2],
)
await self._client.set_mode("color")
self._client.default_mode = "color"
)
await self._client.set_mode("color")
self._client.default_mode = "color"
else:
await self._client.set_cycle_colours(
(
kwargs[ATTR_RGBW_COLOR][3],
kwargs[ATTR_RGBW_COLOR][0],
kwargs[ATTR_RGBW_COLOR][1],
kwargs[ATTR_RGBW_COLOR][2],
)
)
await self._client.set_mode("movie")
self._client.default_mode = "movie"
self._attr_rgbw_color = kwargs[ATTR_RGBW_COLOR]
if ATTR_RGB_COLOR in kwargs:
if kwargs[ATTR_RGB_COLOR] != self._attr_rgb_color:
self._attr_rgb_color = kwargs[ATTR_RGB_COLOR]
if ATTR_RGB_COLOR in kwargs and kwargs[ATTR_RGB_COLOR] != self._attr_rgb_color:
if isinstance(self._attr_rgb_color, tuple):
await self._client.interview()
if LightEntityFeature.EFFECT & self.supported_features:
await self._client.set_static_colour(kwargs[ATTR_RGB_COLOR])
await self._client.set_mode("color")
self._client.default_mode = "color"
else:
await self._client.set_cycle_colours(kwargs[ATTR_RGB_COLOR])
await self._client.set_mode("movie")
self._client.default_mode = "movie"
await self._client.interview()
await self._client.set_static_colour(self._attr_rgb_color)
await self._client.set_mode("color")
self._client.default_mode = "color"
self._attr_rgb_color = kwargs[ATTR_RGB_COLOR]
if ATTR_EFFECT in kwargs:
if (
ATTR_EFFECT in kwargs
and LightEntityFeature.EFFECT & self.supported_features
):
movie_id = kwargs[ATTR_EFFECT].split(" ")[0]
if "id" not in self._current_movie or int(movie_id) != int(
self._current_movie["id"]
@@ -268,8 +301,9 @@ class TwinklyLight(LightEntity):
if key not in HIDDEN_DEV_VALUES:
self._attributes[key] = value
await self.async_update_movies()
await self.async_update_current_movie()
if LightEntityFeature.EFFECT & self.supported_features:
await self.async_update_movies()
await self.async_update_current_movie()
if not self._is_available:
_LOGGER.info("Twinkly '%s' is now available", self._client.host)
@@ -288,11 +322,13 @@ class TwinklyLight(LightEntity):
async def async_update_movies(self) -> None:
"""Update the list of movies (effects)."""
movies = await self._client.get_saved_movies()
if "movies" in movies:
_LOGGER.debug("Movies: %s", movies)
if movies and "movies" in movies:
self._movies = movies["movies"]
async def async_update_current_movie(self) -> None:
"""Update the current active movie."""
current_movie = await self._client.get_current_movie()
if "id" in current_movie:
_LOGGER.debug("Current movie: %s", current_movie)
if current_movie and "id" in current_movie:
self._current_movie = current_movie
@@ -67,14 +67,6 @@ MOUNT_DEVICE_CLASS_MAP = {
CAMERA_SENSORS: tuple[ProtectBinaryEntityDescription, ...] = (
ProtectBinaryEntityDescription(
key="doorbell",
name="Doorbell",
device_class=BinarySensorDeviceClass.OCCUPANCY,
icon="mdi:doorbell-video",
ufp_required_field="feature_flags.has_chime",
ufp_value="is_ringing",
),
ProtectBinaryEntityDescription(
key="dark",
name="Is Dark",
@@ -339,7 +331,16 @@ SENSE_SENSORS: tuple[ProtectBinaryEntityDescription, ...] = (
),
)
MOTION_SENSORS: tuple[ProtectBinaryEventEntityDescription, ...] = (
EVENT_SENSORS: tuple[ProtectBinaryEventEntityDescription, ...] = (
ProtectBinaryEventEntityDescription(
key="doorbell",
name="Doorbell",
device_class=BinarySensorDeviceClass.OCCUPANCY,
icon="mdi:doorbell-video",
ufp_required_field="feature_flags.has_chime",
ufp_value="is_ringing",
ufp_event_obj="last_ring_event",
),
ProtectBinaryEventEntityDescription(
key="motion",
name="Motion",
@@ -485,7 +486,7 @@ async def async_setup_entry(
ufp_device=device,
)
if device.is_adopted and isinstance(device, Camera):
entities += _async_motion_entities(data, ufp_device=device)
entities += _async_event_entities(data, ufp_device=device)
async_add_entities(entities)
entry.async_on_unload(
@@ -501,14 +502,14 @@ async def async_setup_entry(
lock_descs=DOORLOCK_SENSORS,
viewer_descs=VIEWER_SENSORS,
)
entities += _async_motion_entities(data)
entities += _async_event_entities(data)
entities += _async_nvr_entities(data)
async_add_entities(entities)
@callback
def _async_motion_entities(
def _async_event_entities(
data: ProtectData,
ufp_device: ProtectAdoptableDeviceModel | None = None,
) -> list[ProtectDeviceEntity]:
@@ -517,7 +518,7 @@ def _async_motion_entities(
data.get_by_types({ModelType.CAMERA}) if ufp_device is None else [ufp_device]
)
for device in devices:
for description in MOTION_SENSORS:
for description in EVENT_SENSORS:
if not description.has_required(device):
continue
entities.append(ProtectEventBinarySensor(data, device, description))
@@ -4,7 +4,7 @@
"integration_type": "hub",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/unifiprotect",
"requirements": ["pyunifiprotect==4.5.1", "unifi-discovery==1.1.7"],
"requirements": ["pyunifiprotect==4.5.2", "unifi-discovery==1.1.7"],
"dependencies": ["http", "repairs"],
"codeowners": ["@briis", "@AngellusMortis", "@bdraco"],
"quality_scale": "platinum",
@@ -521,7 +521,7 @@ NVR_DISABLED_SENSORS: tuple[ProtectSensorEntityDescription, ...] = (
),
)
MOTION_SENSORS: tuple[ProtectSensorEventEntityDescription, ...] = (
EVENT_SENSORS: tuple[ProtectSensorEventEntityDescription, ...] = (
ProtectSensorEventEntityDescription(
key="detected_object",
name="Detected Object",
@@ -641,7 +641,7 @@ async def async_setup_entry(
ufp_device=device,
)
if device.is_adopted_by_us and isinstance(device, Camera):
entities += _async_motion_entities(data, ufp_device=device)
entities += _async_event_entities(data, ufp_device=device)
async_add_entities(entities)
entry.async_on_unload(
@@ -659,14 +659,14 @@ async def async_setup_entry(
chime_descs=CHIME_SENSORS,
viewer_descs=VIEWER_SENSORS,
)
entities += _async_motion_entities(data)
entities += _async_event_entities(data)
entities += _async_nvr_entities(data)
async_add_entities(entities)
@callback
def _async_motion_entities(
def _async_event_entities(
data: ProtectData,
ufp_device: Camera | None = None,
) -> list[ProtectDeviceEntity]:
@@ -687,7 +687,7 @@ def _async_motion_entities(
if not device.feature_flags.has_smart_detect:
continue
for event_desc in MOTION_SENSORS:
for event_desc in EVENT_SENSORS:
if not event_desc.has_required(device):
continue
@@ -1,7 +1,7 @@
{
"state": {
"unifiprotect__license_plate": {
"none": "Clear"
}
"state": {
"unifiprotect__license_plate": {
"none": "Clear"
}
}
}
+1 -1
View File
@@ -3,7 +3,7 @@
"name": "UPnP/IGD",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/upnp",
"requirements": ["async-upnp-client==0.32.2", "getmac==0.8.2"],
"requirements": ["async-upnp-client==0.32.3", "getmac==0.8.2"],
"dependencies": ["network", "ssdp"],
"codeowners": ["@StevenLooman"],
"ssdp": [
+1 -1
View File
@@ -139,7 +139,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Handle a clear cache service call."""
# clear the cache
with suppress(FileNotFoundError):
if call.data[CONF_ADDRESS]:
if CONF_ADDRESS in call.data and call.data[CONF_ADDRESS]:
await hass.async_add_executor_job(
os.unlink,
hass.config.path(
@@ -214,7 +214,7 @@ class WebSocketHandler:
disconnect_warn = "Did not receive auth message within 10 seconds"
raise Disconnect from err
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING):
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING):
raise Disconnect
if msg.type != WSMsgType.TEXT:
@@ -238,7 +238,7 @@ class WebSocketHandler:
while not wsock.closed:
msg = await wsock.receive()
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING):
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING):
break
if msg.type != WSMsgType.TEXT:
@@ -13,6 +13,7 @@ from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
_LOGGER = logging.getLogger(__name__)
@@ -36,6 +37,19 @@ def setup_platform(
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Xbox platform."""
create_issue(
hass,
"xbox_live",
"pending_removal",
breaks_in_ha_version="2023.2.0",
is_fixable=False,
severity=IssueSeverity.WARNING,
translation_key="pending_removal",
)
_LOGGER.warning(
"The Xbox Live integration is deprecated "
"and will be removed in Home Assistant 2023.2"
)
api = Client(api_key=config[CONF_API_KEY])
entities = []
@@ -0,0 +1,8 @@
{
"issues": {
"pending_removal": {
"title": "The Xbox Live integration is being removed",
"description": "The Xbox Live integration is pending removal from Home Assistant and will no longer be available as of Home Assistant 2023.2.\n\nThe integration is being removed, because it is only useful for the legacy device Xbox 360 and the upstream API now requires a paid subscription. Newer consoles are supported by the Xbox integration for free.\n\nRemove the Xbox Live YAML configuration from your configuration.yaml file and restart Home Assistant to fix this issue."
}
}
}
@@ -0,0 +1,8 @@
{
"issues": {
"pending_removal": {
"description": "The Xbox Live integration is pending removal from Home Assistant and will no longer be available as of Home Assistant 2023.2.\n\nThe integration is being removed, because it is only useful for the legacy device Xbox 360 and the upstream API now requires a paid subscription. Newer consoles are supported by the Xbox integration for free.\n\nRemove the Xbox Live YAML configuration from your configuration.yaml file and restart Home Assistant to fix this issue.",
"title": "The Xbox Live integration is being removed"
}
}
}
@@ -3,7 +3,7 @@
"name": "Yale Access Bluetooth",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/yalexs_ble",
"requirements": ["yalexs-ble==1.10.0"],
"requirements": ["yalexs-ble==1.10.2"],
"dependencies": ["bluetooth"],
"codeowners": ["@bdraco"],
"bluetooth": [
@@ -2,7 +2,7 @@
"domain": "yeelight",
"name": "Yeelight",
"documentation": "https://www.home-assistant.io/integrations/yeelight",
"requirements": ["yeelight==0.7.10", "async-upnp-client==0.32.2"],
"requirements": ["yeelight==0.7.10", "async-upnp-client==0.32.3"],
"codeowners": ["@zewelor", "@shenxn", "@starkillerOG", "@alexyao2015"],
"config_flow": true,
"dependencies": ["network"],
+1 -1
View File
@@ -9,7 +9,7 @@
"pyserial-asyncio==0.6",
"zha-quirks==0.0.87",
"zigpy-deconz==0.19.2",
"zigpy==0.52.2",
"zigpy==0.52.3",
"zigpy-xbee==0.16.2",
"zigpy-zigate==0.10.3",
"zigpy-znp==0.9.2"
@@ -717,7 +717,7 @@ class NodeEvents:
raw_value = value_ = value.value
if value.metadata.states:
value_ = value.metadata.states.get(str(value), value_)
value_ = value.metadata.states.get(str(value_), value_)
self.hass.bus.async_fire(
ZWAVE_JS_VALUE_UPDATED_EVENT,
@@ -4,6 +4,7 @@ from __future__ import annotations
from typing import cast
import voluptuous as vol
from zwave_js_server.client import Client as ZwaveClient
from zwave_js_server.const import ConfigurationValueType
from zwave_js_server.model.node import Node
from zwave_js_server.model.value import ConfigurationValue
@@ -12,7 +13,7 @@ from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import device_registry as dr
from .const import DOMAIN
from .const import DATA_CLIENT, DOMAIN
NODE_STATUSES = ["asleep", "awake", "dead", "alive"]
@@ -66,4 +67,9 @@ def async_bypass_dynamic_config_validation(hass: HomeAssistant, device_id: str)
),
None,
)
return not entry
if not entry:
return True
# The driver may not be ready when the config entry is loaded.
client: ZwaveClient = hass.data[DOMAIN][entry.entry_id][DATA_CLIENT]
return client.driver is None
@@ -3,7 +3,7 @@
"name": "Z-Wave",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/zwave_js",
"requirements": ["pyserial==3.5", "zwave-js-server-python==0.43.0"],
"requirements": ["pyserial==3.5", "zwave-js-server-python==0.43.1"],
"codeowners": ["@home-assistant/z-wave"],
"dependencies": ["usb", "http", "websocket_api"],
"iot_class": "local_push",
@@ -1,11 +1,13 @@
"""Helpers for Z-Wave JS custom triggers."""
from zwave_js_server.client import Client as ZwaveClient
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_DEVICE_ID, ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.typing import ConfigType
from ..const import ATTR_CONFIG_ENTRY_ID, DOMAIN
from ..const import ATTR_CONFIG_ENTRY_ID, DATA_CLIENT, DOMAIN
@callback
@@ -19,9 +21,8 @@ def async_bypass_dynamic_config_validation(
ent_reg = er.async_get(hass)
trigger_devices = config.get(ATTR_DEVICE_ID, [])
trigger_entities = config.get(ATTR_ENTITY_ID, [])
return any(
entry.state != ConfigEntryState.LOADED
and (
for entry in hass.config_entries.async_entries(DOMAIN):
if entry.state != ConfigEntryState.LOADED and (
entry.entry_id == config.get(ATTR_CONFIG_ENTRY_ID)
or any(
device.id in trigger_devices
@@ -31,6 +32,12 @@ def async_bypass_dynamic_config_validation(
entity.entity_id in trigger_entities
for entity in er.async_entries_for_config_entry(ent_reg, entry.entry_id)
)
)
for entry in hass.config_entries.async_entries(DOMAIN)
)
):
return True
# The driver may not be ready when the config entry is loaded.
client: ZwaveClient = hass.data[DOMAIN][entry.entry_id][DATA_CLIENT]
if client.driver is None:
return True
return False
+1 -1
View File
@@ -8,7 +8,7 @@ from .backports.enum import StrEnum
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2022
MINOR_VERSION: Final = 12
PATCH_VERSION: Final = "0b1"
PATCH_VERSION: Final = "0b6"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)
+6 -6
View File
@@ -4,25 +4,25 @@ aiodiscover==1.4.13
aiohttp==3.8.3
aiohttp_cors==0.7.0
astral==2.2
async-upnp-client==0.32.2
async-upnp-client==0.32.3
async_timeout==4.0.2
atomicwrites-homeassistant==1.4.1
attrs==21.2.0
awesomeversion==22.9.0
bcrypt==3.1.7
bleak-retry-connector==2.8.6
bleak-retry-connector==2.10.1
bleak==0.19.2
bluetooth-adapters==0.11.0
bluetooth-auto-recovery==0.5.3
bluetooth-adapters==0.12.0
bluetooth-auto-recovery==0.5.4
bluetooth-data-tools==0.3.0
certifi>=2021.5.30
ciso8601==2.2.0
cryptography==38.0.3
dbus-fast==1.75.0
fnvhash==0.1.0
hass-nabucasa==0.56.0
hass-nabucasa==0.59.0
home-assistant-bluetooth==1.8.1
home-assistant-frontend==20221201.1
home-assistant-frontend==20221206.0
httpx==0.23.1
ifaddr==0.1.7
janus==1.0.0
+1 -1
View File
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2022.12.0b1"
version = "2022.12.0b6"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
+21 -21
View File
@@ -37,7 +37,7 @@ PyRMVtransport==0.3.3
PySocks==1.7.1
# homeassistant.components.switchbot
PySwitchbot==0.20.5
PySwitchbot==0.22.0
# homeassistant.components.transport_nsw
PyTransportNSW==0.1.1
@@ -156,7 +156,7 @@ aioecowitt==2022.11.0
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==12.2.1
aioesphomeapi==13.0.1
# homeassistant.components.flo
aioflo==2021.11.0
@@ -174,7 +174,7 @@ aioguardian==2022.07.0
aioharmony==0.2.9
# homeassistant.components.homekit_controller
aiohomekit==2.3.3
aiohomekit==2.4.1
# homeassistant.components.emulated_hue
# homeassistant.components.http
@@ -208,7 +208,7 @@ aiolivisi==0.0.14
aiolookin==0.1.1
# homeassistant.components.lyric
aiolyric==1.0.8
aiolyric==1.0.9
# homeassistant.components.modern_forms
aiomodernforms==0.1.8
@@ -362,7 +362,7 @@ asterisk_mbox==0.5.0
# homeassistant.components.ssdp
# homeassistant.components.upnp
# homeassistant.components.yeelight
async-upnp-client==0.32.2
async-upnp-client==0.32.3
# homeassistant.components.supla
asyncpysupla==0.0.5
@@ -422,7 +422,7 @@ bimmer_connected==0.10.4
bizkaibus==0.1.1
# homeassistant.components.bluetooth
bleak-retry-connector==2.8.6
bleak-retry-connector==2.10.1
# homeassistant.components.bluetooth
bleak==0.19.2
@@ -447,10 +447,10 @@ bluemaestro-ble==0.2.0
# bluepy==1.3.0
# homeassistant.components.bluetooth
bluetooth-adapters==0.11.0
bluetooth-adapters==0.12.0
# homeassistant.components.bluetooth
bluetooth-auto-recovery==0.5.3
bluetooth-auto-recovery==0.5.4
# homeassistant.components.bluetooth
# homeassistant.components.led_ble
@@ -738,7 +738,7 @@ gTTS==2.2.4
garages-amsterdam==3.0.0
# homeassistant.components.google
gcal-sync==4.0.3
gcal-sync==4.0.4
# homeassistant.components.geniushub
geniushub-client==0.6.30
@@ -848,7 +848,7 @@ ha-philipsjs==2.9.0
habitipy==0.2.0
# homeassistant.components.cloud
hass-nabucasa==0.56.0
hass-nabucasa==0.59.0
# homeassistant.components.splunk
hass_splunk==0.1.1
@@ -884,7 +884,7 @@ hole==0.7.0
holidays==0.17.2
# homeassistant.components.frontend
home-assistant-frontend==20221201.1
home-assistant-frontend==20221206.0
# homeassistant.components.home_connect
homeconnect==0.7.2
@@ -926,7 +926,7 @@ ibm-watson==5.2.2
ibmiotf==0.3.4
# homeassistant.components.local_calendar
ical==4.1.1
ical==4.2.1
# homeassistant.components.ping
icmplib==3.0
@@ -1052,7 +1052,7 @@ london-tube-status==0.5
luftdaten==0.7.4
# homeassistant.components.lupusec
lupupy==0.1.9
lupupy==0.2.1
# homeassistant.components.lw12wifi
lw12==0.9.2
@@ -1335,7 +1335,7 @@ plexauth==0.0.6
plexwebsocket==0.0.13
# homeassistant.components.plugwise
plugwise==0.25.12
plugwise==0.25.14
# homeassistant.components.plum_lightpad
plumlightpad==0.0.11
@@ -1432,7 +1432,7 @@ pyRFXtrx==0.30.0
pySwitchmate==0.5.1
# homeassistant.components.tibber
pyTibber==0.26.1
pyTibber==0.26.3
# homeassistant.components.dlink
pyW215==0.7.0
@@ -2101,7 +2101,7 @@ pytrafikverket==0.2.2
pyudev==0.23.2
# homeassistant.components.unifiprotect
pyunifiprotect==4.5.1
pyunifiprotect==4.5.2
# homeassistant.components.uptimerobot
pyuptimerobot==22.2.0
@@ -2373,7 +2373,7 @@ streamlabswater==1.0.1
stringcase==1.2.0
# homeassistant.components.subaru
subarulink==0.6.1
subarulink==0.7.0
# homeassistant.components.solarlog
sunwatcher==0.2.1
@@ -2606,13 +2606,13 @@ xs1-api-client==3.0.0
yalesmartalarmclient==0.3.9
# homeassistant.components.yalexs_ble
yalexs-ble==1.10.0
yalexs-ble==1.10.2
# homeassistant.components.august
yalexs==1.2.6
# homeassistant.components.august
yalexs_ble==1.10.0
yalexs_ble==1.10.2
# homeassistant.components.yeelight
yeelight==0.7.10
@@ -2660,13 +2660,13 @@ zigpy-zigate==0.10.3
zigpy-znp==0.9.2
# homeassistant.components.zha
zigpy==0.52.2
zigpy==0.52.3
# homeassistant.components.zoneminder
zm-py==0.5.2
# homeassistant.components.zwave_js
zwave-js-server-python==0.43.0
zwave-js-server-python==0.43.1
# homeassistant.components.zwave_me
zwave_me_ws==0.3.0
+20 -20
View File
@@ -33,7 +33,7 @@ PyRMVtransport==0.3.3
PySocks==1.7.1
# homeassistant.components.switchbot
PySwitchbot==0.20.5
PySwitchbot==0.22.0
# homeassistant.components.transport_nsw
PyTransportNSW==0.1.1
@@ -143,7 +143,7 @@ aioecowitt==2022.11.0
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==12.2.1
aioesphomeapi==13.0.1
# homeassistant.components.flo
aioflo==2021.11.0
@@ -158,7 +158,7 @@ aioguardian==2022.07.0
aioharmony==0.2.9
# homeassistant.components.homekit_controller
aiohomekit==2.3.3
aiohomekit==2.4.1
# homeassistant.components.emulated_hue
# homeassistant.components.http
@@ -186,7 +186,7 @@ aiolivisi==0.0.14
aiolookin==0.1.1
# homeassistant.components.lyric
aiolyric==1.0.8
aiolyric==1.0.9
# homeassistant.components.modern_forms
aiomodernforms==0.1.8
@@ -316,7 +316,7 @@ arcam-fmj==1.0.1
# homeassistant.components.ssdp
# homeassistant.components.upnp
# homeassistant.components.yeelight
async-upnp-client==0.32.2
async-upnp-client==0.32.3
# homeassistant.components.sleepiq
asyncsleepiq==1.2.3
@@ -346,7 +346,7 @@ bellows==0.34.5
bimmer_connected==0.10.4
# homeassistant.components.bluetooth
bleak-retry-connector==2.8.6
bleak-retry-connector==2.10.1
# homeassistant.components.bluetooth
bleak==0.19.2
@@ -361,10 +361,10 @@ blinkpy==0.19.2
bluemaestro-ble==0.2.0
# homeassistant.components.bluetooth
bluetooth-adapters==0.11.0
bluetooth-adapters==0.12.0
# homeassistant.components.bluetooth
bluetooth-auto-recovery==0.5.3
bluetooth-auto-recovery==0.5.4
# homeassistant.components.bluetooth
# homeassistant.components.led_ble
@@ -554,7 +554,7 @@ gTTS==2.2.4
garages-amsterdam==3.0.0
# homeassistant.components.google
gcal-sync==4.0.3
gcal-sync==4.0.4
# homeassistant.components.geocaching
geocachingapi==0.2.1
@@ -640,7 +640,7 @@ ha-philipsjs==2.9.0
habitipy==0.2.0
# homeassistant.components.cloud
hass-nabucasa==0.56.0
hass-nabucasa==0.59.0
# homeassistant.components.tasmota
hatasmota==0.6.1
@@ -664,7 +664,7 @@ hole==0.7.0
holidays==0.17.2
# homeassistant.components.frontend
home-assistant-frontend==20221201.1
home-assistant-frontend==20221206.0
# homeassistant.components.home_connect
homeconnect==0.7.2
@@ -691,7 +691,7 @@ iaqualink==0.5.0
ibeacon_ble==1.0.1
# homeassistant.components.local_calendar
ical==4.1.1
ical==4.2.1
# homeassistant.components.ping
icmplib==3.0
@@ -962,7 +962,7 @@ plexauth==0.0.6
plexwebsocket==0.0.13
# homeassistant.components.plugwise
plugwise==0.25.12
plugwise==0.25.14
# homeassistant.components.plum_lightpad
plumlightpad==0.0.11
@@ -1032,7 +1032,7 @@ pyMetno==0.9.0
pyRFXtrx==0.30.0
# homeassistant.components.tibber
pyTibber==0.26.1
pyTibber==0.26.3
# homeassistant.components.nextbus
py_nextbusnext==0.1.5
@@ -1464,7 +1464,7 @@ pytrafikverket==0.2.2
pyudev==0.23.2
# homeassistant.components.unifiprotect
pyunifiprotect==4.5.1
pyunifiprotect==4.5.2
# homeassistant.components.uptimerobot
pyuptimerobot==22.2.0
@@ -1652,7 +1652,7 @@ stookalert==0.1.4
stringcase==1.2.0
# homeassistant.components.subaru
subarulink==0.6.1
subarulink==0.7.0
# homeassistant.components.solarlog
sunwatcher==0.2.1
@@ -1816,13 +1816,13 @@ xmltodict==0.13.0
yalesmartalarmclient==0.3.9
# homeassistant.components.yalexs_ble
yalexs-ble==1.10.0
yalexs-ble==1.10.2
# homeassistant.components.august
yalexs==1.2.6
# homeassistant.components.august
yalexs_ble==1.10.0
yalexs_ble==1.10.2
# homeassistant.components.yeelight
yeelight==0.7.10
@@ -1855,10 +1855,10 @@ zigpy-zigate==0.10.3
zigpy-znp==0.9.2
# homeassistant.components.zha
zigpy==0.52.2
zigpy==0.52.3
# homeassistant.components.zwave_js
zwave-js-server-python==0.43.0
zwave-js-server-python==0.43.1
# homeassistant.components.zwave_me
zwave_me_ws==0.3.0
+4
View File
@@ -219,3 +219,7 @@ class MockBleakClient(BleakClient):
async def get_services(self, *args, **kwargs):
"""Mock get_services."""
return []
async def clear_cache(self, *args, **kwargs):
"""Mock clear_cache."""
return True
+57
View File
@@ -45,6 +45,7 @@ async def test_wrapped_bleak_client_raises_device_missing(hass, enable_bluetooth
await client.connect()
assert client.is_connected is False
await client.disconnect()
assert await client.clear_cache() is False
async def test_wrapped_bleak_client_set_disconnected_callback_before_connected(
@@ -168,6 +169,62 @@ async def test_ble_device_with_proxy_client_out_of_connections(
await client.disconnect()
async def test_ble_device_with_proxy_clear_cache(hass, enable_bluetooth, one_adapter):
"""Test we can clear cache on the proxy."""
manager = _get_manager()
switchbot_proxy_device_with_connection_slot = BLEDevice(
"44:44:33:11:23:45",
"wohand",
{
"connector": HaBluetoothConnector(
MockBleakClient, "mock_bleak_client", lambda: True
),
"path": "/org/bluez/hci0/dev_44_44_33_11_23_45",
},
rssi=-30,
)
switchbot_adv = generate_advertisement_data(
local_name="wohand", service_uuids=[], manufacturer_data={1: b"\x01"}
)
class FakeScanner(BaseHaScanner):
@property
def discovered_devices_and_advertisement_data(
self,
) -> dict[str, tuple[BLEDevice, AdvertisementData]]:
"""Return a list of discovered devices."""
return {
switchbot_proxy_device_with_connection_slot.address: (
switchbot_proxy_device_with_connection_slot,
switchbot_adv,
)
}
async def async_get_device_by_address(self, address: str) -> BLEDevice | None:
"""Return a list of discovered devices."""
if address == switchbot_proxy_device_with_connection_slot.address:
return switchbot_adv
return None
scanner = FakeScanner(hass, "esp32", "esp32")
cancel = manager.async_register_scanner(scanner, True)
inject_advertisement_with_source(
hass, switchbot_proxy_device_with_connection_slot, switchbot_adv, "esp32"
)
assert manager.async_discovered_devices(True) == [
switchbot_proxy_device_with_connection_slot
]
client = HaBleakClientWrapper(switchbot_proxy_device_with_connection_slot)
await client.connect()
assert client.is_connected is True
assert await client.clear_cache() is True
await client.disconnect()
cancel()
async def test_ble_device_with_proxy_client_out_of_connections_uses_best_available(
hass, enable_bluetooth, one_adapter
):
+6 -6
View File
@@ -129,7 +129,7 @@ async def test_alexa_config_report_state(hass, cloud_prefs, cloud_stub):
async def test_alexa_config_invalidate_token(hass, cloud_prefs, aioclient_mock):
"""Test Alexa config should expose using prefs."""
aioclient_mock.post(
"http://example/alexa_token",
"https://example/access_token",
json={
"access_token": "mock-token",
"event_endpoint": "http://example.com/alexa_endpoint",
@@ -142,7 +142,7 @@ async def test_alexa_config_invalidate_token(hass, cloud_prefs, aioclient_mock):
"mock-user-id",
cloud_prefs,
Mock(
alexa_access_token_url="http://example/alexa_token",
alexa_server="example",
auth=Mock(async_check_token=AsyncMock()),
websession=async_get_clientsession(hass),
),
@@ -181,7 +181,7 @@ async def test_alexa_config_fail_refresh_token(
"""Test Alexa config failing to refresh token."""
aioclient_mock.post(
"http://example/alexa_token",
"https://example/access_token",
json={
"access_token": "mock-token",
"event_endpoint": "http://example.com/alexa_endpoint",
@@ -198,7 +198,7 @@ async def test_alexa_config_fail_refresh_token(
"mock-user-id",
cloud_prefs,
Mock(
alexa_access_token_url="http://example/alexa_token",
alexa_server="example",
auth=Mock(async_check_token=AsyncMock()),
websession=async_get_clientsession(hass),
),
@@ -228,7 +228,7 @@ async def test_alexa_config_fail_refresh_token(
conf.async_invalidate_access_token()
aioclient_mock.clear_requests()
aioclient_mock.post(
"http://example/alexa_token",
"https://example/access_token",
json={"reason": reject_reason},
status=400,
)
@@ -254,7 +254,7 @@ async def test_alexa_config_fail_refresh_token(
# State reporting should now be re-enabled for Alexa
aioclient_mock.clear_requests()
aioclient_mock.post(
"http://example/alexa_token",
"https://example/access_token",
json={
"access_token": "mock-token",
"event_endpoint": "http://example.com/alexa_endpoint",
+3 -3
View File
@@ -21,7 +21,7 @@ from . import mock_cloud, mock_cloud_prefs
from tests.components.google_assistant import MockConfig
SUBSCRIPTION_INFO_URL = "https://api-test.hass.io/subscription_info"
SUBSCRIPTION_INFO_URL = "https://api-test.hass.io/payments/subscription_info"
@pytest.fixture(name="mock_cloud_login")
@@ -48,8 +48,8 @@ def setup_api_fixture(hass, aioclient_mock):
"cognito_client_id": "cognito_client_id",
"user_pool_id": "user_pool_id",
"region": "region",
"relayer": "relayer",
"subscription_info_url": SUBSCRIPTION_INFO_URL,
"relayer_server": "relayer",
"accounts_server": "api-test.hass.io",
"google_actions": {"filter": {"include_domains": "light"}},
"alexa": {
"filter": {"include_entities": ["light.kitchen", "switch.ac"]}
+15 -18
View File
@@ -26,13 +26,13 @@ async def test_constructor_loads_info_from_config(hass):
"cognito_client_id": "test-cognito_client_id",
"user_pool_id": "test-user_pool_id",
"region": "test-region",
"relayer": "test-relayer",
"subscription_info_url": "http://test-subscription-info-url",
"cloudhook_create_url": "http://test-cloudhook_create_url",
"remote_api_url": "http://test-remote_api_url",
"alexa_access_token_url": "http://test-alexa-token-url",
"acme_directory_server": "http://test-acme-directory-server",
"google_actions_report_state_url": "http://test-google-actions-report-state-url",
"relayer_server": "test-relayer-server",
"accounts_server": "test-acounts-server",
"cloudhook_server": "test-cloudhook-server",
"remote_sni_server": "test-remote-sni-server",
"alexa_server": "test-alexa-server",
"acme_server": "test-acme-server",
"remotestate_server": "test-remotestate-server",
},
},
)
@@ -43,16 +43,13 @@ async def test_constructor_loads_info_from_config(hass):
assert cl.cognito_client_id == "test-cognito_client_id"
assert cl.user_pool_id == "test-user_pool_id"
assert cl.region == "test-region"
assert cl.relayer == "test-relayer"
assert cl.subscription_info_url == "http://test-subscription-info-url"
assert cl.cloudhook_create_url == "http://test-cloudhook_create_url"
assert cl.remote_api_url == "http://test-remote_api_url"
assert cl.alexa_access_token_url == "http://test-alexa-token-url"
assert cl.acme_directory_server == "http://test-acme-directory-server"
assert (
cl.google_actions_report_state_url
== "http://test-google-actions-report-state-url"
)
assert cl.relayer_server == "test-relayer-server"
assert cl.iot.ws_server_url == "wss://test-relayer-server/websocket"
assert cl.accounts_server == "test-acounts-server"
assert cl.cloudhook_server == "test-cloudhook-server"
assert cl.alexa_server == "test-alexa-server"
assert cl.acme_server == "test-acme-server"
assert cl.remotestate_server == "test-remotestate-server"
async def test_remote_services(hass, mock_cloud_fixture, hass_read_only_user):
@@ -120,7 +117,7 @@ async def test_setup_existing_cloud_user(hass, hass_storage):
"cognito_client_id": "test-cognito_client_id",
"user_pool_id": "test-user_pool_id",
"region": "test-region",
"relayer": "test-relayer",
"relayer_server": "test-relayer-serer",
},
},
)
+3 -3
View File
@@ -13,7 +13,7 @@ from tests.common import get_system_health_info
async def test_cloud_system_health(hass, aioclient_mock):
"""Test cloud system health."""
aioclient_mock.get("https://cloud.bla.com/status", text="")
aioclient_mock.get("https://cert-server", text="")
aioclient_mock.get("https://cert-server/directory", text="")
aioclient_mock.get(
"https://cognito-idp.us-east-1.amazonaws.com/AAAA/.well-known/jwks.json",
exc=ClientError,
@@ -25,8 +25,8 @@ async def test_cloud_system_health(hass, aioclient_mock):
hass.data["cloud"] = Mock(
region="us-east-1",
user_pool_id="AAAA",
relayer="wss://cloud.bla.com/websocket_api",
acme_directory_server="https://cert-server",
relayer_server="cloud.bla.com",
acme_server="cert-server",
is_logged_in=True,
remote=Mock(is_connected=False, snitun_server="us-west-1"),
expiration_date=now,
@@ -559,6 +559,53 @@ async def test_reauth_confirm_invalid(hass, mock_client, mock_zeroconf):
assert result["errors"]
assert result["errors"]["base"] == "invalid_psk"
mock_client.device_info = AsyncMock(return_value=MockDeviceInfo(False, "test"))
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_NOISE_PSK: VALID_NOISE_PSK}
)
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "reauth_successful"
assert entry.data[CONF_NOISE_PSK] == VALID_NOISE_PSK
async def test_reauth_confirm_invalid_with_unique_id(hass, mock_client, mock_zeroconf):
"""Test reauth initiation with invalid PSK."""
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: "127.0.0.1", CONF_PORT: 6053, CONF_PASSWORD: ""},
unique_id="test",
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
"esphome",
context={
"source": config_entries.SOURCE_REAUTH,
"entry_id": entry.entry_id,
"unique_id": entry.unique_id,
},
)
mock_client.device_info.side_effect = InvalidEncryptionKeyAPIError
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_NOISE_PSK: INVALID_NOISE_PSK}
)
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "reauth_confirm"
assert result["errors"]
assert result["errors"]["base"] == "invalid_psk"
mock_client.device_info = AsyncMock(return_value=MockDeviceInfo(False, "test"))
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_NOISE_PSK: VALID_NOISE_PSK}
)
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "reauth_successful"
assert entry.data[CONF_NOISE_PSK] == VALID_NOISE_PSK
async def test_discovery_dhcp_updates_host(hass, mock_client):
"""Test dhcp discovery updates host and aborts."""
+1
View File
@@ -138,6 +138,7 @@ MOCK_FB_SERVICES: dict[str, dict] = {
"NewUptime": 35307,
},
"GetExternalIPAddress": {"NewExternalIPAddress": "1.2.3.4"},
"X_AVM_DE_GetExternalIPv6Address": {"NewExternalIPv6Address": "fec0::1"},
},
"WANPPPConnection1": {
"GetInfo": {
+4
View File
@@ -35,6 +35,10 @@ SENSOR_STATES: dict[str, dict[str, Any]] = {
ATTR_STATE: "1.2.3.4",
ATTR_ICON: "mdi:earth",
},
"sensor.mock_title_external_ipv6": {
ATTR_STATE: "fec0::1",
ATTR_ICON: "mdi:earth",
},
"sensor.mock_title_device_uptime": {
# ATTR_STATE: "2022-02-05T17:46:04+00:00",
ATTR_DEVICE_CLASS: SensorDeviceClass.TIMESTAMP,
+1 -1
View File
@@ -77,7 +77,7 @@ async def test_sensors(
assert state.attributes.get(ATTR_STATE_CLASS) is None
state = hass.states.get(f"sensor.{DEFAULT_NAME}_time_to_empty_full")
assert state.state == "-1"
assert state.attributes.get(ATTR_DEVICE_CLASS) == TIME_MINUTES
assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DURATION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TIME_MINUTES
assert state.attributes.get(ATTR_STATE_CLASS) is None
state = hass.states.get(f"sensor.{DEFAULT_NAME}_temperature")
@@ -330,7 +330,7 @@ async def test_destination_entity_not_found(hass: HomeAssistant, caplog):
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
assert "device_tracker.test are not valid coordinates" in caplog.text
assert "Could not find entity device_tracker.test" in caplog.text
@pytest.mark.usefixtures("valid_response")
@@ -356,7 +356,7 @@ async def test_origin_entity_not_found(hass: HomeAssistant, caplog):
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
assert "device_tracker.test are not valid coordinates" in caplog.text
assert "Could not find entity device_tracker.test" in caplog.text
@pytest.mark.usefixtures("valid_response")
@@ -386,7 +386,9 @@ async def test_invalid_destination_entity_state(hass: HomeAssistant, caplog):
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
assert "test_state are not valid coordinates" in caplog.text
assert (
"device_tracker.test does not have valid coordinates: test_state" in caplog.text
)
@pytest.mark.usefixtures("valid_response")
@@ -416,7 +418,9 @@ async def test_invalid_origin_entity_state(hass: HomeAssistant, caplog):
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
assert "test_state are not valid coordinates" in caplog.text
assert (
"device_tracker.test does not have valid coordinates: test_state" in caplog.text
)
async def test_route_not_found(hass: HomeAssistant, caplog):
+37 -52
View File
@@ -93,6 +93,7 @@ class TestHtml5Notify:
def test_dismissing_message(self, mock_wp):
"""Test dismissing message."""
hass = MagicMock()
mock_wp().send().status_code = 201
data = {"device": SUBSCRIPTION_1}
@@ -104,15 +105,13 @@ class TestHtml5Notify:
service.dismiss(target=["device", "non_existing"], data={"tag": "test"})
assert len(mock_wp.mock_calls) == 3
assert len(mock_wp.mock_calls) == 4
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_1["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_1["subscription"]
# Call to send
payload = json.loads(mock_wp.mock_calls[1][1][0])
payload = json.loads(mock_wp.mock_calls[3][1][0])
assert payload["dismiss"] is True
assert payload["tag"] == "test"
@@ -121,6 +120,7 @@ class TestHtml5Notify:
def test_sending_message(self, mock_wp):
"""Test sending message."""
hass = MagicMock()
mock_wp().send().status_code = 201
data = {"device": SUBSCRIPTION_1}
@@ -134,15 +134,13 @@ class TestHtml5Notify:
"Hello", target=["device", "non_existing"], data={"icon": "beer.png"}
)
assert len(mock_wp.mock_calls) == 3
assert len(mock_wp.mock_calls) == 4
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_1["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_1["subscription"]
# Call to send
payload = json.loads(mock_wp.mock_calls[1][1][0])
payload = json.loads(mock_wp.mock_calls[3][1][0])
assert payload["body"] == "Hello"
assert payload["icon"] == "beer.png"
@@ -151,6 +149,7 @@ class TestHtml5Notify:
def test_gcm_key_include(self, mock_wp):
"""Test if the gcm_key is only included for GCM endpoints."""
hass = MagicMock()
mock_wp().send().status_code = 201
data = {"chrome": SUBSCRIPTION_1, "firefox": SUBSCRIPTION_2}
@@ -167,21 +166,18 @@ class TestHtml5Notify:
assert len(mock_wp.mock_calls) == 6
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_1["subscription"]
assert mock_wp.mock_calls[3][1][0] == SUBSCRIPTION_2["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
assert mock_wp.mock_calls[5][0] == "().send().status_code.__eq__"
assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_1["subscription"]
assert mock_wp.mock_calls[4][1][0] == SUBSCRIPTION_2["subscription"]
# Get the keys passed to the WebPusher's send method
assert mock_wp.mock_calls[1][2]["gcm_key"] is not None
assert mock_wp.mock_calls[4][2]["gcm_key"] is None
assert mock_wp.mock_calls[3][2]["gcm_key"] is not None
assert mock_wp.mock_calls[5][2]["gcm_key"] is None
@patch("homeassistant.components.html5.notify.WebPusher")
def test_fcm_key_include(self, mock_wp):
"""Test if the FCM header is included."""
hass = MagicMock()
mock_wp().send().status_code = 201
data = {"chrome": SUBSCRIPTION_5}
@@ -193,20 +189,18 @@ class TestHtml5Notify:
service.send_message("Hello", target=["chrome"])
assert len(mock_wp.mock_calls) == 3
assert len(mock_wp.mock_calls) == 4
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_5["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"]
# Get the keys passed to the WebPusher's send method
assert mock_wp.mock_calls[1][2]["headers"]["Authorization"] is not None
assert mock_wp.mock_calls[3][2]["headers"]["Authorization"] is not None
@patch("homeassistant.components.html5.notify.WebPusher")
def test_fcm_send_with_unknown_priority(self, mock_wp):
"""Test if the gcm_key is only included for GCM endpoints."""
hass = MagicMock()
mock_wp().send().status_code = 201
data = {"chrome": SUBSCRIPTION_5}
@@ -218,20 +212,18 @@ class TestHtml5Notify:
service.send_message("Hello", target=["chrome"], priority="undefined")
assert len(mock_wp.mock_calls) == 3
assert len(mock_wp.mock_calls) == 4
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_5["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"]
# Get the keys passed to the WebPusher's send method
assert mock_wp.mock_calls[1][2]["headers"]["priority"] == "normal"
assert mock_wp.mock_calls[3][2]["headers"]["priority"] == "normal"
@patch("homeassistant.components.html5.notify.WebPusher")
def test_fcm_no_targets(self, mock_wp):
"""Test if the gcm_key is only included for GCM endpoints."""
hass = MagicMock()
mock_wp().send().status_code = 201
data = {"chrome": SUBSCRIPTION_5}
@@ -243,20 +235,18 @@ class TestHtml5Notify:
service.send_message("Hello")
assert len(mock_wp.mock_calls) == 3
assert len(mock_wp.mock_calls) == 4
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_5["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"]
# Get the keys passed to the WebPusher's send method
assert mock_wp.mock_calls[1][2]["headers"]["priority"] == "normal"
assert mock_wp.mock_calls[3][2]["headers"]["priority"] == "normal"
@patch("homeassistant.components.html5.notify.WebPusher")
def test_fcm_additional_data(self, mock_wp):
"""Test if the gcm_key is only included for GCM endpoints."""
hass = MagicMock()
mock_wp().send().status_code = 201
data = {"chrome": SUBSCRIPTION_5}
@@ -268,21 +258,18 @@ class TestHtml5Notify:
service.send_message("Hello", data={"mykey": "myvalue"})
assert len(mock_wp.mock_calls) == 3
assert len(mock_wp.mock_calls) == 4
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_5["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"]
# Get the keys passed to the WebPusher's send method
assert mock_wp.mock_calls[1][2]["headers"]["priority"] == "normal"
assert mock_wp.mock_calls[3][2]["headers"]["priority"] == "normal"
def test_create_vapid_withoutvapid():
"""Test creating empty vapid."""
resp = html5.create_vapid_headers(
vapid_email=None, vapid_private_key=None, subscription_info=None
vapid_email=None, vapid_private_key=None, subscription_info=None, timestamp=None
)
assert resp is None
@@ -478,6 +465,7 @@ async def test_callback_view_with_jwt(hass, hass_client):
client = await mock_client(hass, hass_client, registrations)
with patch("homeassistant.components.html5.notify.WebPusher") as mock_wp:
mock_wp().send().status_code = 201
await hass.services.async_call(
"notify",
"notify",
@@ -485,15 +473,13 @@ async def test_callback_view_with_jwt(hass, hass_client):
blocking=True,
)
assert len(mock_wp.mock_calls) == 3
assert len(mock_wp.mock_calls) == 4
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_1["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_1["subscription"]
# Call to send
push_payload = json.loads(mock_wp.mock_calls[1][1][0])
push_payload = json.loads(mock_wp.mock_calls[3][1][0])
assert push_payload["body"] == "Hello"
assert push_payload["icon"] == "beer.png"
@@ -514,6 +500,7 @@ async def test_send_fcm_without_targets(hass, hass_client):
registrations = {"device": SUBSCRIPTION_5}
await mock_client(hass, hass_client, registrations)
with patch("homeassistant.components.html5.notify.WebPusher") as mock_wp:
mock_wp().send().status_code = 201
await hass.services.async_call(
"notify",
"notify",
@@ -521,12 +508,10 @@ async def test_send_fcm_without_targets(hass, hass_client):
blocking=True,
)
assert len(mock_wp.mock_calls) == 3
assert len(mock_wp.mock_calls) == 4
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_5["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"]
async def test_send_fcm_expired(hass, hass_client):
+172 -9
View File
@@ -1,8 +1,8 @@
"""Test the KNX config flow."""
from unittest.mock import patch
from unittest.mock import Mock, patch
import pytest
from xknx.exceptions.exception import InvalidSecureConfiguration
from xknx.exceptions.exception import CommunicationError, InvalidSecureConfiguration
from xknx.io import DEFAULT_MCAST_GRP, DEFAULT_MCAST_PORT
from xknx.io.gateway_scanner import GatewayDescriptor
@@ -441,7 +441,11 @@ async def test_routing_secure_keyfile(
return_value=GatewayScannerMock(),
)
async def test_tunneling_setup_manual(
gateway_scanner_mock, hass: HomeAssistant, knx_setup, user_input, config_entry_data
_gateway_scanner_mock,
hass: HomeAssistant,
knx_setup,
user_input,
config_entry_data,
) -> None:
"""Test tunneling if no gateway was found found (or `manual` option was chosen)."""
result = await hass.config_entries.flow.async_init(
@@ -460,11 +464,21 @@ async def test_tunneling_setup_manual(
assert result2["step_id"] == "manual_tunnel"
assert result2["errors"] == {"base": "no_tunnel_discovered"}
result3 = await hass.config_entries.flow.async_configure(
result2["flow_id"],
user_input,
)
await hass.async_block_till_done()
with patch(
"homeassistant.components.knx.config_flow.request_description",
return_value=_gateway_descriptor(
user_input[CONF_HOST],
user_input[CONF_PORT],
supports_tunnelling_tcp=(
user_input[CONF_KNX_TUNNELING_TYPE] == CONF_KNX_TUNNELING_TCP
),
),
):
result3 = await hass.config_entries.flow.async_configure(
result2["flow_id"],
user_input,
)
await hass.async_block_till_done()
assert result3["type"] == FlowResultType.CREATE_ENTRY
assert result3["title"] == "Tunneling @ 192.168.0.1"
assert result3["data"] == config_entry_data
@@ -475,8 +489,146 @@ async def test_tunneling_setup_manual(
"homeassistant.components.knx.config_flow.GatewayScanner",
return_value=GatewayScannerMock(),
)
async def test_tunneling_setup_manual_request_description_error(
_gateway_scanner_mock,
hass: HomeAssistant,
knx_setup,
) -> None:
"""Test tunneling if no gateway was found found (or `manual` option was chosen)."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_KNX_CONNECTION_TYPE: CONF_KNX_TUNNELING,
},
)
assert result["step_id"] == "manual_tunnel"
assert result["errors"] == {"base": "no_tunnel_discovered"}
# TCP configured but not supported by gateway
with patch(
"homeassistant.components.knx.config_flow.request_description",
return_value=_gateway_descriptor(
"192.168.0.1",
3671,
supports_tunnelling_tcp=False,
),
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_KNX_TUNNELING_TYPE: CONF_KNX_TUNNELING_TCP,
CONF_HOST: "192.168.0.1",
CONF_PORT: 3671,
},
)
assert result["step_id"] == "manual_tunnel"
assert result["errors"] == {
"base": "no_tunnel_discovered",
"tunneling_type": "unsupported_tunnel_type",
}
# TCP configured but Secure required by gateway
with patch(
"homeassistant.components.knx.config_flow.request_description",
return_value=_gateway_descriptor(
"192.168.0.1",
3671,
supports_tunnelling_tcp=True,
requires_secure=True,
),
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_KNX_TUNNELING_TYPE: CONF_KNX_TUNNELING_TCP,
CONF_HOST: "192.168.0.1",
CONF_PORT: 3671,
},
)
assert result["step_id"] == "manual_tunnel"
assert result["errors"] == {
"base": "no_tunnel_discovered",
"tunneling_type": "unsupported_tunnel_type",
}
# Secure configured but not enabled on gateway
with patch(
"homeassistant.components.knx.config_flow.request_description",
return_value=_gateway_descriptor(
"192.168.0.1",
3671,
supports_tunnelling_tcp=True,
requires_secure=False,
),
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_KNX_TUNNELING_TYPE: CONF_KNX_TUNNELING_TCP_SECURE,
CONF_HOST: "192.168.0.1",
CONF_PORT: 3671,
},
)
assert result["step_id"] == "manual_tunnel"
assert result["errors"] == {
"base": "no_tunnel_discovered",
"tunneling_type": "unsupported_tunnel_type",
}
# No connection to gateway
with patch(
"homeassistant.components.knx.config_flow.request_description",
side_effect=CommunicationError(""),
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_KNX_TUNNELING_TYPE: CONF_KNX_TUNNELING_TCP,
CONF_HOST: "192.168.0.1",
CONF_PORT: 3671,
},
)
assert result["step_id"] == "manual_tunnel"
assert result["errors"] == {"base": "cannot_connect"}
# OK configuration
with patch(
"homeassistant.components.knx.config_flow.request_description",
return_value=_gateway_descriptor(
"192.168.0.1",
3671,
supports_tunnelling_tcp=True,
),
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_KNX_TUNNELING_TYPE: CONF_KNX_TUNNELING_TCP,
CONF_HOST: "192.168.0.1",
CONF_PORT: 3671,
},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["title"] == "Tunneling @ 192.168.0.1"
assert result["data"] == {
**DEFAULT_ENTRY_DATA,
CONF_KNX_CONNECTION_TYPE: CONF_KNX_TUNNELING_TCP,
CONF_HOST: "192.168.0.1",
CONF_PORT: 3671,
}
knx_setup.assert_called_once()
@patch(
"homeassistant.components.knx.config_flow.GatewayScanner",
return_value=GatewayScannerMock(),
)
@patch(
"homeassistant.components.knx.config_flow.request_description",
return_value=_gateway_descriptor("192.168.0.2", 3675),
)
async def test_tunneling_setup_for_local_ip(
gateway_scanner_mock, hass: HomeAssistant, knx_setup
_request_description_mock, _gateway_scanner_mock, hass: HomeAssistant, knx_setup
) -> None:
"""Test tunneling if only one gateway is found."""
result = await hass.config_entries.flow.async_init(
@@ -715,7 +867,17 @@ async def _get_menu_step(hass: HomeAssistant) -> FlowResult:
return result3
@patch(
"homeassistant.components.knx.config_flow.request_description",
return_value=_gateway_descriptor(
"192.168.0.1",
3675,
supports_tunnelling_tcp=True,
requires_secure=True,
),
)
async def test_get_secure_menu_step_manual_tunnelling(
_request_description_mock,
hass: HomeAssistant,
):
"""Test flow reaches secure_tunnellinn menu step from manual tunnelling configuration."""
@@ -908,6 +1070,7 @@ async def test_options_flow_connection_type(
gateway = _gateway_descriptor("192.168.0.1", 3675)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
hass.data[DOMAIN] = Mock() # GatewayScanner uses running XKNX() in options flow
menu_step = await hass.config_entries.options.async_init(mock_config_entry.entry_id)
with patch(
@@ -612,3 +612,96 @@ async def test_all_day_iter_order(
events = await get_events("2022-10-06T00:00:00Z", "2022-10-09T00:00:00Z")
assert [event["summary"] for event in events] == event_order
async def test_start_end_types(
ws_client: ClientFixture,
setup_integration: None,
):
"""Test a start and end with different date and date time types."""
client = await ws_client()
result = await client.cmd(
"create",
{
"entity_id": TEST_ENTITY,
"event": {
"summary": "Bastille Day Party",
"dtstart": "1997-07-15",
"dtend": "1997-07-14T17:00:00+00:00",
},
},
)
assert not result.get("success")
assert "error" in result
assert "code" in result.get("error")
assert result["error"]["code"] == "invalid_format"
async def test_end_before_start(
ws_client: ClientFixture,
setup_integration: None,
):
"""Test an event with a start/end date time."""
client = await ws_client()
result = await client.cmd(
"create",
{
"entity_id": TEST_ENTITY,
"event": {
"summary": "Bastille Day Party",
"dtstart": "1997-07-15T04:00:00+00:00",
"dtend": "1997-07-14T17:00:00+00:00",
},
},
)
assert not result.get("success")
assert "error" in result
assert "code" in result.get("error")
assert result["error"]["code"] == "invalid_format"
async def test_invalid_recurrence_rule(
ws_client: ClientFixture,
setup_integration: None,
):
"""Test an event with a recurrence rule."""
client = await ws_client()
result = await client.cmd(
"create",
{
"entity_id": TEST_ENTITY,
"event": {
"summary": "Monday meeting",
"dtstart": "2022-08-29T09:00:00",
"dtend": "2022-08-29T10:00:00",
"rrule": "FREQ=invalid;'",
},
},
)
assert not result.get("success")
assert "error" in result
assert "code" in result.get("error")
assert result["error"]["code"] == "invalid_format"
async def test_invalid_date_formats(
ws_client: ClientFixture, setup_integration: None, get_events: GetEventsFn
):
"""Exercises a validation error within rfc5545 parsing in ical."""
client = await ws_client()
result = await client.cmd(
"create",
{
"entity_id": TEST_ENTITY,
"event": {
"summary": "Bastille Day Party",
# Can't mix offset aware and floating dates
"dtstart": "1997-07-15T04:00:00+08:00",
"dtend": "1997-07-14T17:00:00",
},
},
)
assert not result.get("success")
assert "error" in result
assert "code" in result.get("error")
assert result["error"]["code"] == "invalid_format"
+39 -111
View File
@@ -1,100 +1,19 @@
"""Provide common test tools."""
from __future__ import annotations
import asyncio
from functools import cache
import json
import logging
from typing import TYPE_CHECKING, Any
from unittest.mock import Mock, patch
from typing import Any
from unittest.mock import MagicMock
from matter_server.client import MatterClient
from matter_server.common.helpers.util import dataclass_from_dict
from matter_server.common.models.events import EventType
from matter_server.common.models.node import MatterNode
from matter_server.common.models.server_information import ServerInfo
import pytest
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry, load_fixture
if TYPE_CHECKING:
from homeassistant.core import HomeAssistant
MOCK_FABRIC_ID = 12341234
MOCK_COMPR_FABRIC_ID = 1234
# TEMP: Tests need to be fixed
pytestmark = pytest.mark.skip("all tests still WIP")
class MockClient(MatterClient):
"""Represent a mock Matter client."""
mock_client_disconnect: asyncio.Event
mock_commands: dict[type, Any] = {}
mock_sent_commands: list[dict[str, Any]] = []
def __init__(self) -> None:
"""Initialize the mock client."""
super().__init__("mock-url", None)
self.mock_commands: dict[type, Any] = {}
self.mock_sent_commands = []
self.server_info = ServerInfo(
fabric_id=MOCK_FABRIC_ID, compressed_fabric_id=MOCK_COMPR_FABRIC_ID
)
async def connect(self) -> None:
"""Connect to the Matter server."""
self.server_info = Mock(compressed_abric_d=MOCK_COMPR_FABRIC_ID)
async def listen(self, driver_ready: asyncio.Event) -> None:
"""Listen for events."""
driver_ready.set()
self.mock_client_disconnect = asyncio.Event()
await self.mock_client_disconnect.wait()
def mock_command(self, command_type: type, response: Any) -> None:
"""Mock a command."""
self.mock_commands[command_type] = response
async def async_send_command(
self,
command: str,
args: dict[str, Any],
require_schema: int | None = None,
) -> dict:
"""Send mock commands."""
if command == "device_controller.SendCommand" and (
(cmd_type := type(args.get("payload"))) in self.mock_commands
):
self.mock_sent_commands.append(args)
return self.mock_commands[cmd_type]
return await super().async_send_command(command, args, require_schema)
async def async_send_command_no_wait(
self, command: str, args: dict[str, Any], require_schema: int | None = None
) -> None:
"""Send a command without waiting for the response."""
if command == "SendCommand" and (
(cmd_type := type(args.get("payload"))) in self.mock_commands
):
self.mock_sent_commands.append(args)
return self.mock_commands[cmd_type]
return await super().async_send_command_no_wait(command, args, require_schema)
@pytest.fixture
async def mock_matter() -> Mock:
"""Mock matter fixture."""
return await get_mock_matter()
async def get_mock_matter() -> Mock:
"""Get mock Matter."""
return Mock(
adapter=Mock(logger=logging.getLogger("mock_matter")), client=MockClient()
)
@cache
def load_node_fixture(fixture: str) -> str:
@@ -108,39 +27,48 @@ def load_and_parse_node_fixture(fixture: str) -> dict[str, Any]:
async def setup_integration_with_node_fixture(
hass: HomeAssistant, hass_storage: dict[str, Any], node_fixture: str
hass: HomeAssistant,
node_fixture: str,
client: MagicMock,
) -> MatterNode:
"""Set up Matter integration with fixture as node."""
node_data = load_and_parse_node_fixture(node_fixture)
node = MatterNode(
await get_mock_matter(),
node = dataclass_from_dict(
MatterNode,
node_data,
)
client.get_nodes.return_value = [node]
client.get_node.return_value = node
config_entry = MockConfigEntry(
domain="matter", data={"url": "http://mock-matter-server-url"}
)
config_entry.add_to_hass(hass)
storage_key = f"matter_{config_entry.entry_id}"
hass_storage[storage_key] = {
"version": 1,
"minor_version": 0,
"key": storage_key,
"data": {
"compressed_fabric_id": MOCK_COMPR_FABRIC_ID,
"next_node_id": 4339,
"nodes": {str(node.node_id): node_data},
},
}
with patch(
"matter_server.client.matter.Client", return_value=node.matter.client
), patch(
"matter_server.client.model.node.MatterDeviceTypeInstance.subscribe_updates",
), patch(
"matter_server.client.model.node.MatterDeviceTypeInstance.update_attributes"
):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
return node
def set_node_attribute(
node: MatterNode,
endpoint: int,
cluster_id: int,
attribute_id: int,
value: Any,
) -> None:
"""Set a node attribute."""
attribute = node.attributes[f"{endpoint}/{cluster_id}/{attribute_id}"]
attribute.value = value
async def trigger_subscription_callback(
hass: HomeAssistant,
client: MagicMock,
event: EventType = EventType.ATTRIBUTE_UPDATED,
data: Any = None,
) -> None:
"""Trigger a subscription callback."""
callback = client.subscribe.call_args[0][0]
callback(event, data)
await hass.async_block_till_done()

Some files were not shown because too many files have changed in this diff Show More