Compare commits

..

35 Commits

Author SHA1 Message Date
Paulus Schoutsen 590d47aad0 Bumped version to 2022.10.0b3 2022-10-01 21:28:46 -04:00
J. Nick Koston 6c7060e0e2 Bump ibeacon-ble to 0.7.3 (#79443) 2022-10-01 21:28:40 -04:00
J. Nick Koston 2fed773b93 Bump bluetooth-adapters to 0.5.3 (#79442) 2022-10-01 21:28:39 -04:00
J. Nick Koston 71e320fc96 Bump dbus-fast to 1.18.0 (#79440)
Changelog: https://github.com/Bluetooth-Devices/dbus-fast/compare/v1.17.0...v1.18.0
2022-10-01 21:28:39 -04:00
Tobias Sauerwein de90358f2a Fix Netatmo scope issue with HA cloud (#79437)
Co-authored-by: Paulus Schoutsen <balloob@gmail.com>
2022-10-01 21:28:38 -04:00
Michael 2703bbc630 Fix checking of upgrade API availability during setup of Synology DSM integration (#79435) 2022-10-01 21:28:37 -04:00
Matrix a3833a408b Fix mqtt reconnect fail when token expired (#79428)
* fix mqtt reconnect fail when token expired

* suggest change
2022-10-01 21:28:37 -04:00
Hans Oischinger 933b84050e vicare: Don't create unsupported button entities (#79425)
Button entities should only be offered when the datapoint exists on
the API.
2022-10-01 21:28:36 -04:00
Shay Levy 046c3b4dd1 Bump aiowebostv to 0.2.1 (#79423) 2022-10-01 21:28:35 -04:00
Mick Vleeshouwer 7e8905758b Fix low speed cover in Overkiz integration (#79416)
Fix low speed cover
2022-10-01 21:28:35 -04:00
Shay Levy 97f5670fdc Fix unifiprotect test failing CI (#79406) 2022-10-01 21:28:34 -04:00
Maciej Bieniek b3c43b981a Do not use AQI device class for CAQI sensor in Airly integration (#79402) 2022-10-01 21:28:33 -04:00
uvjustin ebb213eb14 Fix onvif snapshot fallback (#79394)
Co-authored-by: Franck Nijhof <frenck@frenck.nl>
2022-10-01 21:28:33 -04:00
J. Nick Koston a0ba102492 Ensure bluetooth disconnect callback fires if esphome config entry is reloaded (#79389) 2022-10-01 21:28:32 -04:00
J. Nick Koston be036914a6 Improve robustness of linking homekit yaml to config entries (#79386) 2022-10-01 21:28:31 -04:00
G Johansson ecb934ee6a Fix _attr_name issue in Yale Smart Alarm (#79378)
Fix name issue
2022-10-01 21:28:31 -04:00
J. Nick Koston 783f514df3 Enable delete device support for iBeacon (#79339) 2022-10-01 21:28:30 -04:00
kingy444 2e2d8d1367 Powerview bump aiopvapi to 2.0.2 (#79274) 2022-10-01 21:28:29 -04:00
Allen Porter 1836dc4a3b Address Google Sheets PR feedback (#78889) 2022-10-01 21:28:29 -04:00
Paulus Schoutsen 62679950b1 Bumped version to 2022.10.0b2 2022-09-30 14:47:22 -04:00
Shay Levy 4db4ab1ab0 Make Shelly update sensors disabled by default (#79376) 2022-09-30 14:45:20 -04:00
Joakim Plate 2436e375bb Fjaraskupan stop on 0 percentage (#79367)
* Make sure fan turns off on 0 percentage

* Remember old percentage
2022-09-30 14:45:19 -04:00
epenet ac8805601d Realign util constants with 2022.9.7 (#79357) 2022-09-30 14:45:18 -04:00
epenet a5b8ec7113 Make temperature conversions private (#79349) 2022-09-30 14:45:17 -04:00
epenet 7e2af8685a Adjust icons with new device classes (#79348)
* Adjust icons with new device classes

* Fix mysensors tests

* Fix mysensors tests
2022-09-30 14:45:17 -04:00
Zack Barett 5bab83511a Add Third Reality to Zigbee Iot standards (#79341) 2022-09-30 14:45:16 -04:00
J. Nick Koston 3d4e5b88e0 Bump govee-ble to 0.19.1 to handle another H5181 (#79340)
fixes #79188
2022-09-30 14:45:15 -04:00
J. Nick Koston dcae000683 Remove iBeacon devices that rotate their major,minor and mac (#79338) 2022-09-30 14:45:15 -04:00
J. Nick Koston ffaa277f18 Switch to using new esphome bluetooth_proxy_version field (#79331) 2022-09-30 14:45:14 -04:00
Duco Sebel ffb8be167f Use SensorDeviceClass.VOLUME in HomeWizard (#79323)
Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>
2022-09-30 14:45:14 -04:00
J. Nick Koston 5c86e47e99 Handle short local names from esphome proxies (#79321) 2022-09-30 14:45:13 -04:00
Robert Hillis ba8b01597f Unregister Google sheets services during unload (#79314)
* Unregister services during unload - Google Sheets

* uno mas
2022-09-30 14:45:12 -04:00
Erik Montnemery 54ba4a74bc Improve naming of units used in statistics (#79276) 2022-09-30 14:45:11 -04:00
uvjustin adc5e9f215 Mask spotify content in owntone library (#79247) 2022-09-30 14:45:11 -04:00
Paulus Schoutsen e3d4a7cf22 Store alternative domain for Zeroconf homekit discovery (#79240) 2022-09-30 14:45:10 -04:00
79 changed files with 906 additions and 306 deletions
+5
View File
@@ -0,0 +1,5 @@
{
"domain": "third_reality",
"name": "Third Reality",
"iot_standards": ["zigbee"]
}
+1 -1
View File
@@ -68,7 +68,7 @@ class AirlySensorEntityDescription(SensorEntityDescription):
SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
AirlySensorEntityDescription(
key=ATTR_API_CAQI,
device_class=SensorDeviceClass.AQI,
icon="mdi:air-filter",
name=ATTR_API_CAQI,
native_unit_of_measurement="CAQI",
),
@@ -8,9 +8,9 @@
"requirements": [
"bleak==0.18.1",
"bleak-retry-connector==2.1.3",
"bluetooth-adapters==0.5.2",
"bluetooth-adapters==0.5.3",
"bluetooth-auto-recovery==0.3.3",
"dbus-fast==1.17.0"
"dbus-fast==1.18.0"
],
"codeowners": ["@bdraco"],
"config_flow": true,
+1 -1
View File
@@ -236,7 +236,7 @@ async def async_setup_entry( # noqa: C901
await cli.subscribe_states(entry_data.async_update_state)
await cli.subscribe_service_calls(async_on_service_call)
await cli.subscribe_home_assistant_states(async_on_state_subscription)
if entry_data.device_info.has_bluetooth_proxy:
if entry_data.device_info.bluetooth_proxy_version:
entry_data.disconnect_callbacks.append(
await async_connect_scanner(hass, entry, cli, entry_data)
)
@@ -4,7 +4,6 @@ from __future__ import annotations
import logging
from aioesphomeapi import APIClient
from awesomeversion import AwesomeVersion
from homeassistant.components.bluetooth import (
HaBluetoothConnector,
@@ -24,7 +23,6 @@ from ..entry_data import RuntimeEntryData
from .client import ESPHomeClient
from .scanner import ESPHomeScanner
CONNECTABLE_MIN_VERSION = AwesomeVersion("2022.10.0-dev")
_LOGGER = logging.getLogger(__name__)
@@ -53,10 +51,14 @@ async def async_connect_scanner(
assert entry.unique_id is not None
source = str(entry.unique_id)
new_info_callback = async_get_advertisement_callback(hass)
connectable = bool(
entry_data.device_info
and AwesomeVersion(entry_data.device_info.esphome_version)
>= CONNECTABLE_MIN_VERSION
assert entry_data.device_info is not None
version = entry_data.device_info.bluetooth_proxy_version
connectable = version >= 2
_LOGGER.debug(
"Connecting scanner for %s, version=%s, connectable=%s",
source,
version,
connectable,
)
connector = HaBluetoothConnector(
client=ESPHomeClient,
@@ -15,10 +15,9 @@ from bleak.backends.device import BLEDevice
from bleak.backends.service import BleakGATTServiceCollection
from bleak.exc import BleakError
from homeassistant.core import CALLBACK_TYPE, async_get_hass, callback as hass_callback
from homeassistant.core import CALLBACK_TYPE, async_get_hass
from ..domain_data import DomainData
from ..entry_data import RuntimeEntryData
from .characteristic import BleakGATTCharacteristicESPHome
from .descriptor import BleakGATTDescriptorESPHome
from .service import BleakGATTServiceESPHome
@@ -85,7 +84,9 @@ class ESPHomeClient(BaseBleakClient):
assert self._ble_device.details is not None
self._source = self._ble_device.details["source"]
self.domain_data = DomainData.get(async_get_hass())
self._client = self._async_get_entry_data().client
config_entry = self.domain_data.get_by_unique_id(self._source)
self.entry_data = self.domain_data.get_entry_data(config_entry)
self._client = self.entry_data.client
self._is_connected = False
self._mtu: int | None = None
self._cancel_connection_state: CALLBACK_TYPE | None = None
@@ -108,12 +109,6 @@ class ESPHomeClient(BaseBleakClient):
)
self._cancel_connection_state = None
@hass_callback
def _async_get_entry_data(self) -> RuntimeEntryData:
"""Get the entry data."""
config_entry = self.domain_data.get_by_unique_id(self._source)
return self.domain_data.get_entry_data(config_entry)
def _async_ble_device_disconnected(self) -> None:
"""Handle the BLE device disconnecting from the ESP."""
_LOGGER.debug("%s: BLE device disconnected", self._source)
@@ -125,8 +120,7 @@ class ESPHomeClient(BaseBleakClient):
def _async_esp_disconnected(self) -> None:
"""Handle the esp32 client disconnecting from hass."""
_LOGGER.debug("%s: ESP device disconnected", self._source)
entry_data = self._async_get_entry_data()
entry_data.disconnect_callbacks.remove(self._async_esp_disconnected)
self.entry_data.disconnect_callbacks.remove(self._async_esp_disconnected)
self._async_ble_device_disconnected()
def _async_call_bleak_disconnected_callback(self) -> None:
@@ -179,8 +173,7 @@ class ESPHomeClient(BaseBleakClient):
connected_future.set_exception(BleakError("Disconnected"))
return
entry_data = self._async_get_entry_data()
entry_data.disconnect_callbacks.append(self._async_esp_disconnected)
self.entry_data.disconnect_callbacks.append(self._async_esp_disconnected)
connected_future.set_result(connected)
timeout = kwargs.get("timeout", self._timeout)
@@ -203,14 +196,13 @@ class ESPHomeClient(BaseBleakClient):
async def _wait_for_free_connection_slot(self, timeout: float) -> None:
"""Wait for a free connection slot."""
entry_data = self._async_get_entry_data()
if entry_data.ble_connections_free:
if self.entry_data.ble_connections_free:
return
_LOGGER.debug(
"%s: Out of connection slots, waiting for a free one", self._source
)
async with async_timeout.timeout(timeout):
await entry_data.wait_for_ble_connections_free()
await self.entry_data.wait_for_ble_connections_free()
@property
def is_connected(self) -> bool:
@@ -83,15 +83,23 @@ class ESPHomeScanner(BaseHaScanner):
"""Call the registered callback."""
now = time.monotonic()
address = ":".join(TWO_CHAR.findall("%012X" % adv.address)) # must be upper
name = adv.name
if prev_discovery := self._discovered_devices.get(address):
# If the last discovery had the full local name
# and this one doesn't, keep the old one as we
# always want the full local name over the short one
if len(prev_discovery.name) > len(adv.name):
name = prev_discovery.name
advertisement_data = AdvertisementData( # type: ignore[no-untyped-call]
local_name=None if adv.name == "" else adv.name,
local_name=None if name == "" else name,
manufacturer_data=adv.manufacturer_data,
service_data=adv.service_data,
service_uuids=adv.service_uuids,
)
device = BLEDevice( # type: ignore[no-untyped-call]
address=address,
name=adv.name,
name=name,
details=self._details,
rssi=adv.rssi,
)
@@ -3,7 +3,7 @@
"name": "ESPHome",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/esphome",
"requirements": ["aioesphomeapi==10.14.0"],
"requirements": ["aioesphomeapi==11.0.0"],
"zeroconf": ["_esphomelib._tcp.local."],
"dhcp": [{ "registered_devices": true }],
"codeowners": ["@OttoWinter", "@jesserockz"],
+11 -4
View File
@@ -82,11 +82,18 @@ class Fan(CoordinatorEntity[Coordinator], FanEntity):
async def async_set_percentage(self, percentage: int) -> None:
"""Set speed."""
new_speed = percentage_to_ordered_list_item(
ORDERED_NAMED_FAN_SPEEDS, percentage
)
# Proactively update percentage to manage successive increases
self._percentage = percentage
async with self.coordinator.async_connect_and_update() as device:
await device.send_fan_speed(int(new_speed))
if percentage == 0:
await device.send_command(COMMAND_STOP_FAN)
else:
new_speed = percentage_to_ordered_list_item(
ORDERED_NAMED_FAN_SPEEDS, percentage
)
await device.send_fan_speed(int(new_speed))
async def async_turn_on(
self,
@@ -229,6 +229,10 @@ def create_browse_media_response(
if not children: # Directory searches will pass in subdirectories as children
children = []
for item in result:
if item.get("data_kind") == "spotify" or (
"path" in item and cast(str, item["path"]).startswith("spotify")
): # Exclude spotify data from Owntone library
continue
assert isinstance(item["uri"], str)
media_type = OWNTONE_TYPE_TO_MEDIA_TYPE[item["uri"].split(":")[1]]
title = item.get("name") or item.get("title") # only tracks use title
@@ -2,7 +2,6 @@
from __future__ import annotations
from datetime import datetime
from typing import cast
import aiohttp
from google.auth.exceptions import RefreshError
@@ -10,7 +9,7 @@ from google.oauth2.credentials import Credentials
from gspread import Client
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
@@ -69,6 +68,15 @@ def async_entry_has_scopes(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
hass.data[DOMAIN].pop(entry.entry_id)
loaded_entries = [
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.state == ConfigEntryState.LOADED
]
if len(loaded_entries) == 1:
for service_name in hass.services.async_services()[DOMAIN]:
hass.services.async_remove(DOMAIN, service_name)
return True
@@ -96,12 +104,13 @@ async def async_setup_service(hass: HomeAssistant) -> None:
async def append_to_sheet(call: ServiceCall) -> None:
"""Append new line of data to a Google Sheets document."""
entry = cast(
ConfigEntry,
hass.config_entries.async_get_entry(call.data[DATA_CONFIG_ENTRY]),
entry: ConfigEntry | None = hass.config_entries.async_get_entry(
call.data[DATA_CONFIG_ENTRY]
)
session: OAuth2Session = hass.data[DOMAIN][entry.entry_id]
if not entry:
raise ValueError(f"Invalid config entry: {call.data[DATA_CONFIG_ENTRY]}")
if not (session := hass.data[DOMAIN].get(entry.entry_id)):
raise ValueError(f"Config entry not loaded: {call.data[DATA_CONFIG_ENTRY]}")
await session.async_ensure_token_valid()
await hass.async_add_executor_job(_append_to_sheet, call, entry)
@@ -8,7 +8,7 @@ from typing import Any
from google.oauth2.credentials import Credentials
from gspread import Client, GSpreadException
from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.data_entry_flow import FlowResult
from homeassistant.helpers import config_entry_oauth2_flow
@@ -25,6 +25,8 @@ class OAuth2FlowHandler(
DOMAIN = DOMAIN
reauth_entry: ConfigEntry | None = None
@property
def logger(self) -> logging.Logger:
"""Return logger."""
@@ -42,6 +44,9 @@ class OAuth2FlowHandler(
async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> FlowResult:
"""Perform reauth upon an API authentication error."""
self.reauth_entry = self.hass.config_entries.async_get_entry(
self.context["entry_id"]
)
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
@@ -52,40 +57,27 @@ class OAuth2FlowHandler(
return self.async_show_form(step_id="reauth_confirm")
return await self.async_step_user()
def _async_reauth_entry(self) -> ConfigEntry | None:
"""Return existing entry for reauth."""
if self.source != SOURCE_REAUTH or not (
entry_id := self.context.get("entry_id")
):
return None
return next(
(
entry
for entry in self._async_current_entries()
if entry.entry_id == entry_id
),
None,
)
async def async_oauth_create_entry(self, data: dict[str, Any]) -> FlowResult:
"""Create an entry for the flow, or update existing entry."""
service = Client(Credentials(data[CONF_TOKEN][CONF_ACCESS_TOKEN]))
if entry := self._async_reauth_entry():
if self.reauth_entry:
_LOGGER.debug("service.open_by_key")
try:
await self.hass.async_add_executor_job(
service.open_by_key,
entry.unique_id,
self.reauth_entry.unique_id,
)
except GSpreadException as err:
_LOGGER.error(
"Could not find spreadsheet '%s': %s", entry.unique_id, str(err)
"Could not find spreadsheet '%s': %s",
self.reauth_entry.unique_id,
str(err),
)
return self.async_abort(reason="open_spreadsheet_failure")
self.hass.config_entries.async_update_entry(entry, data=data)
await self.hass.config_entries.async_reload(entry.entry_id)
self.hass.config_entries.async_update_entry(self.reauth_entry, data=data)
await self.hass.config_entries.async_reload(self.reauth_entry.entry_id)
return self.async_abort(reason="reauth_successful")
try:
@@ -97,6 +89,7 @@ class OAuth2FlowHandler(
return self.async_abort(reason="create_spreadsheet_failure")
await self.async_set_unique_id(doc.id)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=DEFAULT_NAME, data=data, description_placeholders={"url": doc.url}
)
@@ -10,6 +10,10 @@
},
"auth": {
"title": "Link Google Account"
},
"reauth_confirm": {
"title": "[%key:common::config_flow::title::reauth%]",
"description": "The Google Sheets integration needs to re-authenticate your account"
}
},
"abort": {
@@ -37,6 +37,11 @@
"service_uuid": "00008551-0000-1000-8000-00805f9b34fb",
"connectable": false
},
{
"manufacturer_id": 53579,
"service_uuid": "00008151-0000-1000-8000-00805f9b34fb",
"connectable": false
},
{
"manufacturer_id": 43682,
"service_uuid": "00008151-0000-1000-8000-00805f9b34fb",
@@ -68,7 +73,7 @@
"connectable": false
}
],
"requirements": ["govee-ble==0.19.0"],
"requirements": ["govee-ble==0.19.1"],
"dependencies": ["bluetooth"],
"codeowners": ["@bdraco"],
"iot_class": "local_push"
+43 -28
View File
@@ -193,14 +193,21 @@ def _async_all_homekit_instances(hass: HomeAssistant) -> list[HomeKit]:
]
def _async_get_entries_by_name(
def _async_get_imported_entries_indices(
current_entries: list[ConfigEntry],
) -> dict[str, ConfigEntry]:
"""Return a dict of the entries by name."""
) -> tuple[dict[str, ConfigEntry], dict[int, ConfigEntry]]:
"""Return a dicts of the entries by name and port."""
# For backwards compat, its possible the first bridge is using the default
# name.
return {entry.data.get(CONF_NAME, BRIDGE_NAME): entry for entry in current_entries}
entries_by_name: dict[str, ConfigEntry] = {}
entries_by_port: dict[int, ConfigEntry] = {}
for entry in current_entries:
if entry.source != SOURCE_IMPORT:
continue
entries_by_name[entry.data.get(CONF_NAME, BRIDGE_NAME)] = entry
entries_by_port[entry.data.get(CONF_PORT, DEFAULT_PORT)] = entry
return entries_by_name, entries_by_port
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
@@ -218,10 +225,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
return True
current_entries = hass.config_entries.async_entries(DOMAIN)
entries_by_name = _async_get_entries_by_name(current_entries)
entries_by_name, entries_by_port = _async_get_imported_entries_indices(
current_entries
)
for index, conf in enumerate(config[DOMAIN]):
if _async_update_config_entry_if_from_yaml(hass, entries_by_name, conf):
if _async_update_config_entry_from_yaml(
hass, entries_by_name, entries_by_port, conf
):
continue
conf[CONF_ENTRY_INDEX] = index
@@ -237,8 +248,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
@callback
def _async_update_config_entry_if_from_yaml(
hass: HomeAssistant, entries_by_name: dict[str, ConfigEntry], conf: ConfigType
def _async_update_config_entry_from_yaml(
hass: HomeAssistant,
entries_by_name: dict[str, ConfigEntry],
entries_by_port: dict[int, ConfigEntry],
conf: ConfigType,
) -> bool:
"""Update a config entry with the latest yaml.
@@ -246,27 +260,24 @@ def _async_update_config_entry_if_from_yaml(
Returns False if there is no matching config entry
"""
bridge_name = conf[CONF_NAME]
if (
bridge_name in entries_by_name
and entries_by_name[bridge_name].source == SOURCE_IMPORT
if not (
matching_entry := entries_by_name.get(conf.get(CONF_NAME, BRIDGE_NAME))
or entries_by_port.get(conf.get(CONF_PORT, DEFAULT_PORT))
):
entry = entries_by_name[bridge_name]
# If they alter the yaml config we import the changes
# since there currently is no practical way to support
# all the options in the UI at this time.
data = conf.copy()
options = {}
for key in CONFIG_OPTIONS:
if key in data:
options[key] = data[key]
del data[key]
return False
hass.config_entries.async_update_entry(entry, data=data, options=options)
return True
# If they alter the yaml config we import the changes
# since there currently is no practical way to support
# all the options in the UI at this time.
data = conf.copy()
options = {}
for key in CONFIG_OPTIONS:
if key in data:
options[key] = data[key]
del data[key]
return False
hass.config_entries.async_update_entry(matching_entry, data=data, options=options)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@@ -451,10 +462,14 @@ def _async_register_events_and_services(hass: HomeAssistant) -> None:
return
current_entries = hass.config_entries.async_entries(DOMAIN)
entries_by_name = _async_get_entries_by_name(current_entries)
entries_by_name, entries_by_port = _async_get_imported_entries_indices(
current_entries
)
for conf in config[DOMAIN]:
_async_update_config_entry_if_from_yaml(hass, entries_by_name, conf)
_async_update_config_entry_from_yaml(
hass, entries_by_name, entries_by_port, conf
)
reload_tasks = [
hass.config_entries.async_reload(entry.entry_id)
@@ -131,6 +131,7 @@ SENSORS: Final[tuple[SensorEntityDescription, ...]] = (
name="Total water usage",
native_unit_of_measurement=VOLUME_CUBIC_METERS,
icon="mdi:gauge",
device_class=SensorDeviceClass.VOLUME,
state_class=SensorStateClass.TOTAL_INCREASING,
),
)
@@ -2,7 +2,7 @@
"domain": "hunterdouglas_powerview",
"name": "Hunter Douglas PowerView",
"documentation": "https://www.home-assistant.io/integrations/hunterdouglas_powerview",
"requirements": ["aiopvapi==2.0.1"],
"requirements": ["aiopvapi==2.0.2"],
"codeowners": ["@bdraco", "@kingy444", "@trullock"],
"config_flow": true,
"homekit": {
+13 -1
View File
@@ -3,7 +3,7 @@ from __future__ import annotations
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import async_get
from homeassistant.helpers.device_registry import DeviceEntry, async_get
from .const import DOMAIN, PLATFORMS
from .coordinator import IBeaconCoordinator
@@ -22,3 +22,15 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
hass.data.pop(DOMAIN)
return unload_ok
async def async_remove_config_entry_device(
hass: HomeAssistant, config_entry: ConfigEntry, device_entry: DeviceEntry
) -> bool:
"""Remove iBeacon config entry from a device."""
coordinator: IBeaconCoordinator = hass.data[DOMAIN]
return not any(
identifier
for identifier in device_entry.identifiers
if identifier[0] == DOMAIN and coordinator.async_device_id_seen(identifier[1])
)
@@ -27,4 +27,9 @@ UPDATE_INTERVAL = timedelta(seconds=60)
# we will add it to the ignore list since its garbage data.
MAX_IDS = 10
# If a device broadcasts this many major minors for the same uuid
# we will add it to the ignore list since its garbage data.
MAX_IDS_PER_UUID = 50
CONF_IGNORE_ADDRESSES = "ignore_addresses"
CONF_IGNORE_UUIDS = "ignore_uuids"
@@ -23,8 +23,10 @@ from homeassistant.helpers.event import async_track_time_interval
from .const import (
CONF_IGNORE_ADDRESSES,
CONF_IGNORE_UUIDS,
DOMAIN,
MAX_IDS,
MAX_IDS_PER_UUID,
SIGNAL_IBEACON_DEVICE_NEW,
SIGNAL_IBEACON_DEVICE_SEEN,
SIGNAL_IBEACON_DEVICE_UNAVAILABLE,
@@ -115,6 +117,9 @@ class IBeaconCoordinator:
self._ignore_addresses: set[str] = set(
entry.data.get(CONF_IGNORE_ADDRESSES, [])
)
# iBeacon devices that do not follow the spec
# and broadcast custom data in the major and minor fields
self._ignore_uuids: set[str] = set(entry.data.get(CONF_IGNORE_UUIDS, []))
# iBeacons with fixed MAC addresses
self._last_ibeacon_advertisement_by_unique_id: dict[
@@ -131,6 +136,17 @@ class IBeaconCoordinator:
self._last_seen_by_group_id: dict[str, bluetooth.BluetoothServiceInfoBleak] = {}
self._unavailable_group_ids: set[str] = set()
# iBeacons with random MAC addresses, fixed UUID, random major/minor
self._major_minor_by_uuid: dict[str, set[tuple[int, int]]] = {}
@callback
def async_device_id_seen(self, device_id: str) -> bool:
"""Return True if the device_id has been seen since boot."""
return bool(
device_id in self._last_ibeacon_advertisement_by_unique_id
or device_id in self._last_seen_by_group_id
)
@callback
def _async_handle_unavailable(
self, service_info: bluetooth.BluetoothServiceInfoBleak
@@ -146,6 +162,25 @@ class IBeaconCoordinator:
"""Cancel unavailable tracking for an address."""
self._unavailable_trackers.pop(address)()
@callback
def _async_ignore_uuid(self, uuid: str) -> None:
"""Ignore an UUID that does not follow the spec and any entities created by it."""
self._ignore_uuids.add(uuid)
major_minor_by_uuid = self._major_minor_by_uuid.pop(uuid)
unique_ids_to_purge = set()
for major, minor in major_minor_by_uuid:
group_id = f"{uuid}_{major}_{minor}"
if unique_ids := self._unique_ids_by_group_id.pop(group_id, None):
unique_ids_to_purge.update(unique_ids)
for address in self._addresses_by_group_id.pop(group_id, []):
self._async_cancel_unavailable_tracker(address)
self._unique_ids_by_address.pop(address)
self._group_ids_by_address.pop(address)
self._async_purge_untrackable_entities(unique_ids_to_purge)
entry_data = self._entry.data
new_data = entry_data | {CONF_IGNORE_UUIDS: list(self._ignore_uuids)}
self.hass.config_entries.async_update_entry(self._entry, data=new_data)
@callback
def _async_ignore_address(self, address: str) -> None:
"""Ignore an address that does not follow the spec and any entities created by it."""
@@ -203,7 +238,20 @@ class IBeaconCoordinator:
return
if not (ibeacon_advertisement := parse(service_info)):
return
group_id = f"{ibeacon_advertisement.uuid}_{ibeacon_advertisement.major}_{ibeacon_advertisement.minor}"
uuid_str = str(ibeacon_advertisement.uuid)
if uuid_str in self._ignore_uuids:
return
major = ibeacon_advertisement.major
minor = ibeacon_advertisement.minor
major_minor_by_uuid = self._major_minor_by_uuid.setdefault(uuid_str, set())
if len(major_minor_by_uuid) + 1 > MAX_IDS_PER_UUID:
self._async_ignore_uuid(uuid_str)
return
major_minor_by_uuid.add((major, minor))
group_id = f"{uuid_str}_{major}_{minor}"
if group_id in self._group_ids_random_macs:
self._async_update_ibeacon_with_random_mac(
@@ -4,7 +4,7 @@
"documentation": "https://www.home-assistant.io/integrations/ibeacon",
"dependencies": ["bluetooth"],
"bluetooth": [{ "manufacturer_id": 76, "manufacturer_data_start": [2, 21] }],
"requirements": ["ibeacon_ble==0.7.1"],
"requirements": ["ibeacon_ble==0.7.3"],
"codeowners": ["@bdraco"],
"iot_class": "local_push",
"loggers": ["bleak"],
@@ -109,7 +109,6 @@ ROBOT_SENSOR_MAP: dict[type[Robot], list[RobotSensorEntityDescription]] = {
RobotSensorEntityDescription[LitterRobot4](
key="pet_weight",
name="Pet weight",
icon="mdi:scale",
native_unit_of_measurement=MASS_POUNDS,
device_class=SensorDeviceClass.WEIGHT,
),
@@ -22,6 +22,7 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_track_state_change_event
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util.unit_conversion import TemperatureConverter
_LOGGER = logging.getLogger(__name__)
@@ -218,7 +219,7 @@ class MoldIndicator(SensorEntity):
# convert to celsius if necessary
if unit == TEMP_FAHRENHEIT:
return util.temperature.fahrenheit_to_celsius(temp)
return TemperatureConverter.convert(temp, TEMP_FAHRENHEIT, TEMP_CELSIUS)
if unit == TEMP_CELSIUS:
return temp
_LOGGER.error(
@@ -385,13 +386,13 @@ class MoldIndicator(SensorEntity):
}
dewpoint = (
util.temperature.celsius_to_fahrenheit(self._dewpoint)
TemperatureConverter.convert(self._dewpoint, TEMP_CELSIUS, TEMP_FAHRENHEIT)
if self._dewpoint is not None
else None
)
crit_temp = (
util.temperature.celsius_to_fahrenheit(self._crit_temp)
TemperatureConverter.convert(self._crit_temp, TEMP_CELSIUS, TEMP_FAHRENHEIT)
if self._crit_temp is not None
else None
)
@@ -95,13 +95,11 @@ SENSORS: dict[str, SensorEntityDescription] = {
key="V_WEIGHT",
native_unit_of_measurement=MASS_KILOGRAMS,
device_class=SensorDeviceClass.WEIGHT,
icon="mdi:weight-kilogram",
),
"V_DISTANCE": SensorEntityDescription(
key="V_DISTANCE",
native_unit_of_measurement=LENGTH_METERS,
device_class=SensorDeviceClass.DISTANCE,
icon="mdi:ruler",
),
"V_IMPEDANCE": SensorEntityDescription(
key="V_IMPEDANCE",
+12 -2
View File
@@ -137,9 +137,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
raise ConfigEntryAuthFailed("Token not valid, trigger renewal") from ex
raise ConfigEntryNotReady from ex
if sorted(session.token["scope"]) != sorted(NETATMO_SCOPES):
if entry.data["auth_implementation"] == cloud.DOMAIN:
required_scopes = {
scope
for scope in NETATMO_SCOPES
if scope not in ("access_doorbell", "read_doorbell")
}
else:
required_scopes = set(NETATMO_SCOPES)
if not (set(session.token["scope"]) & required_scopes):
_LOGGER.debug(
"Scope is invalid: %s != %s", session.token["scope"], NETATMO_SCOPES
"Session is missing scopes: %s",
required_scopes - set(session.token["scope"]),
)
raise ConfigEntryAuthFailed("Token scope not valid, trigger renewal")
@@ -54,7 +54,14 @@ class NetatmoFlowHandler(
@property
def extra_authorize_data(self) -> dict:
"""Extra data that needs to be appended to the authorize url."""
return {"scope": " ".join(ALL_SCOPES)}
exclude = []
if self.flow_impl.name == "Home Assistant Cloud":
exclude = ["access_doorbell", "read_doorbell"]
scopes = [scope for scope in ALL_SCOPES if scope not in exclude]
scopes.sort()
return {"scope": " ".join(scopes)}
async def async_step_user(self, user_input: dict | None = None) -> FlowResult:
"""Handle a flow start."""
+8 -3
View File
@@ -142,16 +142,21 @@ class ONVIFCameraEntity(ONVIFBaseEntity, Camera):
if self.device.capabilities.snapshot:
try:
image = await self.device.device.get_snapshot(
if image := await self.device.device.get_snapshot(
self.profile.token, self._basic_auth
)
return image
):
return image
except ONVIFError as err:
LOGGER.error(
"Fetch snapshot image failed from %s, falling back to FFmpeg; %s",
self.device.name,
err,
)
else:
LOGGER.error(
"Fetch snapshot image failed from %s, falling back to FFmpeg",
self.device.name,
)
assert self._stream_uri
return await ffmpeg.async_get_image(
@@ -9,6 +9,7 @@ from homeassistant.components.weather import (
ATTR_CONDITION_CLEAR_NIGHT,
ATTR_CONDITION_SUNNY,
)
from homeassistant.const import TEMP_CELSIUS, TEMP_KELVIN
from homeassistant.helpers import sun
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt
@@ -191,7 +192,9 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
def _fmt_dewpoint(dewpoint):
"""Format the dewpoint data."""
if dewpoint is not None:
return round(TemperatureConverter.kelvin_to_celsius(dewpoint), 1)
return round(
TemperatureConverter.convert(dewpoint, TEMP_KELVIN, TEMP_CELSIUS), 1
)
return None
@staticmethod
@@ -152,7 +152,7 @@ class LowSpeedCover(VerticalCover):
) -> None:
"""Initialize the device."""
super().__init__(device_url, coordinator)
self._attr_name = f"{self._attr_name} Low Speed"
self._attr_name = "Low speed"
self._attr_unique_id = f"{self._attr_unique_id}_low_speed"
async def async_set_cover_position(self, **kwargs: Any) -> None:
@@ -348,7 +348,9 @@ class PrometheusMetrics:
with suppress(ValueError):
value = self.state_as_number(state)
if state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_FAHRENHEIT:
value = TemperatureConverter.fahrenheit_to_celsius(value)
value = TemperatureConverter.convert(
value, TEMP_FAHRENHEIT, TEMP_CELSIUS
)
metric.labels(**self._labels(state)).set(value)
def _handle_device_tracker(self, state):
@@ -394,7 +396,7 @@ class PrometheusMetrics:
def _handle_climate_temp(self, state, attr, metric_name, metric_description):
if temp := state.attributes.get(attr):
if self._climate_units == TEMP_FAHRENHEIT:
temp = TemperatureConverter.fahrenheit_to_celsius(temp)
temp = TemperatureConverter.convert(temp, TEMP_FAHRENHEIT, TEMP_CELSIUS)
metric = self._metric(
metric_name,
self.prometheus_cli.Gauge,
@@ -507,7 +509,9 @@ class PrometheusMetrics:
try:
value = self.state_as_number(state)
if state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_FAHRENHEIT:
value = TemperatureConverter.fahrenheit_to_celsius(value)
value = TemperatureConverter.convert(
value, TEMP_FAHRENHEIT, TEMP_CELSIUS
)
_metric.labels(**self._labels(state)).set(value)
except ValueError:
pass
+4 -2
View File
@@ -485,11 +485,13 @@ class Recorder(threading.Thread):
statistic_id: str,
start_time: datetime,
sum_adjustment: float,
display_unit: str,
adjustment_unit: str,
) -> None:
"""Adjust statistics."""
self.queue_task(
AdjustStatisticsTask(statistic_id, start_time, sum_adjustment, display_unit)
AdjustStatisticsTask(
statistic_id, start_time, sum_adjustment, adjustment_unit
)
)
@callback
@@ -899,7 +899,7 @@ def list_statistic_ids(
result = {
meta["statistic_id"]: {
"display_unit_of_measurement": meta["state_unit_of_measurement"],
"state_unit_of_measurement": meta["state_unit_of_measurement"],
"has_mean": meta["has_mean"],
"has_sum": meta["has_sum"],
"name": meta["name"],
@@ -926,7 +926,7 @@ def list_statistic_ids(
"has_sum": meta["has_sum"],
"name": meta["name"],
"source": meta["source"],
"display_unit_of_measurement": meta["state_unit_of_measurement"],
"state_unit_of_measurement": meta["state_unit_of_measurement"],
"unit_class": _get_unit_class(meta["unit_of_measurement"]),
"unit_of_measurement": meta["unit_of_measurement"],
}
@@ -939,7 +939,7 @@ def list_statistic_ids(
"has_sum": info["has_sum"],
"name": info.get("name"),
"source": info["source"],
"display_unit_of_measurement": info["display_unit_of_measurement"],
"state_unit_of_measurement": info["state_unit_of_measurement"],
"statistics_unit_of_measurement": info["unit_of_measurement"],
"unit_class": info["unit_class"],
}
@@ -1605,7 +1605,7 @@ def adjust_statistics(
statistic_id: str,
start_time: datetime,
sum_adjustment: float,
display_unit: str,
adjustment_unit: str,
) -> bool:
"""Process an add_statistics job."""
@@ -1617,7 +1617,9 @@ def adjust_statistics(
return True
statistic_unit = metadata[statistic_id][1]["unit_of_measurement"]
convert = _get_display_to_statistic_unit_converter(display_unit, statistic_unit)
convert = _get_display_to_statistic_unit_converter(
adjustment_unit, statistic_unit
)
sum_adjustment = convert(sum_adjustment)
_adjust_sum_statistics(
+3 -3
View File
@@ -163,7 +163,7 @@ class AdjustStatisticsTask(RecorderTask):
statistic_id: str
start_time: datetime
sum_adjustment: float
display_unit: str
adjustment_unit: str
def run(self, instance: Recorder) -> None:
"""Run statistics task."""
@@ -172,7 +172,7 @@ class AdjustStatisticsTask(RecorderTask):
self.statistic_id,
self.start_time,
self.sum_adjustment,
self.display_unit,
self.adjustment_unit,
):
return
# Schedule a new adjust statistics task if this one didn't finish
@@ -181,7 +181,7 @@ class AdjustStatisticsTask(RecorderTask):
self.statistic_id,
self.start_time,
self.sum_adjustment,
self.display_unit,
self.adjustment_unit,
)
)
@@ -291,7 +291,7 @@ def ws_change_statistics_unit(
vol.Required("statistic_id"): str,
vol.Required("start_time"): str,
vol.Required("adjustment"): vol.Any(float, int),
vol.Required("display_unit"): vol.Any(str, None),
vol.Required("adjustment_unit_of_measurement"): vol.Any(str, None),
}
)
@websocket_api.async_response
@@ -320,25 +320,26 @@ async def ws_adjust_sum_statistics(
return
metadata = metadatas[0]
def valid_units(statistics_unit: str | None, display_unit: str | None) -> bool:
if statistics_unit == display_unit:
def valid_units(statistics_unit: str | None, adjustment_unit: str | None) -> bool:
if statistics_unit == adjustment_unit:
return True
converter = STATISTIC_UNIT_TO_UNIT_CONVERTER.get(statistics_unit)
if converter is not None and display_unit in converter.VALID_UNITS:
if converter is not None and adjustment_unit in converter.VALID_UNITS:
return True
return False
stat_unit = metadata["statistics_unit_of_measurement"]
if not valid_units(stat_unit, msg["display_unit"]):
adjustment_unit = msg["adjustment_unit_of_measurement"]
if not valid_units(stat_unit, adjustment_unit):
connection.send_error(
msg["id"],
"invalid_units",
f"Can't convert {stat_unit} to {msg['display_unit']}",
f"Can't convert {stat_unit} to {adjustment_unit}",
)
return
get_instance(hass).async_adjust_statistics(
msg["statistic_id"], start_time, msg["adjustment"], msg["display_unit"]
msg["statistic_id"], start_time, msg["adjustment"], adjustment_unit
)
connection.send_result(msg["id"])
+2 -2
View File
@@ -70,7 +70,7 @@ REST_UPDATES: Final = {
install=lambda wrapper: wrapper.async_trigger_ota_update(),
device_class=UpdateDeviceClass.FIRMWARE,
entity_category=EntityCategory.CONFIG,
entity_registry_enabled_default=True,
entity_registry_enabled_default=False,
),
"fwupdate_beta": RestUpdateDescription(
name="Beta Firmware Update",
@@ -94,7 +94,7 @@ RPC_UPDATES: Final = {
install=lambda wrapper: wrapper.async_trigger_ota_update(),
device_class=UpdateDeviceClass.FIRMWARE,
entity_category=EntityCategory.CONFIG,
entity_registry_enabled_default=True,
entity_registry_enabled_default=False,
),
"fwupdate_beta": RpcUpdateDescription(
name="Beta Firmware Update",
@@ -110,7 +110,7 @@ class SynoApi:
# check if upgrade is available
try:
self.dsm.upgrade.update()
except SynologyDSMAPIErrorException as ex:
except SYNOLOGY_CONNECTION_EXCEPTIONS as ex:
self._with_upgrade = False
self.dsm.reset(SynoCoreUpgrade.API_KEY)
LOGGER.debug("Disabled fetching upgrade data during setup: %s", ex)
@@ -34,6 +34,14 @@ class ViCareRequiredKeysMixin:
value_getter: Callable[[Device], bool]
@dataclass()
class ViCareRequiredKeysMixinWithSet:
"""Mixin for required keys with setter."""
value_getter: Callable[[Device], bool]
value_setter: Callable[[Device], bool]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up from config entry."""
_LOGGER.debug("Setting up ViCare component")
+16 -4
View File
@@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import DeviceInfo, EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import ViCareRequiredKeysMixin
from . import ViCareRequiredKeysMixinWithSet
from .const import DOMAIN, VICARE_API, VICARE_DEVICE_CONFIG, VICARE_NAME
_LOGGER = logging.getLogger(__name__)
@@ -27,7 +27,9 @@ BUTTON_DHW_ACTIVATE_ONETIME_CHARGE = "activate_onetimecharge"
@dataclass
class ViCareButtonEntityDescription(ButtonEntityDescription, ViCareRequiredKeysMixin):
class ViCareButtonEntityDescription(
ButtonEntityDescription, ViCareRequiredKeysMixinWithSet
):
"""Describes ViCare button sensor entity."""
@@ -37,7 +39,8 @@ BUTTON_DESCRIPTIONS: tuple[ViCareButtonEntityDescription, ...] = (
name="Activate one-time charge",
icon="mdi:shower-head",
entity_category=EntityCategory.CONFIG,
value_getter=lambda api: api.activateOneTimeCharge(),
value_getter=lambda api: api.getOneTimeCharge(),
value_setter=lambda api: api.activateOneTimeCharge(),
),
)
@@ -54,6 +57,15 @@ async def async_setup_entry(
entities = []
for description in BUTTON_DESCRIPTIONS:
try:
description.value_getter(api)
_LOGGER.debug("Found entity %s", description.name)
except PyViCareNotSupportedFeatureError:
_LOGGER.info("Feature not supported %s", description.name)
continue
except AttributeError:
_LOGGER.debug("Attribute Error %s", name)
continue
entity = ViCareButton(
f"{name} {description.name}",
api,
@@ -83,7 +95,7 @@ class ViCareButton(ButtonEntity):
"""Handle the button press."""
try:
with suppress(PyViCareNotSupportedFeatureError):
self.entity_description.value_getter(self._api)
self.entity_description.value_setter(self._api)
except requests.exceptions.ConnectionError:
_LOGGER.error("Unable to retrieve data from ViCare server")
except ValueError:
@@ -3,7 +3,7 @@
"name": "LG webOS Smart TV",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/webostv",
"requirements": ["aiowebostv==0.2.0"],
"requirements": ["aiowebostv==0.2.1"],
"codeowners": ["@bendavid", "@thecode"],
"ssdp": [{ "st": "urn:lge-com:service:webos-second-screen:1" }],
"quality_scale": "platinum",
@@ -14,6 +14,7 @@ from homeassistant.components.alarm_control_panel import (
AlarmControlPanelEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback
@@ -80,7 +81,7 @@ class YaleAlarmDevice(YaleAlarmEntity, AlarmControlPanelEntity):
)
except YALE_ALL_ERRORS as error:
raise HomeAssistantError(
f"Could not set alarm for {self._attr_name}: {error}"
f"Could not set alarm for {self.coordinator.entry.data[CONF_NAME]}: {error}"
) from error
if alarm_state:
@@ -46,7 +46,7 @@ class YaleDoorlock(YaleEntity, LockEntity):
"""Initialize the Yale Lock Device."""
super().__init__(coordinator, data)
self._attr_code_format = f"^\\d{code_format}$"
self.lock_name = data["name"]
self.lock_name: str = data["name"]
async def async_unlock(self, **kwargs: Any) -> None:
"""Send unlock command."""
@@ -79,14 +79,14 @@ class YaleDoorlock(YaleEntity, LockEntity):
)
except YALE_ALL_ERRORS as error:
raise HomeAssistantError(
f"Could not set lock for {self._attr_name}: {error}"
f"Could not set lock for {self.lock_name}: {error}"
) from error
if lock_state:
self.coordinator.data["lock_map"][self._attr_unique_id] = command
self.async_write_ha_state()
return
raise HomeAssistantError("Could set lock, check system ready for lock.")
raise HomeAssistantError("Could not set lock, check system ready for lock.")
@property
def is_locked(self) -> bool | None:
+13 -1
View File
@@ -12,7 +12,7 @@ from yolink.model import BRDP
from yolink.mqtt_client import MqttClient
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow
@@ -110,11 +110,23 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
device_coordinators[device.device_id] = device_coordinator
hass.data[DOMAIN][entry.entry_id][ATTR_COORDINATORS] = device_coordinators
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
async def shutdown_subscription(event) -> None:
"""Shutdown mqtt message subscription."""
await yolink_mqtt_client.shutdown_home_subscription()
entry.async_on_unload(
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown_subscription)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
await hass.data[DOMAIN][entry.entry_id][
ATTR_MQTT_CLIENT
].shutdown_home_subscription()
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
@@ -3,7 +3,7 @@
"name": "YoLink",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/yolink",
"requirements": ["yolink-api==0.0.9"],
"requirements": ["yolink-api==0.1.0"],
"dependencies": ["auth", "application_credentials"],
"codeowners": ["@matrixd2"],
"iot_class": "cloud_push"
@@ -404,6 +404,7 @@ class ZeroconfDiscovery:
_LOGGER.debug("Discovered new device %s %s", name, info)
props: dict[str, str] = info.properties
domain = None
# If we can handle it as a HomeKit discovery, we do that here.
if service_type in HOMEKIT_TYPES and (
@@ -458,10 +459,17 @@ class ZeroconfDiscovery:
matcher_domain = matcher["domain"]
assert isinstance(matcher_domain, str)
context = {
"source": config_entries.SOURCE_ZEROCONF,
}
if domain:
# Domain of integration that offers alternative API to handle this device.
context["alternative_domain"] = domain
discovery_flow.async_create_flow(
self.hass,
matcher_domain,
{"source": config_entries.SOURCE_ZEROCONF},
context,
info,
)
+1 -1
View File
@@ -8,7 +8,7 @@ from .backports.enum import StrEnum
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2022
MINOR_VERSION: Final = 10
PATCH_VERSION: Final = "0b1"
PATCH_VERSION: Final = "0b3"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)
+6
View File
@@ -84,6 +84,12 @@ BLUETOOTH: list[dict[str, bool | str | int | list[int]]] = [
"service_uuid": "00008551-0000-1000-8000-00805f9b34fb",
"connectable": False,
},
{
"domain": "govee_ble",
"manufacturer_id": 53579,
"service_uuid": "00008151-0000-1000-8000-00805f9b34fb",
"connectable": False,
},
{
"domain": "govee_ble",
"manufacturer_id": 43682,
@@ -4412,6 +4412,12 @@
"iot_class": "local_polling",
"name": "Thinking Cleaner"
},
"third_reality": {
"name": "Third Reality",
"iot_standards": [
"zigbee"
]
},
"thomson": {
"config_flow": false,
"iot_class": "local_polling",
+2 -2
View File
@@ -12,12 +12,12 @@ awesomeversion==22.9.0
bcrypt==3.1.7
bleak-retry-connector==2.1.3
bleak==0.18.1
bluetooth-adapters==0.5.2
bluetooth-adapters==0.5.3
bluetooth-auto-recovery==0.3.3
certifi>=2021.5.30
ciso8601==2.2.0
cryptography==38.0.1
dbus-fast==1.17.0
dbus-fast==1.18.0
fnvhash==0.1.0
hass-nabucasa==0.56.0
home-assistant-bluetooth==1.3.0
+24
View File
@@ -1,6 +1,8 @@
"""Distance util functions."""
from __future__ import annotations
from collections.abc import Callable
from homeassistant.const import ( # pylint: disable=unused-import # noqa: F401
LENGTH,
LENGTH_CENTIMETERS,
@@ -19,6 +21,28 @@ from .unit_conversion import DistanceConverter
VALID_UNITS = DistanceConverter.VALID_UNITS
TO_METERS: dict[str, Callable[[float], float]] = {
LENGTH_METERS: lambda meters: meters,
LENGTH_MILES: lambda miles: miles * 1609.344,
LENGTH_YARD: lambda yards: yards * 0.9144,
LENGTH_FEET: lambda feet: feet * 0.3048,
LENGTH_INCHES: lambda inches: inches * 0.0254,
LENGTH_KILOMETERS: lambda kilometers: kilometers * 1000,
LENGTH_CENTIMETERS: lambda centimeters: centimeters * 0.01,
LENGTH_MILLIMETERS: lambda millimeters: millimeters * 0.001,
}
METERS_TO: dict[str, Callable[[float], float]] = {
LENGTH_METERS: lambda meters: meters,
LENGTH_MILES: lambda meters: meters * 0.000621371,
LENGTH_YARD: lambda meters: meters * 1.09361,
LENGTH_FEET: lambda meters: meters * 3.28084,
LENGTH_INCHES: lambda meters: meters * 39.3701,
LENGTH_KILOMETERS: lambda meters: meters * 0.001,
LENGTH_CENTIMETERS: lambda meters: meters * 100,
LENGTH_MILLIMETERS: lambda meters: meters * 1000,
}
def convert(value: float, from_unit: str, to_unit: str) -> float:
"""Convert one unit of measurement to another."""
+1 -1
View File
@@ -26,7 +26,7 @@ from .unit_conversion import ( # pylint: disable=unused-import # noqa: F401
)
# pylint: disable-next=protected-access
UNIT_CONVERSION = SpeedConverter._UNIT_CONVERSION
UNIT_CONVERSION: dict[str, float] = SpeedConverter._UNIT_CONVERSION
VALID_UNITS = SpeedConverter.VALID_UNITS
+12 -12
View File
@@ -297,19 +297,19 @@ class TemperatureConverter(BaseUnitConverter):
if from_unit == TEMP_CELSIUS:
if to_unit == TEMP_FAHRENHEIT:
return cls.celsius_to_fahrenheit(value, interval)
return cls._celsius_to_fahrenheit(value, interval)
if to_unit == TEMP_KELVIN:
return cls.celsius_to_kelvin(value, interval)
return cls._celsius_to_kelvin(value, interval)
raise HomeAssistantError(
UNIT_NOT_RECOGNIZED_TEMPLATE.format(to_unit, cls.UNIT_CLASS)
)
if from_unit == TEMP_FAHRENHEIT:
if to_unit == TEMP_CELSIUS:
return cls.fahrenheit_to_celsius(value, interval)
return cls._fahrenheit_to_celsius(value, interval)
if to_unit == TEMP_KELVIN:
return cls.celsius_to_kelvin(
cls.fahrenheit_to_celsius(value, interval), interval
return cls._celsius_to_kelvin(
cls._fahrenheit_to_celsius(value, interval), interval
)
raise HomeAssistantError(
UNIT_NOT_RECOGNIZED_TEMPLATE.format(to_unit, cls.UNIT_CLASS)
@@ -317,10 +317,10 @@ class TemperatureConverter(BaseUnitConverter):
if from_unit == TEMP_KELVIN:
if to_unit == TEMP_CELSIUS:
return cls.kelvin_to_celsius(value, interval)
return cls._kelvin_to_celsius(value, interval)
if to_unit == TEMP_FAHRENHEIT:
return cls.celsius_to_fahrenheit(
cls.kelvin_to_celsius(value, interval), interval
return cls._celsius_to_fahrenheit(
cls._kelvin_to_celsius(value, interval), interval
)
raise HomeAssistantError(
UNIT_NOT_RECOGNIZED_TEMPLATE.format(to_unit, cls.UNIT_CLASS)
@@ -330,28 +330,28 @@ class TemperatureConverter(BaseUnitConverter):
)
@classmethod
def fahrenheit_to_celsius(cls, fahrenheit: float, interval: bool = False) -> float:
def _fahrenheit_to_celsius(cls, fahrenheit: float, interval: bool = False) -> float:
"""Convert a temperature in Fahrenheit to Celsius."""
if interval:
return fahrenheit / 1.8
return (fahrenheit - 32.0) / 1.8
@classmethod
def kelvin_to_celsius(cls, kelvin: float, interval: bool = False) -> float:
def _kelvin_to_celsius(cls, kelvin: float, interval: bool = False) -> float:
"""Convert a temperature in Kelvin to Celsius."""
if interval:
return kelvin
return kelvin - 273.15
@classmethod
def celsius_to_fahrenheit(cls, celsius: float, interval: bool = False) -> float:
def _celsius_to_fahrenheit(cls, celsius: float, interval: bool = False) -> float:
"""Convert a temperature in Celsius to Fahrenheit."""
if interval:
return celsius * 1.8
return celsius * 1.8 + 32.0
@classmethod
def celsius_to_kelvin(cls, celsius: float, interval: bool = False) -> float:
def _celsius_to_kelvin(cls, celsius: float, interval: bool = False) -> float:
"""Convert a temperature in Celsius to Kelvin."""
if interval:
return celsius
-2
View File
@@ -15,8 +15,6 @@ from homeassistant.helpers.frame import report
from .unit_conversion import VolumeConverter
# pylint: disable-next=protected-access
UNIT_CONVERSION = VolumeConverter._UNIT_CONVERSION
VALID_UNITS = VolumeConverter.VALID_UNITS
+1 -1
View File
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2022.10.0b1"
version = "2022.10.0b3"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
+8 -8
View File
@@ -156,7 +156,7 @@ aioecowitt==2022.09.3
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==10.14.0
aioesphomeapi==11.0.0
# homeassistant.components.flo
aioflo==2021.11.0
@@ -229,7 +229,7 @@ aioopenexchangerates==0.4.0
aiopulse==0.4.3
# homeassistant.components.hunterdouglas_powerview
aiopvapi==2.0.1
aiopvapi==2.0.2
# homeassistant.components.pvpc_hourly_pricing
aiopvpc==3.0.0
@@ -285,7 +285,7 @@ aiovlc==0.1.0
aiowatttime==0.1.1
# homeassistant.components.webostv
aiowebostv==0.2.0
aiowebostv==0.2.1
# homeassistant.components.yandex_transport
aioymaps==1.2.2
@@ -435,7 +435,7 @@ bluemaestro-ble==0.2.0
# bluepy==1.3.0
# homeassistant.components.bluetooth
bluetooth-adapters==0.5.2
bluetooth-adapters==0.5.3
# homeassistant.components.bluetooth
bluetooth-auto-recovery==0.3.3
@@ -540,7 +540,7 @@ datadog==0.15.0
datapoint==0.9.8
# homeassistant.components.bluetooth
dbus-fast==1.17.0
dbus-fast==1.18.0
# homeassistant.components.debugpy
debugpy==1.6.3
@@ -783,7 +783,7 @@ googlemaps==2.5.1
goslide-api==0.5.1
# homeassistant.components.govee_ble
govee-ble==0.19.0
govee-ble==0.19.1
# homeassistant.components.remote_rpi_gpio
gpiozero==1.6.2
@@ -898,7 +898,7 @@ iammeter==0.1.7
iaqualink==0.4.1
# homeassistant.components.ibeacon
ibeacon_ble==0.7.1
ibeacon_ble==0.7.3
# homeassistant.components.watson_tts
ibm-watson==5.2.2
@@ -2577,7 +2577,7 @@ yeelight==0.7.10
yeelightsunflower==0.0.10
# homeassistant.components.yolink
yolink-api==0.0.9
yolink-api==0.1.0
# homeassistant.components.youless
youless-api==0.16
+8 -8
View File
@@ -143,7 +143,7 @@ aioecowitt==2022.09.3
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==10.14.0
aioesphomeapi==11.0.0
# homeassistant.components.flo
aioflo==2021.11.0
@@ -204,7 +204,7 @@ aioopenexchangerates==0.4.0
aiopulse==0.4.3
# homeassistant.components.hunterdouglas_powerview
aiopvapi==2.0.1
aiopvapi==2.0.2
# homeassistant.components.pvpc_hourly_pricing
aiopvpc==3.0.0
@@ -260,7 +260,7 @@ aiovlc==0.1.0
aiowatttime==0.1.1
# homeassistant.components.webostv
aiowebostv==0.2.0
aiowebostv==0.2.1
# homeassistant.components.yandex_transport
aioymaps==1.2.2
@@ -349,7 +349,7 @@ blinkpy==0.19.2
bluemaestro-ble==0.2.0
# homeassistant.components.bluetooth
bluetooth-adapters==0.5.2
bluetooth-adapters==0.5.3
# homeassistant.components.bluetooth
bluetooth-auto-recovery==0.3.3
@@ -420,7 +420,7 @@ datadog==0.15.0
datapoint==0.9.8
# homeassistant.components.bluetooth
dbus-fast==1.17.0
dbus-fast==1.18.0
# homeassistant.components.debugpy
debugpy==1.6.3
@@ -587,7 +587,7 @@ google-nest-sdm==2.0.0
googlemaps==2.5.1
# homeassistant.components.govee_ble
govee-ble==0.19.0
govee-ble==0.19.1
# homeassistant.components.gree
greeclimate==1.3.0
@@ -669,7 +669,7 @@ hyperion-py==0.7.5
iaqualink==0.4.1
# homeassistant.components.ibeacon
ibeacon_ble==0.7.1
ibeacon_ble==0.7.3
# homeassistant.components.ping
icmplib==3.0
@@ -1784,7 +1784,7 @@ yalexs==1.2.4
yeelight==0.7.10
# homeassistant.components.yolink
yolink-api==0.0.9
yolink-api==0.1.0
# homeassistant.components.youless
youless-api==0.16
+2 -1
View File
@@ -11,6 +11,7 @@ from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
ATTR_ICON,
ATTR_UNIT_OF_MEASUREMENT,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
PERCENTAGE,
@@ -37,7 +38,7 @@ async def test_sensor(hass, aioclient_mock):
assert state.state == "7"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "CAQI"
assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.AQI
assert state.attributes.get(ATTR_ICON) == "mdi:air-filter"
entry = registry.async_get("sensor.home_caqi")
assert entry
+2 -2
View File
@@ -63,21 +63,21 @@ async def test_demo_statistics(hass, recorder_mock):
list_statistic_ids, hass
)
assert {
"display_unit_of_measurement": "°C",
"has_mean": True,
"has_sum": False,
"name": "Outdoor temperature",
"source": "demo",
"state_unit_of_measurement": "°C",
"statistic_id": "demo:temperature_outdoor",
"statistics_unit_of_measurement": "°C",
"unit_class": "temperature",
} in statistic_ids
assert {
"display_unit_of_measurement": "kWh",
"has_mean": False,
"has_sum": True,
"name": "Energy consumption 1",
"source": "demo",
"state_unit_of_measurement": "kWh",
"statistic_id": "demo:energy_consumption_kwh",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
@@ -4,7 +4,11 @@ from http import HTTPStatus
from unittest.mock import patch
from homeassistant.components import media_source, spotify
from homeassistant.components.forked_daapd.browse_media import create_media_content_id
from homeassistant.components.forked_daapd.browse_media import (
MediaContent,
create_media_content_id,
is_owntone_media_content_id,
)
from homeassistant.components.media_player import BrowseMedia, MediaClass, MediaType
from homeassistant.components.spotify.const import (
MEDIA_PLAYER_PREFIX as SPOTIFY_MEDIA_PLAYER_PREFIX,
@@ -111,6 +115,16 @@ async def test_async_browse_media(hass, hass_ws_client, config_entry):
"length_ms": 2951554,
"uri": "library:artist:3815427709949443149",
},
{
"id": "456",
"name": "Spotify Artist",
"name_sort": "Spotify Artist",
"album_count": 1,
"track_count": 10,
"length_ms": 2254,
"uri": "spotify:artist:abc123",
"data_kind": "spotify",
},
]
mock_api.return_value.get_genres.return_value = [
{"name": "Classical"},
@@ -127,6 +141,13 @@ async def test_async_browse_media(hass, hass_ws_client, config_entry):
"smart_playlist": False,
"uri": "library:playlist:1",
},
{
"id": 2,
"name": "Spotify Playlist",
"path": "spotify:playlist:abc123",
"smart_playlist": False,
"uri": "library:playlist:2",
},
]
# Request browse root through WebSocket
@@ -150,6 +171,11 @@ async def test_async_browse_media(hass, hass_ws_client, config_entry):
"""Browse the children of this BrowseMedia."""
nonlocal msg_id
for child in children:
# Assert Spotify content is not passed through as Owntone media
assert not (
is_owntone_media_content_id(child["media_content_id"])
and "Spotify" in MediaContent(child["media_content_id"]).title
)
if child["can_expand"]:
await client.send_json(
{
@@ -312,3 +312,66 @@ async def test_reauth_abort(
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result.get("type") == "abort"
assert result.get("reason") == "open_spreadsheet_failure"
async def test_already_configured(
hass: HomeAssistant,
hass_client_no_auth,
aioclient_mock,
current_request_with_host,
setup_credentials,
mock_client,
) -> None:
"""Test case where config flow discovers unique id was already configured."""
config_entry = MockConfigEntry(
domain=DOMAIN,
unique_id=SHEET_ID,
data={
"token": {
"access_token": "mock-access-token",
},
},
)
config_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
"google_sheets", context={"source": config_entries.SOURCE_USER}
)
state = config_entry_oauth2_flow._encode_jwt(
hass,
{
"flow_id": result["flow_id"],
"redirect_uri": "https://example.com/auth/external/callback",
},
)
assert result["url"] == (
f"{oauth2client.GOOGLE_AUTH_URI}?response_type=code&client_id={CLIENT_ID}"
"&redirect_uri=https://example.com/auth/external/callback"
f"&state={state}&scope=https://www.googleapis.com/auth/drive.file"
"&access_type=offline&prompt=consent"
)
client = await hass_client_no_auth()
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == 200
assert resp.headers["content-type"] == "text/html; charset=utf-8"
# Prepare fake client library response when creating the sheet
mock_create = Mock()
mock_create.return_value.id = SHEET_ID
mock_client.return_value.create = mock_create
aioclient_mock.post(
oauth2client.GOOGLE_TOKEN_URI,
json={
"refresh_token": "mock-refresh-token",
"access_token": "mock-access-token",
"type": "Bearer",
"expires_in": 60,
},
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result.get("type") == "abort"
assert result.get("reason") == "already_configured"
+85 -10
View File
@@ -14,6 +14,7 @@ from homeassistant.components.application_credentials import (
from homeassistant.components.google_sheets import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceNotFound
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry
@@ -75,15 +76,6 @@ async def mock_setup_integration(
yield func
# Verify clean unload
entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1
await hass.config_entries.async_unload(entries[0].entry_id)
await hass.async_block_till_done()
assert not hass.data.get(DOMAIN)
assert entries[0].state is ConfigEntryState.NOT_LOADED
async def test_setup_success(
hass: HomeAssistant, setup_integration: ComponentSetup
@@ -95,6 +87,13 @@ async def test_setup_success(
assert len(entries) == 1
assert entries[0].state is ConfigEntryState.LOADED
await hass.config_entries.async_unload(entries[0].entry_id)
await hass.async_block_till_done()
assert not hass.data.get(DOMAIN)
assert entries[0].state is ConfigEntryState.NOT_LOADED
assert not len(hass.services.async_services().get(DOMAIN, {}))
@pytest.mark.parametrize(
"scopes",
@@ -193,7 +192,7 @@ async def test_append_sheet(
setup_integration: ComponentSetup,
config_entry: MockConfigEntry,
) -> None:
"""Test successful setup and unload."""
"""Test service call appending to a sheet."""
await setup_integration()
entries = hass.config_entries.async_entries(DOMAIN)
@@ -212,3 +211,79 @@ async def test_append_sheet(
blocking=True,
)
assert len(mock_client.mock_calls) == 8
async def test_append_sheet_invalid_config_entry(
hass: HomeAssistant,
setup_integration: ComponentSetup,
config_entry: MockConfigEntry,
expires_at: int,
scopes: list[str],
) -> None:
"""Test service call with invalid config entries."""
config_entry2 = MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_SHEET_ID + "2",
data={
"auth_implementation": DOMAIN,
"token": {
"access_token": "mock-access-token",
"refresh_token": "mock-refresh-token",
"expires_at": expires_at,
"scope": " ".join(scopes),
},
},
)
config_entry2.add_to_hass(hass)
await setup_integration()
assert config_entry.state is ConfigEntryState.LOADED
assert config_entry2.state is ConfigEntryState.LOADED
# Exercise service call on a config entry that does not exist
with pytest.raises(ValueError, match="Invalid config entry"):
await hass.services.async_call(
DOMAIN,
"append_sheet",
{
"config_entry": config_entry.entry_id + "XXX",
"worksheet": "Sheet1",
"data": {"foo": "bar"},
},
blocking=True,
)
# Unload the config entry invoke the service on the unloaded entry id
await hass.config_entries.async_unload(config_entry2.entry_id)
await hass.async_block_till_done()
assert config_entry2.state is ConfigEntryState.NOT_LOADED
with pytest.raises(ValueError, match="Config entry not loaded"):
await hass.services.async_call(
DOMAIN,
"append_sheet",
{
"config_entry": config_entry2.entry_id,
"worksheet": "Sheet1",
"data": {"foo": "bar"},
},
blocking=True,
)
# Unloading the other config entry will de-register the service
await hass.config_entries.async_unload(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state is ConfigEntryState.NOT_LOADED
with pytest.raises(ServiceNotFound):
await hass.services.async_call(
DOMAIN,
"append_sheet",
{
"config_entry": config_entry.entry_id,
"worksheet": "Sheet1",
"data": {"foo": "bar"},
},
blocking=True,
)
+77 -1
View File
@@ -35,7 +35,7 @@ from homeassistant.components.homekit.const import (
from homeassistant.components.homekit.type_triggers import DeviceTriggerAccessory
from homeassistant.components.homekit.util import get_persist_fullpath_for_entry_id
from homeassistant.components.sensor import SensorDeviceClass
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_ZEROCONF
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_DEVICE_ID,
@@ -1394,6 +1394,82 @@ async def test_yaml_updates_update_config_entry_for_name(hass, mock_async_zeroco
mock_homekit().async_start.assert_called()
async def test_yaml_can_link_with_default_name(hass, mock_async_zeroconf):
"""Test async_setup with imported config linked by default name."""
entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_IMPORT,
data={},
options={},
)
entry.add_to_hass(hass)
with patch(f"{PATH_HOMEKIT}.HomeKit") as mock_homekit, patch(
"homeassistant.components.network.async_get_source_ip", return_value="1.2.3.4"
):
mock_homekit.return_value = homekit = Mock()
type(homekit).async_start = AsyncMock()
assert await async_setup_component(
hass,
"homekit",
{"homekit": {"entity_config": {"camera.back_camera": {"stream_count": 3}}}},
)
await hass.async_block_till_done()
mock_homekit.reset_mock()
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert entry.options["entity_config"]["camera.back_camera"]["stream_count"] == 3
async def test_yaml_can_link_with_port(hass, mock_async_zeroconf):
"""Test async_setup with imported config linked by port."""
entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_IMPORT,
data={"name": "random", "port": 12345},
options={},
)
entry.add_to_hass(hass)
entry2 = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_IMPORT,
data={"name": "random", "port": 12346},
options={},
)
entry2.add_to_hass(hass)
entry3 = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_ZEROCONF,
data={"name": "random", "port": 12347},
options={},
)
entry3.add_to_hass(hass)
with patch(f"{PATH_HOMEKIT}.HomeKit") as mock_homekit, patch(
"homeassistant.components.network.async_get_source_ip", return_value="1.2.3.4"
):
mock_homekit.return_value = homekit = Mock()
type(homekit).async_start = AsyncMock()
assert await async_setup_component(
hass,
"homekit",
{
"homekit": {
"port": 12345,
"entity_config": {"camera.back_camera": {"stream_count": 3}},
}
},
)
await hass.async_block_till_done()
mock_homekit.reset_mock()
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert entry.options["entity_config"]["camera.back_camera"]["stream_count"] == 3
assert entry2.options == {}
assert entry3.options == {}
async def test_homekit_uses_system_zeroconf(hass, hk_driver, mock_async_zeroconf):
"""Test HomeKit uses system zeroconf."""
entry = MockConfigEntry(
+1 -1
View File
@@ -609,7 +609,7 @@ async def test_sensor_entity_total_liters(
assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.TOTAL_INCREASING
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == VOLUME_CUBIC_METERS
assert ATTR_DEVICE_CLASS not in state.attributes
assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.VOLUME
assert state.attributes.get(ATTR_ICON) == "mdi:gauge"
@@ -127,3 +127,71 @@ async def test_ignore_default_name(hass):
)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids()) == before_entity_count
async def test_rotating_major_minor_and_mac(hass):
    """Test the different uuid, major, minor from many addresses removes all associated entities."""
    entry = MockConfigEntry(
        domain=DOMAIN,
    )
    entry.add_to_hass(hass)
    before_entity_count = len(hass.states.async_entity_ids("device_tracker"))
    assert await hass.config_entries.async_setup(entry.entry_id)
    await hass.async_block_till_done()

    for i in range(100):
        # Same beacon payload prefix with rotating major/minor bytes and a
        # rotating MAC, so every advertisement looks like a new device.
        payload = b"\x02\x15BlueCharmBeacons" + bytes([i, 0xFE, i]) + b"U\xc5"
        inject_bluetooth_service_info(
            hass,
            BluetoothServiceInfo(
                name="BlueCharm_177999",
                address=f"AA:BB:CC:DD:EE:{i:02X}",
                rssi=-63,
                service_data={},
                manufacturer_data={76: payload},
                service_uuids=[],
                source="local",
            ),
        )

    # Let the integration process and clean up the rotated devices before
    # counting entities again.
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    assert len(hass.states.async_entity_ids("device_tracker")) == before_entity_count
async def test_rotating_major_minor_and_mac_no_name(hass):
    """Test no-name devices with different uuid, major, minor from many addresses removes all associated entities."""
    entry = MockConfigEntry(
        domain=DOMAIN,
    )
    entry.add_to_hass(hass)
    before_entity_count = len(hass.states.async_entity_ids("device_tracker"))
    assert await hass.config_entries.async_setup(entry.entry_id)
    await hass.async_block_till_done()

    for i in range(51):
        # No real device name: the advertised name is just the rotating MAC,
        # while major/minor bytes rotate with it.
        rotating_mac = f"AA:BB:CC:DD:EE:{i:02X}"
        payload = b"\x02\x15BlueCharmBeacons" + bytes([i, 0xFE, i]) + b"U\xc5"
        inject_bluetooth_service_info(
            hass,
            BluetoothServiceInfo(
                name=rotating_mac,
                address=rotating_mac,
                rssi=-63,
                service_data={},
                manufacturer_data={76: payload},
                service_uuids=[],
                source="local",
            ),
        )

    # Let the integration process and clean up the rotated devices before
    # counting entities again.
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    assert len(hass.states.async_entity_ids("device_tracker")) == before_entity_count
+70
View File
@@ -0,0 +1,70 @@
"""Test the ibeacon init."""
import pytest
from homeassistant.components.ibeacon.const import DOMAIN
from homeassistant.helpers import device_registry as dr
from homeassistant.setup import async_setup_component
from . import BLUECHARM_BEACON_SERVICE_INFO
from tests.common import MockConfigEntry
from tests.components.bluetooth import inject_bluetooth_service_info
@pytest.fixture(autouse=True)
def mock_bluetooth(enable_bluetooth):
    """Auto mock bluetooth.

    Autouse fixture so every test in this module runs with bluetooth mocked;
    it only needs to depend on ``enable_bluetooth`` (presumably provided by
    the shared bluetooth test fixtures — confirm in conftest) and has no body
    of its own.
    """
async def remove_device(ws_client, device_id, config_entry_id):
    """Remove config entry from a device."""
    # Issue the device-registry websocket command and report whether the
    # backend accepted the removal.
    message = {
        "id": 5,
        "type": "config/device_registry/remove_config_entry",
        "config_entry_id": config_entry_id,
        "device_id": device_id,
    }
    await ws_client.send_json(message)
    reply = await ws_client.receive_json()
    return reply["success"]
async def test_device_remove_devices(hass, hass_ws_client):
    """Test we can only remove a device that no longer exists."""
    entry = MockConfigEntry(
        domain=DOMAIN,
    )
    entry.add_to_hass(hass)
    assert await async_setup_component(hass, "config", {})
    assert await hass.config_entries.async_setup(entry.entry_id)
    await hass.async_block_till_done()

    inject_bluetooth_service_info(hass, BLUECHARM_BEACON_SERVICE_INFO)
    await hass.async_block_till_done()

    dev_reg = dr.async_get(hass)
    live_identifier = (
        DOMAIN,
        "426c7565-4368-6172-6d42-6561636f6e73_3838_4949_61DE521B-F0BF-9F44-64D4-75BBE1738105",
    )
    live_device = dev_reg.async_get_device({live_identifier}, {})
    # A device that is still being seen must not be removable.
    removed = await remove_device(
        await hass_ws_client(hass), live_device.id, entry.entry_id
    )
    assert removed is False

    # A stale device that is no longer advertising can be removed.
    stale_device = dev_reg.async_get_or_create(
        config_entry_id=entry.entry_id,
        identifiers={(DOMAIN, "not_seen")},
    )
    removed = await remove_device(
        await hass_ws_client(hass), stale_device.id, entry.entry_id
    )
    assert removed is True
+2 -1
View File
@@ -117,7 +117,8 @@ async def test_distance_sensor(
assert state
assert state.state == "15"
assert state.attributes[ATTR_ICON] == "mdi:ruler"
assert state.attributes[ATTR_DEVICE_CLASS] == SensorDeviceClass.DISTANCE
assert ATTR_ICON not in state.attributes
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == "cm"
+13 -13
View File
@@ -25,7 +25,7 @@ from tests.common import async_capture_events, async_fire_time_changed
async def test_setup_component_with_webhook(hass, config_entry, netatmo_auth):
"""Test setup with webhook."""
with selected_platforms(["camera"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -132,7 +132,7 @@ IMAGE_BYTES_FROM_STREAM = b"test stream image bytes"
async def test_camera_image_local(hass, config_entry, requests_mock, netatmo_auth):
"""Test retrieval or local camera image."""
with selected_platforms(["camera"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -158,7 +158,7 @@ async def test_camera_image_local(hass, config_entry, requests_mock, netatmo_aut
async def test_camera_image_vpn(hass, config_entry, requests_mock, netatmo_auth):
"""Test retrieval of remote camera image."""
with selected_platforms(["camera"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -182,7 +182,7 @@ async def test_camera_image_vpn(hass, config_entry, requests_mock, netatmo_auth)
async def test_service_set_person_away(hass, config_entry, netatmo_auth):
"""Test service to set person as away."""
with selected_platforms(["camera"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -219,7 +219,7 @@ async def test_service_set_person_away(hass, config_entry, netatmo_auth):
async def test_service_set_person_away_invalid_person(hass, config_entry, netatmo_auth):
"""Test service to set invalid person as away."""
with selected_platforms(["camera"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -247,7 +247,7 @@ async def test_service_set_persons_home_invalid_person(
):
"""Test service to set invalid persons as home."""
with selected_platforms(["camera"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -273,7 +273,7 @@ async def test_service_set_persons_home_invalid_person(
async def test_service_set_persons_home(hass, config_entry, netatmo_auth):
"""Test service to set persons as home."""
with selected_platforms(["camera"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -297,7 +297,7 @@ async def test_service_set_persons_home(hass, config_entry, netatmo_auth):
async def test_service_set_camera_light(hass, config_entry, netatmo_auth):
"""Test service to set the outdoor camera light mode."""
with selected_platforms(["camera"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -327,7 +327,7 @@ async def test_service_set_camera_light(hass, config_entry, netatmo_auth):
async def test_service_set_camera_light_invalid_type(hass, config_entry, netatmo_auth):
"""Test service to set the indoor camera light mode."""
with selected_platforms(["camera"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -377,7 +377,7 @@ async def test_camera_reconnect_webhook(hass, config_entry):
mock_auth.return_value.async_addwebhook.side_effect = AsyncMock()
mock_auth.return_value.async_dropwebhook.side_effect = AsyncMock()
mock_webhook.return_value = "https://example.com"
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -412,7 +412,7 @@ async def test_camera_reconnect_webhook(hass, config_entry):
async def test_webhook_person_event(hass, config_entry, netatmo_auth):
"""Test that person events are handled."""
with selected_platforms(["camera"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -469,7 +469,7 @@ async def test_setup_component_no_devices(hass, config_entry):
mock_auth.return_value.async_addwebhook.side_effect = AsyncMock()
mock_auth.return_value.async_dropwebhook.side_effect = AsyncMock()
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert fake_post_hits == 9
@@ -508,7 +508,7 @@ async def test_camera_image_raises_exception(hass, config_entry, requests_mock):
mock_auth.return_value.async_addwebhook.side_effect = AsyncMock()
mock_auth.return_value.async_dropwebhook.side_effect = AsyncMock()
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
camera_entity_indoor = "camera.hall"
+12 -12
View File
@@ -27,7 +27,7 @@ from .common import selected_platforms, simulate_webhook
async def test_webhook_event_handling_thermostats(hass, config_entry, netatmo_auth):
"""Test service and webhook event handling with thermostats."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -204,7 +204,7 @@ async def test_service_preset_mode_frost_guard_thermostat(
):
"""Test service with frost guard preset for thermostats."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -277,7 +277,7 @@ async def test_service_preset_mode_frost_guard_thermostat(
async def test_service_preset_modes_thermostat(hass, config_entry, netatmo_auth):
"""Test service with preset modes for thermostats."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -356,7 +356,7 @@ async def test_service_preset_modes_thermostat(hass, config_entry, netatmo_auth)
async def test_webhook_event_handling_no_data(hass, config_entry, netatmo_auth):
"""Test service and webhook event handling with erroneous data."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -405,7 +405,7 @@ async def test_webhook_event_handling_no_data(hass, config_entry, netatmo_auth):
async def test_service_schedule_thermostats(hass, config_entry, caplog, netatmo_auth):
"""Test service for selecting Netatmo schedule with thermostats."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -458,7 +458,7 @@ async def test_service_preset_mode_already_boost_valves(
):
"""Test service with boost preset for valves when already in boost mode."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -536,7 +536,7 @@ async def test_service_preset_mode_already_boost_valves(
async def test_service_preset_mode_boost_valves(hass, config_entry, netatmo_auth):
"""Test service with boost preset for valves."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -586,7 +586,7 @@ async def test_service_preset_mode_boost_valves(hass, config_entry, netatmo_auth
async def test_service_preset_mode_invalid(hass, config_entry, caplog, netatmo_auth):
"""Test service with invalid preset."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -604,7 +604,7 @@ async def test_service_preset_mode_invalid(hass, config_entry, caplog, netatmo_a
async def test_valves_service_turn_off(hass, config_entry, netatmo_auth):
"""Test service turn off for valves."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -654,7 +654,7 @@ async def test_valves_service_turn_off(hass, config_entry, netatmo_auth):
async def test_valves_service_turn_on(hass, config_entry, netatmo_auth):
"""Test service turn on for valves."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -699,7 +699,7 @@ async def test_valves_service_turn_on(hass, config_entry, netatmo_auth):
async def test_webhook_home_id_mismatch(hass, config_entry, netatmo_auth):
"""Test service turn on for valves."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -737,7 +737,7 @@ async def test_webhook_home_id_mismatch(hass, config_entry, netatmo_auth):
async def test_webhook_set_point(hass, config_entry, netatmo_auth):
"""Test service turn on for valves."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
+1 -1
View File
@@ -17,7 +17,7 @@ from .common import selected_platforms
async def test_cover_setup_and_services(hass, config_entry, netatmo_auth):
"""Test setup and services."""
with selected_platforms(["cover"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
+1 -1
View File
@@ -121,7 +121,7 @@ async def test_setup_component_with_config(hass, config_entry):
async def test_setup_component_with_webhook(hass, config_entry, netatmo_auth):
"""Test setup and teardown of the netatmo component with webhook registration."""
with selected_platforms(["camera", "climate", "light", "sensor"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
+3 -3
View File
@@ -17,7 +17,7 @@ from tests.test_util.aiohttp import AiohttpClientMockResponse
async def test_camera_light_setup_and_services(hass, config_entry, netatmo_auth):
"""Test camera ligiht setup and services."""
with selected_platforms(["light"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -108,7 +108,7 @@ async def test_setup_component_no_devices(hass, config_entry):
mock_auth.return_value.async_addwebhook.side_effect = AsyncMock()
mock_auth.return_value.async_dropwebhook.side_effect = AsyncMock()
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
# Fake webhook activation
@@ -126,7 +126,7 @@ async def test_setup_component_no_devices(hass, config_entry):
async def test_light_setup_and_services(hass, config_entry, netatmo_auth):
"""Test setup and services."""
with selected_platforms(["light"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
+1 -1
View File
@@ -14,7 +14,7 @@ from .common import selected_platforms, simulate_webhook
async def test_select_schedule_thermostats(hass, config_entry, caplog, netatmo_auth):
"""Test service for selecting Netatmo schedule with thermostats."""
with selected_platforms(["climate", "select"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
+4 -4
View File
@@ -12,7 +12,7 @@ from .common import TEST_TIME, selected_platforms
async def test_weather_sensor(hass, config_entry, netatmo_auth):
"""Test weather sensor setup."""
with patch("time.time", return_value=TEST_TIME), selected_platforms(["sensor"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -27,7 +27,7 @@ async def test_weather_sensor(hass, config_entry, netatmo_auth):
async def test_public_weather_sensor(hass, config_entry, netatmo_auth):
"""Test public weather sensor setup."""
with patch("time.time", return_value=TEST_TIME), selected_platforms(["sensor"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -182,7 +182,7 @@ async def test_weather_sensor_enabling(
suggested_object_id=name,
disabled_by=None,
)
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
@@ -195,7 +195,7 @@ async def test_climate_battery_sensor(hass, config_entry, netatmo_auth):
with patch("time.time", return_value=TEST_TIME), selected_platforms(
["sensor", "climate"]
):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
+1 -1
View File
@@ -14,7 +14,7 @@ from .common import selected_platforms
async def test_switch_setup_and_services(hass, config_entry, netatmo_auth):
"""Test setup and services."""
with selected_platforms(["switch"]):
await hass.config_entries.async_setup(config_entry.entry_id)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
+3 -3
View File
@@ -525,12 +525,12 @@ async def test_import_statistics(
statistic_ids = list_statistic_ids(hass)
assert statistic_ids == [
{
"display_unit_of_measurement": "kWh",
"has_mean": False,
"has_sum": True,
"statistic_id": statistic_id,
"name": "Total imported energy",
"source": source,
"state_unit_of_measurement": "kWh",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
}
@@ -621,12 +621,12 @@ async def test_import_statistics(
statistic_ids = list_statistic_ids(hass)
assert statistic_ids == [
{
"display_unit_of_measurement": "MWh",
"has_mean": False,
"has_sum": True,
"statistic_id": statistic_id,
"name": "Total imported energy renamed",
"source": source,
"state_unit_of_measurement": "MWh",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
}
@@ -682,7 +682,7 @@ async def test_import_statistics(
"statistic_id": statistic_id,
"start_time": period2.isoformat(),
"adjustment": 1000.0,
"display_unit": "MWh",
"adjustment_unit_of_measurement": "MWh",
}
)
response = await client.receive_json()
+23 -23
View File
@@ -651,7 +651,7 @@ async def test_list_statistic_ids(
"has_sum": has_sum,
"name": None,
"source": "recorder",
"display_unit_of_measurement": display_unit,
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@@ -673,7 +673,7 @@ async def test_list_statistic_ids(
"has_sum": has_sum,
"name": None,
"source": "recorder",
"display_unit_of_measurement": display_unit,
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@@ -698,7 +698,7 @@ async def test_list_statistic_ids(
"has_sum": has_sum,
"name": None,
"source": "recorder",
"display_unit_of_measurement": display_unit,
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@@ -719,7 +719,7 @@ async def test_list_statistic_ids(
"has_sum": has_sum,
"name": None,
"source": "recorder",
"display_unit_of_measurement": display_unit,
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@@ -903,11 +903,11 @@ async def test_update_statistics_metadata(
assert response["result"] == [
{
"statistic_id": "sensor.test",
"display_unit_of_measurement": "kW",
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "kW",
"statistics_unit_of_measurement": "kW",
"unit_class": None,
}
@@ -931,11 +931,11 @@ async def test_update_statistics_metadata(
assert response["result"] == [
{
"statistic_id": "sensor.test",
"display_unit_of_measurement": "kW",
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "kW",
"statistics_unit_of_measurement": new_unit,
"unit_class": new_unit_class,
}
@@ -995,11 +995,11 @@ async def test_change_statistics_unit(hass, hass_ws_client, recorder_mock):
assert response["result"] == [
{
"statistic_id": "sensor.test",
"display_unit_of_measurement": "kW",
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "kW",
"statistics_unit_of_measurement": "kW",
"unit_class": None,
}
@@ -1051,11 +1051,11 @@ async def test_change_statistics_unit(hass, hass_ws_client, recorder_mock):
assert response["result"] == [
{
"statistic_id": "sensor.test",
"display_unit_of_measurement": "kW",
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "kW",
"statistics_unit_of_measurement": "W",
"unit_class": "power",
}
@@ -1104,11 +1104,11 @@ async def test_change_statistics_unit_errors(
expected_statistic_ids = [
{
"statistic_id": "sensor.test",
"display_unit_of_measurement": "kW",
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "kW",
"statistics_unit_of_measurement": "kW",
"unit_class": None,
}
@@ -1483,11 +1483,11 @@ async def test_get_statistics_metadata(
assert response["result"] == [
{
"statistic_id": "test:total_gas",
"display_unit_of_measurement": unit,
"has_mean": has_mean,
"has_sum": has_sum,
"name": "Total imported energy",
"source": "test",
"state_unit_of_measurement": unit,
"statistics_unit_of_measurement": unit,
"unit_class": unit_class,
}
@@ -1511,11 +1511,11 @@ async def test_get_statistics_metadata(
assert response["result"] == [
{
"statistic_id": "sensor.test",
"display_unit_of_measurement": attributes["unit_of_measurement"],
"has_mean": has_mean,
"has_sum": has_sum,
"name": None,
"source": "recorder",
"state_unit_of_measurement": attributes["unit_of_measurement"],
"statistics_unit_of_measurement": unit,
"unit_class": unit_class,
}
@@ -1539,11 +1539,11 @@ async def test_get_statistics_metadata(
assert response["result"] == [
{
"statistic_id": "sensor.test",
"display_unit_of_measurement": attributes["unit_of_measurement"],
"has_mean": has_mean,
"has_sum": has_sum,
"name": None,
"source": "recorder",
"state_unit_of_measurement": attributes["unit_of_measurement"],
"statistics_unit_of_measurement": unit,
"unit_class": unit_class,
}
@@ -1635,12 +1635,12 @@ async def test_import_statistics(
statistic_ids = list_statistic_ids(hass) # TODO
assert statistic_ids == [
{
"display_unit_of_measurement": "kWh",
"has_mean": False,
"has_sum": True,
"statistic_id": statistic_id,
"name": "Total imported energy",
"source": source,
"state_unit_of_measurement": "kWh",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
}
@@ -1864,12 +1864,12 @@ async def test_adjust_sum_statistics_energy(
statistic_ids = list_statistic_ids(hass) # TODO
assert statistic_ids == [
{
"display_unit_of_measurement": "kWh",
"has_mean": False,
"has_sum": True,
"statistic_id": statistic_id,
"name": "Total imported energy",
"source": source,
"state_unit_of_measurement": "kWh",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
}
@@ -1898,7 +1898,7 @@ async def test_adjust_sum_statistics_energy(
"statistic_id": statistic_id,
"start_time": period2.isoformat(),
"adjustment": 1000.0,
"display_unit": "kWh",
"adjustment_unit_of_measurement": "kWh",
}
)
response = await client.receive_json()
@@ -1941,7 +1941,7 @@ async def test_adjust_sum_statistics_energy(
"statistic_id": statistic_id,
"start_time": period2.isoformat(),
"adjustment": 2.0,
"display_unit": "MWh",
"adjustment_unit_of_measurement": "MWh",
}
)
response = await client.receive_json()
@@ -2062,12 +2062,12 @@ async def test_adjust_sum_statistics_gas(
statistic_ids = list_statistic_ids(hass) # TODO
assert statistic_ids == [
{
"display_unit_of_measurement": "",
"has_mean": False,
"has_sum": True,
"statistic_id": statistic_id,
"name": "Total imported energy",
"source": source,
"state_unit_of_measurement": "",
"statistics_unit_of_measurement": "",
"unit_class": "volume",
}
@@ -2096,7 +2096,7 @@ async def test_adjust_sum_statistics_gas(
"statistic_id": statistic_id,
"start_time": period2.isoformat(),
"adjustment": 1000.0,
"display_unit": "",
"adjustment_unit_of_measurement": "",
}
)
response = await client.receive_json()
@@ -2139,7 +2139,7 @@ async def test_adjust_sum_statistics_gas(
"statistic_id": statistic_id,
"start_time": period2.isoformat(),
"adjustment": 35.3147, # ~1 m³
"display_unit": "ft³",
"adjustment_unit_of_measurement": "ft³",
}
)
response = await client.receive_json()
@@ -2276,12 +2276,12 @@ async def test_adjust_sum_statistics_errors(
statistic_ids = list_statistic_ids(hass)
assert statistic_ids == [
{
"display_unit_of_measurement": state_unit,
"has_mean": False,
"has_sum": True,
"statistic_id": statistic_id,
"name": "Total imported energy",
"source": source,
"state_unit_of_measurement": state_unit,
"statistics_unit_of_measurement": statistic_unit,
"unit_class": unit_class,
}
@@ -2311,7 +2311,7 @@ async def test_adjust_sum_statistics_errors(
"statistic_id": "sensor.does_not_exist",
"start_time": period2.isoformat(),
"adjustment": 1000.0,
"display_unit": statistic_unit,
"adjustment_unit_of_measurement": statistic_unit,
}
)
response = await client.receive_json()
@@ -2331,7 +2331,7 @@ async def test_adjust_sum_statistics_errors(
"statistic_id": statistic_id,
"start_time": period2.isoformat(),
"adjustment": 1000.0,
"display_unit": unit,
"adjustment_unit_of_measurement": unit,
}
)
response = await client.receive_json()
@@ -2351,7 +2351,7 @@ async def test_adjust_sum_statistics_errors(
"statistic_id": statistic_id,
"start_time": period2.isoformat(),
"adjustment": 1000.0,
"display_unit": unit,
"adjustment_unit_of_measurement": unit,
}
)
response = await client.receive_json()
+38 -38
View File
@@ -136,11 +136,11 @@ def test_compile_hourly_statistics(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@@ -210,12 +210,12 @@ def test_compile_hourly_statistics_purged_state_changes(
statistic_ids = list_statistic_ids(hass)
assert statistic_ids == [
{
"display_unit_of_measurement": display_unit,
"statistic_id": "sensor.test1",
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@@ -281,31 +281,31 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": "°C",
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "°C",
"statistics_unit_of_measurement": "°C",
"unit_class": "temperature",
},
{
"statistic_id": "sensor.test6",
"display_unit_of_measurement": "°C",
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "°C",
"statistics_unit_of_measurement": "°C",
"unit_class": "temperature",
},
{
"statistic_id": "sensor.test7",
"display_unit_of_measurement": "°C",
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "°C",
"statistics_unit_of_measurement": "°C",
"unit_class": "temperature",
},
@@ -436,11 +436,11 @@ async def test_compile_hourly_sum_statistics_amount(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": False,
"has_sum": True,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@@ -516,7 +516,7 @@ async def test_compile_hourly_sum_statistics_amount(
"statistic_id": "sensor.test1",
"start_time": period1.isoformat(),
"adjustment": 100.0,
"display_unit": display_unit,
"adjustment_unit_of_measurement": display_unit,
}
)
response = await client.receive_json()
@@ -536,7 +536,7 @@ async def test_compile_hourly_sum_statistics_amount(
"statistic_id": "sensor.test1",
"start_time": period2.isoformat(),
"adjustment": -400.0,
"display_unit": display_unit,
"adjustment_unit_of_measurement": display_unit,
}
)
response = await client.receive_json()
@@ -629,11 +629,11 @@ def test_compile_hourly_sum_statistics_amount_reset_every_state_change(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": False,
"has_sum": True,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@@ -730,11 +730,11 @@ def test_compile_hourly_sum_statistics_amount_invalid_last_reset(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": False,
"has_sum": True,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@@ -815,11 +815,11 @@ def test_compile_hourly_sum_statistics_nan_inf_state(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": False,
"has_sum": True,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@@ -929,11 +929,11 @@ def test_compile_hourly_sum_statistics_negative_state(
wait_recording_done(hass)
statistic_ids = list_statistic_ids(hass)
assert {
"name": None,
"display_unit_of_measurement": display_unit,
"has_mean": False,
"has_sum": True,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistic_id": entity_id,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
@@ -1018,11 +1018,11 @@ def test_compile_hourly_sum_statistics_total_no_reset(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": False,
"has_sum": True,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@@ -1121,11 +1121,11 @@ def test_compile_hourly_sum_statistics_total_increasing(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": False,
"has_sum": True,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@@ -1235,11 +1235,11 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": False,
"has_sum": True,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}
@@ -1330,11 +1330,11 @@ def test_compile_hourly_energy_statistics_unsupported(hass_recorder, caplog):
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": "kWh",
"has_mean": False,
"has_sum": True,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "kWh",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
}
@@ -1423,31 +1423,31 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": "kWh",
"has_mean": False,
"has_sum": True,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "kWh",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
},
{
"statistic_id": "sensor.test2",
"display_unit_of_measurement": "kWh",
"has_mean": False,
"has_sum": True,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "kWh",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
},
{
"statistic_id": "sensor.test3",
"display_unit_of_measurement": "Wh",
"has_mean": False,
"has_sum": True,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "Wh",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
},
@@ -1807,11 +1807,11 @@ def test_list_statistic_ids(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": statistic_type == "mean",
"has_sum": statistic_type == "sum",
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
},
@@ -1822,11 +1822,11 @@ def test_list_statistic_ids(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": statistic_type == "mean",
"has_sum": statistic_type == "sum",
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
},
@@ -1913,11 +1913,11 @@ def test_compile_hourly_statistics_changing_units_1(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
},
@@ -1949,11 +1949,11 @@ def test_compile_hourly_statistics_changing_units_1(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
},
@@ -2025,11 +2025,11 @@ def test_compile_hourly_statistics_changing_units_2(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": "cats",
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "cats",
"statistics_unit_of_measurement": "cats",
"unit_class": unit_class,
},
@@ -2091,11 +2091,11 @@ def test_compile_hourly_statistics_changing_units_3(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
},
@@ -2127,11 +2127,11 @@ def test_compile_hourly_statistics_changing_units_3(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
},
@@ -2193,11 +2193,11 @@ def test_compile_hourly_statistics_changing_device_class_1(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": state_unit,
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": state_unit,
"statistics_unit_of_measurement": state_unit,
"unit_class": unit_class,
},
@@ -2239,11 +2239,11 @@ def test_compile_hourly_statistics_changing_device_class_1(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": state_unit,
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": state_unit,
"statistics_unit_of_measurement": state_unit,
"unit_class": unit_class,
},
@@ -2302,11 +2302,11 @@ def test_compile_hourly_statistics_changing_device_class_1(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": state_unit,
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": state_unit,
"statistics_unit_of_measurement": state_unit,
"unit_class": unit_class,
},
@@ -2382,11 +2382,11 @@ def test_compile_hourly_statistics_changing_device_class_2(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistic_unit,
"unit_class": unit_class,
},
@@ -2432,11 +2432,11 @@ def test_compile_hourly_statistics_changing_device_class_2(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": display_unit,
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistic_unit,
"unit_class": unit_class,
},
@@ -2502,11 +2502,11 @@ def test_compile_hourly_statistics_changing_statistics(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": None,
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": None,
"statistics_unit_of_measurement": None,
"unit_class": None,
},
@@ -2539,11 +2539,11 @@ def test_compile_hourly_statistics_changing_statistics(
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": None,
"has_mean": False,
"has_sum": True,
"name": None,
"source": "recorder",
"state_unit_of_measurement": None,
"statistics_unit_of_measurement": None,
"unit_class": None,
},
@@ -2734,41 +2734,41 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": "%",
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "%",
"statistics_unit_of_measurement": "%",
"unit_class": None,
},
{
"statistic_id": "sensor.test2",
"display_unit_of_measurement": "%",
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "%",
"statistics_unit_of_measurement": "%",
"unit_class": None,
},
{
"statistic_id": "sensor.test3",
"display_unit_of_measurement": "%",
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "%",
"statistics_unit_of_measurement": "%",
"unit_class": None,
},
{
"statistic_id": "sensor.test4",
"display_unit_of_measurement": "EUR",
"has_mean": False,
"has_sum": True,
"name": None,
"source": "recorder",
"state_unit_of_measurement": "EUR",
"statistics_unit_of_measurement": "EUR",
"unit_class": None,
},
+5 -6
View File
@@ -1,7 +1,6 @@
"""Tests for Shelly update platform."""
from homeassistant.components.shelly.const import DOMAIN
from homeassistant.components.update import DOMAIN as UPDATE_DOMAIN
from homeassistant.components.update.const import SERVICE_INSTALL
from homeassistant.components.update import DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL
from homeassistant.const import ATTR_ENTITY_ID, STATE_ON, STATE_UNKNOWN
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_component import async_update_entity
@@ -16,8 +15,8 @@ async def test_block_update(hass: HomeAssistant, coap_wrapper, monkeypatch):
entity_registry.async_get_or_create(
UPDATE_DOMAIN,
DOMAIN,
"test_name_update",
suggested_object_id="test_name_update",
"test-mac-fwupdate",
suggested_object_id="test_name_firmware_update",
disabled_by=None,
)
hass.async_create_task(
@@ -62,8 +61,8 @@ async def test_rpc_update(hass: HomeAssistant, rpc_wrapper, monkeypatch):
entity_registry.async_get_or_create(
UPDATE_DOMAIN,
DOMAIN,
"test_name_update",
suggested_object_id="test_name_update",
"12345678-sys-fwupdate",
suggested_object_id="test_name_firmware_update",
disabled_by=None,
)
@@ -711,19 +711,18 @@ async def test_browse_media_eventthumb(
@freeze_time("2022-09-15 03:00:00-07:00")
async def test_browse_media_day(
hass: HomeAssistant, ufp: MockUFPFixture, doorbell: Camera, fixed_now: datetime
hass: HomeAssistant, ufp: MockUFPFixture, doorbell: Camera
):
"""Test browsing day selector level media."""
start = datetime.fromisoformat("2022-09-03 03:00:00-07:00")
end = datetime.fromisoformat("2022-09-15 03:00:00-07:00")
ufp.api.bootstrap._recording_start = dt_util.as_utc(start)
ufp.api.get_bootstrap = AsyncMock(return_value=ufp.api.bootstrap)
await init_entry(hass, ufp, [doorbell], regenerate_ids=False)
base_id = (
f"test_id:browse:{doorbell.id}:all:range:{fixed_now.year}:{fixed_now.month}"
)
base_id = f"test_id:browse:{doorbell.id}:all:range:{end.year}:{end.month}"
source = await async_get_media_source(hass)
media_item = MediaSourceItem(hass, DOMAIN, base_id, None)
@@ -731,7 +730,7 @@ async def test_browse_media_day(
assert (
browse.title
== f"UnifiProtect > {doorbell.name} > All Events > {fixed_now.strftime('%B %Y')}"
== f"UnifiProtect > {doorbell.name} > All Events > {end.strftime('%B %Y')}"
)
assert browse.identifier == base_id
assert len(browse.children) == 14
+5
View File
@@ -327,6 +327,7 @@ async def test_zeroconf_match_macaddress(hass, mock_async_zeroconf):
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 1
assert mock_config_flow.mock_calls[0][1][0] == "shelly"
assert mock_config_flow.mock_calls[0][2]["context"] == {"source": "zeroconf"}
async def test_zeroconf_match_manufacturer(hass, mock_async_zeroconf):
@@ -533,6 +534,10 @@ async def test_homekit_match_partial_space(hass, mock_async_zeroconf):
# One for HKC, and one for LIFX since lifx is local polling
assert len(mock_config_flow.mock_calls) == 2
assert mock_config_flow.mock_calls[0][1][0] == "lifx"
assert mock_config_flow.mock_calls[1][2]["context"] == {
"source": "zeroconf",
"alternative_domain": "lifx",
}
async def test_homekit_match_partial_dash(hass, mock_async_zeroconf):