forked from home-assistant/core
Compare commits
46 Commits
2023.3.0b3 ... 2023.3.0b7
| SHA1 |
|---|
| 6c73b9024b |
| 59a9ace171 |
| e751948bc8 |
| 702646427d |
| 8a605b1377 |
| 8eb8415d3f |
| 9f3f71d0c3 |
| b82da9418d |
| 38cf725075 |
| 04cedab8d4 |
| 2238a3f201 |
| f58ca17926 |
| d5e517b874 |
| f9eeb4f4d8 |
| 86d5e4aaa8 |
| a56935ed7c |
| fc56c958c3 |
| a8e1dc8962 |
| 32b138b6c6 |
| 2112c66804 |
| 72c0526d87 |
| 9ed4e01e94 |
| dcf1ecfeb5 |
| b72224ceff |
| 96ad5c9666 |
| 00b59c142a |
| b054c81e13 |
| b0cbcad440 |
| bafe552af6 |
| d399855e50 |
| d26f430766 |
| f2e4943a53 |
| 6512cd901f |
| fbe1524f6c |
| 95e337277c |
| 1503674bd6 |
| ab6bd75b70 |
| 2fff836bd4 |
| d8850758f1 |
| 0449856064 |
| e48089e0c9 |
| a7e081f70d |
| fe181425d8 |
| 8c7b29db25 |
| aaa5bb9f86 |
| 5b78e0c4ff |
homeassistant/brands/heltun.json (new file, 5 lines)
@@ -0,0 +1,5 @@
+{
+  "domain": "heltun",
+  "name": "HELTUN",
+  "iot_standards": ["zwave"]
+}
@@ -28,5 +28,5 @@
   "documentation": "https://www.home-assistant.io/integrations/august",
   "iot_class": "cloud_push",
   "loggers": ["pubnub", "yalexs"],
-  "requirements": ["yalexs==1.2.7", "yalexs_ble==2.0.3"]
+  "requirements": ["yalexs==1.2.7", "yalexs_ble==2.0.4"]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aurora",
   "iot_class": "cloud_polling",
   "loggers": ["auroranoaa"],
-  "requirements": ["auroranoaa==0.0.2"]
+  "requirements": ["auroranoaa==0.0.3"]
 }
@@ -106,6 +106,8 @@ class ActiveBluetoothDataUpdateCoordinator(
 
     def needs_poll(self, service_info: BluetoothServiceInfoBleak) -> bool:
         """Return true if time to try and poll."""
+        if self.hass.is_stopping:
+            return False
         poll_age: float | None = None
         if self._last_poll:
             poll_age = monotonic_time_coarse() - self._last_poll
@@ -99,6 +99,8 @@ class ActiveBluetoothProcessorCoordinator(
 
    def needs_poll(self, service_info: BluetoothServiceInfoBleak) -> bool:
        """Return true if time to try and poll."""
+        if self.hass.is_stopping:
+            return False
        poll_age: float | None = None
        if self._last_poll:
            poll_age = monotonic_time_coarse() - self._last_poll
@@ -5,5 +5,5 @@
   "documentation": "https://www.home-assistant.io/integrations/caldav",
   "iot_class": "cloud_polling",
   "loggers": ["caldav", "vobject"],
-  "requirements": ["caldav==1.1.1"]
+  "requirements": ["caldav==1.2.0"]
 }
@@ -257,9 +257,9 @@ class DefaultAgent(AbstractConversationAgent):
         # This is available in the response template as "state".
         state1: core.State | None = None
         if intent_response.matched_states:
-            state1 = intent_response.matched_states[0]
+            state1 = matched[0]
         elif intent_response.unmatched_states:
-            state1 = intent_response.unmatched_states[0]
+            state1 = unmatched[0]
 
         # Render response template
         speech = response_template.async_render(
@@ -7,5 +7,5 @@
   "integration_type": "system",
   "iot_class": "local_push",
   "quality_scale": "internal",
-  "requirements": ["hassil==1.0.5", "home-assistant-intents==2023.2.22"]
+  "requirements": ["hassil==1.0.6", "home-assistant-intents==2023.2.28"]
 }
@@ -19,7 +19,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
 from .const import CONF_ASSOCIATION_DATA, DOMAIN, UPDATE_SECONDS
 from .models import DormakabaDkeyData
 
-PLATFORMS: list[Platform] = [Platform.LOCK, Platform.SENSOR]
+PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.LOCK, Platform.SENSOR]
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -132,7 +132,8 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
 
         try:
             association_data = await lock.associate(user_input["activation_code"])
-        except BleakError:
+        except BleakError as err:
+            _LOGGER.warning("BleakError", exc_info=err)
             return self.async_abort(reason="cannot_connect")
         except dkey_errors.InvalidActivationCode:
             errors["base"] = "invalid_code"
@@ -11,5 +11,5 @@
   "documentation": "https://www.home-assistant.io/integrations/dormakaba_dkey",
   "integration_type": "device",
   "iot_class": "local_polling",
-  "requirements": ["py-dormakaba-dkey==1.0.2"]
+  "requirements": ["py-dormakaba-dkey==1.0.3"]
 }
@@ -2,6 +2,7 @@
 from __future__ import annotations
 
 from datetime import datetime, timedelta
+from random import randint
 
 from enturclient import EnturPublicTransportData
 import voluptuous as vol
@@ -22,7 +23,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 from homeassistant.util import Throttle
 import homeassistant.util.dt as dt_util
 
-API_CLIENT_NAME = "homeassistant-homeassistant"
+API_CLIENT_NAME = "homeassistant-{}"
 
 CONF_STOP_IDS = "stop_ids"
 CONF_EXPAND_PLATFORMS = "expand_platforms"
@@ -105,7 +106,7 @@ async def async_setup_platform(
     quays = [s for s in stop_ids if "Quay" in s]
 
     data = EnturPublicTransportData(
-        API_CLIENT_NAME,
+        API_CLIENT_NAME.format(str(randint(100000, 999999))),
         stops=stops,
         quays=quays,
         line_whitelist=line_whitelist,
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/environment_canada",
   "iot_class": "cloud_polling",
   "loggers": ["env_canada"],
-  "requirements": ["env_canada==0.5.28"]
+  "requirements": ["env_canada==0.5.29"]
 }
@@ -87,14 +87,23 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         hass, STARTUP_SCAN_TIMEOUT
     )
 
+    @callback
+    def _async_start_background_discovery(*_: Any) -> None:
+        """Run discovery in the background."""
+        hass.async_create_background_task(_async_discovery(), "flux_led-discovery")
+
     async def _async_discovery(*_: Any) -> None:
         async_trigger_discovery(
             hass, await async_discover_devices(hass, DISCOVER_SCAN_TIMEOUT)
         )
 
     async_trigger_discovery(hass, domain_data[FLUX_LED_DISCOVERY])
-    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, _async_discovery)
-    async_track_time_interval(hass, _async_discovery, DISCOVERY_INTERVAL)
+    hass.bus.async_listen_once(
+        EVENT_HOMEASSISTANT_STARTED, _async_start_background_discovery
+    )
+    async_track_time_interval(
+        hass, _async_start_background_discovery, DISCOVERY_INTERVAL
+    )
     return True
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20230224.0"]
+  "requirements": ["home-assistant-frontend==20230227.0"]
 }
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/garages_amsterdam",
   "iot_class": "cloud_polling",
-  "requirements": ["odp-amsterdam==5.0.1"]
+  "requirements": ["odp-amsterdam==5.1.0"]
 }
@@ -1,7 +1,6 @@
{
  "domain": "hassio",
  "name": "Home Assistant Supervisor",
  "after_dependencies": ["panel_custom"],
  "codeowners": ["@home-assistant/supervisor"],
  "dependencies": ["http"],
  "documentation": "https://www.home-assistant.io/integrations/hassio",
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/honeywell",
   "iot_class": "cloud_polling",
   "loggers": ["somecomfort"],
-  "requirements": ["aiosomecomfort==0.0.8"]
+  "requirements": ["aiosomecomfort==0.0.10"]
 }
@@ -5,6 +5,7 @@ from collections.abc import Awaitable, Callable
 import logging
 import re
 from typing import Final
+from urllib.parse import unquote
 
 from aiohttp.web import Application, HTTPBadRequest, Request, StreamResponse, middleware
@@ -39,18 +40,24 @@ FILTERS: Final = re.compile(
 def setup_security_filter(app: Application) -> None:
     """Create security filter middleware for the app."""
 
+    def _recursive_unquote(value: str) -> str:
+        """Handle values that are encoded multiple times."""
+        if (unquoted := unquote(value)) != value:
+            unquoted = _recursive_unquote(unquoted)
+        return unquoted
+
     @middleware
     async def security_filter_middleware(
         request: Request, handler: Callable[[Request], Awaitable[StreamResponse]]
     ) -> StreamResponse:
-        """Process request and tblock commonly known exploit attempts."""
-        if FILTERS.search(request.path):
+        """Process request and block commonly known exploit attempts."""
+        if FILTERS.search(_recursive_unquote(request.path)):
            _LOGGER.warning(
                "Filtered a potential harmful request to: %s", request.raw_path
            )
            raise HTTPBadRequest
 
-        if FILTERS.search(request.query_string):
+        if FILTERS.search(_recursive_unquote(request.query_string)):
            _LOGGER.warning(
                "Filtered a request with a potential harmful query string: %s",
                request.raw_path,
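The recursive unquote matters because an attacker can percent-encode a payload more than once; a single decoding pass still leaves the filter looking at an encoded string. A minimal, standalone illustration in plain Python (the filter pattern below is hypothetical, not the one Home Assistant uses):

from urllib.parse import quote, unquote
import re

FILTER = re.compile(r"\.\./")  # hypothetical exploit pattern

def recursive_unquote(value: str) -> str:
    """Unquote until the value stops changing."""
    if (unquoted := unquote(value)) != value:
        return recursive_unquote(unquoted)
    return value

payload = quote(quote("../secret", safe=""), safe="")  # double-encoded: "..%252Fsecret"
print(FILTER.search(unquote(payload)))                 # None -- one pass still sees "..%2Fsecret"
print(FILTER.search(recursive_unquote(payload)))       # match -- fully decoded "../secret"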
@@ -35,6 +35,7 @@ TRIGGER_TYPE = {
     "remote_double_button_long_press": "both {subtype} released after long press",
     "remote_double_button_short_press": "both {subtype} released",
     "initial_press": "{subtype} pressed initially",
+    "long_press": "{subtype} long press",
     "repeat": "{subtype} held down",
     "short_release": "{subtype} released after short press",
     "long_release": "{subtype} released after long press",
@@ -11,6 +11,6 @@
   "iot_class": "local_push",
   "loggers": ["aiohue"],
   "quality_scale": "platinum",
-  "requirements": ["aiohue==4.6.1"],
+  "requirements": ["aiohue==4.6.2"],
   "zeroconf": ["_hue._tcp.local."]
 }
@@ -118,13 +118,14 @@ class HueSceneEntityBase(HueBaseEntity, SceneEntity):
         """Return device (service) info."""
         # we create a virtual service/device for Hue scenes
         # so we have a parent for grouped lights and scenes
+        group_type = self.group.type.value.title()
         return DeviceInfo(
             identifiers={(DOMAIN, self.group.id)},
             entry_type=DeviceEntryType.SERVICE,
             name=self.group.metadata.name,
             manufacturer=self.bridge.api.config.bridge_device.product_data.manufacturer_name,
             model=self.group.type.value.title(),
-            suggested_area=self.group.metadata.name,
+            suggested_area=self.group.metadata.name if group_type == "Room" else None,
             via_device=(DOMAIN, self.bridge.api.config.bridge_device.id),
         )
@@ -46,6 +46,7 @@ DEFAULT_BUTTON_EVENT_TYPES = (
     ButtonEvent.INITIAL_PRESS,
     ButtonEvent.REPEAT,
     ButtonEvent.SHORT_RELEASE,
+    ButtonEvent.LONG_PRESS,
     ButtonEvent.LONG_RELEASE,
 )
@@ -55,7 +55,13 @@ class HueBaseEntity(Entity):
         self._attr_unique_id = resource.id
         # device is precreated in main handler
         # this attaches the entity to the precreated device
-        if self.device is not None:
+        if self.device is None:
+            # attach all device-less entities to the bridge itself
+            # e.g. config based sensors like entertainment area
+            self._attr_device_info = DeviceInfo(
+                identifiers={(DOMAIN, bridge.api.config.bridge.bridge_id)},
+            )
+        else:
             self._attr_device_info = DeviceInfo(
                 identifiers={(DOMAIN, self.device.id)},
             )
@@ -137,17 +143,14 @@ class HueBaseEntity(Entity):
     def _handle_event(self, event_type: EventType, resource: HueResource) -> None:
         """Handle status event for this resource (or it's parent)."""
         if event_type == EventType.RESOURCE_DELETED:
-            # remove any services created for zones/rooms
+            # handle removal of room and zone 'virtual' devices/services
+            # regular devices are removed automatically by the logic in device.py.
             if resource.type in (ResourceTypes.ROOM, ResourceTypes.ZONE):
                 dev_reg = async_get_device_registry(self.hass)
                 if device := dev_reg.async_get_device({(DOMAIN, resource.id)}):
                     dev_reg.async_remove_device(device.id)
-            if resource.type in (
-                ResourceTypes.GROUPED_LIGHT,
-                ResourceTypes.SCENE,
-                ResourceTypes.SMART_SCENE,
-            ):
+            # cleanup entities that are not strictly device-bound and have the bridge as parent
             if self.device is None:
                 ent_reg = async_get_entity_registry(self.hass)
                 ent_reg.async_remove(self.entity_id)
             return
@@ -17,7 +17,7 @@
   "iot_class": "local_push",
   "loggers": ["pyinsteon", "pypubsub"],
   "requirements": [
-    "pyinsteon==1.3.2",
+    "pyinsteon==1.3.3",
     "insteon-frontend-home-assistant==0.3.2"
   ],
   "usb": [
@@ -9,5 +9,5 @@
   "iot_class": "local_push",
   "loggers": ["xknx"],
   "quality_scale": "platinum",
-  "requirements": ["xknx==2.5.0"]
+  "requirements": ["xknx==2.6.0"]
 }
@@ -33,6 +33,7 @@ from homeassistant.helpers.config_validation import (  # noqa: F401
 )
 from homeassistant.helpers.entity import Entity, EntityDescription
 from homeassistant.helpers.entity_component import EntityComponent
+from homeassistant.helpers.service import remove_entity_service_fields
 from homeassistant.helpers.typing import ConfigType, StateType
 
 _LOGGER = logging.getLogger(__name__)
@@ -92,7 +93,7 @@ async def _async_lock(entity: LockEntity, service_call: ServiceCall) -> None:
         raise ValueError(
             f"Code '{code}' for locking {entity.entity_id} doesn't match pattern {entity.code_format}"
         )
-    await entity.async_lock(**service_call.data)
+    await entity.async_lock(**remove_entity_service_fields(service_call))
 
 
 async def _async_unlock(entity: LockEntity, service_call: ServiceCall) -> None:
@@ -102,7 +103,7 @@ async def _async_unlock(entity: LockEntity, service_call: ServiceCall) -> None:
         raise ValueError(
             f"Code '{code}' for unlocking {entity.entity_id} doesn't match pattern {entity.code_format}"
         )
-    await entity.async_unlock(**service_call.data)
+    await entity.async_unlock(**remove_entity_service_fields(service_call))
 
 
 async def _async_open(entity: LockEntity, service_call: ServiceCall) -> None:
@@ -112,7 +113,7 @@ async def _async_open(entity: LockEntity, service_call: ServiceCall) -> None:
         raise ValueError(
             f"Code '{code}' for opening {entity.entity_id} doesn't match pattern {entity.code_format}"
        )
-    await entity.async_open(**service_call.data)
+    await entity.async_open(**remove_entity_service_fields(service_call))
 
 
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@@ -8,11 +8,11 @@ from datetime import timedelta
 from functools import cached_property
 from typing import Any, Generic, TypeVar
 
-from nibe.coil import Coil
+from nibe.coil import Coil, CoilData
 from nibe.connection import Connection
 from nibe.connection.modbus import Modbus
 from nibe.connection.nibegw import NibeGW, ProductInfo
-from nibe.exceptions import CoilNotFoundException, CoilReadException
+from nibe.exceptions import CoilNotFoundException, ReadException
 from nibe.heatpump import HeatPump, Model, Series
 
 from homeassistant.config_entries import ConfigEntry
@@ -182,7 +182,7 @@ class ContextCoordinator(
         return release_update
 
 
-class Coordinator(ContextCoordinator[dict[int, Coil], int]):
+class Coordinator(ContextCoordinator[dict[int, CoilData], int]):
     """Update coordinator for nibe heat pumps."""
 
     config_entry: ConfigEntry
@@ -199,17 +199,18 @@ class Coordinator(ContextCoordinator[dict[int, Coil], int]):
         )
 
         self.data = {}
-        self.seed: dict[int, Coil] = {}
+        self.seed: dict[int, CoilData] = {}
         self.connection = connection
         self.heatpump = heatpump
         self.task: asyncio.Task | None = None
 
         heatpump.subscribe(heatpump.COIL_UPDATE_EVENT, self._on_coil_update)
 
-    def _on_coil_update(self, coil: Coil):
+    def _on_coil_update(self, data: CoilData):
         """Handle callback on coil updates."""
-        self.data[coil.address] = coil
-        self.seed[coil.address] = coil
+        coil = data.coil
+        self.data[coil.address] = data
+        self.seed[coil.address] = data
         self.async_update_context_listeners([coil.address])
 
     @property
@@ -246,26 +247,26 @@ class Coordinator(ContextCoordinator[dict[int, Coil], int]):
 
     async def async_write_coil(self, coil: Coil, value: int | float | str) -> None:
         """Write coil and update state."""
-        coil.value = value
-        coil = await self.connection.write_coil(coil)
+        data = CoilData(coil, value)
+        await self.connection.write_coil(data)
 
-        self.data[coil.address] = coil
+        self.data[coil.address] = data
 
         self.async_update_context_listeners([coil.address])
 
-    async def async_read_coil(self, coil: Coil) -> Coil:
+    async def async_read_coil(self, coil: Coil) -> CoilData:
         """Read coil and update state using callbacks."""
         return await self.connection.read_coil(coil)
 
-    async def _async_update_data(self) -> dict[int, Coil]:
+    async def _async_update_data(self) -> dict[int, CoilData]:
         self.task = asyncio.current_task()
         try:
             return await self._async_update_data_internal()
         finally:
             self.task = None
 
-    async def _async_update_data_internal(self) -> dict[int, Coil]:
-        result: dict[int, Coil] = {}
+    async def _async_update_data_internal(self) -> dict[int, CoilData]:
+        result: dict[int, CoilData] = {}
 
         def _get_coils() -> Iterable[Coil]:
             for address in sorted(self.context_callbacks.keys()):
@@ -282,10 +283,10 @@ class Coordinator(ContextCoordinator[dict[int, Coil], int]):
                 yield coil
 
         try:
-            async for coil in self.connection.read_coils(_get_coils()):
-                result[coil.address] = coil
-                self.seed.pop(coil.address, None)
-        except CoilReadException as exception:
+            async for data in self.connection.read_coils(_get_coils()):
+                result[data.coil.address] = data
+                self.seed.pop(data.coil.address, None)
+        except ReadException as exception:
             if not result:
                 raise UpdateFailed(f"Failed to update: {exception}") from exception
             self.logger.debug(
@@ -329,7 +330,7 @@ class CoilEntity(CoordinatorEntity[Coordinator]):
             self.coordinator.data or {}
         )
 
-    def _async_read_coil(self, coil: Coil):
+    def _async_read_coil(self, data: CoilData):
         """Update state of entity based on coil data."""
 
     async def _async_write_coil(self, value: int | float | str):
@@ -337,10 +338,9 @@ class CoilEntity(CoordinatorEntity[Coordinator]):
         await self.coordinator.async_write_coil(self._coil, value)
 
     def _handle_coordinator_update(self) -> None:
-        coil = self.coordinator.data.get(self._coil.address)
-        if coil is None:
+        data = self.coordinator.data.get(self._coil.address)
+        if data is None:
             return
 
-        self._coil = coil
-        self._async_read_coil(coil)
+        self._async_read_coil(data)
         self.async_write_ha_state()
@@ -1,7 +1,7 @@
 """The Nibe Heat Pump binary sensors."""
 from __future__ import annotations
 
-from nibe.coil import Coil
+from nibe.coil import Coil, CoilData
 
 from homeassistant.components.binary_sensor import ENTITY_ID_FORMAT, BinarySensorEntity
 from homeassistant.config_entries import ConfigEntry
@@ -37,5 +37,5 @@ class BinarySensor(CoilEntity, BinarySensorEntity):
         """Initialize entity."""
         super().__init__(coordinator, coil, ENTITY_ID_FORMAT)
 
-    def _async_read_coil(self, coil: Coil) -> None:
-        self._attr_is_on = coil.value == "ON"
+    def _async_read_coil(self, data: CoilData) -> None:
+        self._attr_is_on = data.value == "ON"
@@ -8,10 +8,10 @@ from nibe.connection.nibegw import NibeGW
 from nibe.exceptions import (
     AddressInUseException,
     CoilNotFoundException,
-    CoilReadException,
-    CoilReadSendException,
-    CoilWriteException,
     CoilWriteSendException,
+    ReadException,
+    ReadSendException,
+    WriteException,
 )
 from nibe.heatpump import HeatPump, Model
 import voluptuous as vol
@@ -108,13 +108,13 @@ async def validate_nibegw_input(
 
     try:
         await connection.verify_connectivity()
-    except (CoilReadSendException, CoilWriteSendException) as exception:
+    except (ReadSendException, CoilWriteSendException) as exception:
         raise FieldError(str(exception), CONF_IP_ADDRESS, "address") from exception
     except CoilNotFoundException as exception:
         raise FieldError("Coils not found", "base", "model") from exception
-    except CoilReadException as exception:
+    except ReadException as exception:
         raise FieldError("Timeout on read from pump", "base", "read") from exception
-    except CoilWriteException as exception:
+    except WriteException as exception:
         raise FieldError("Timeout on writing to pump", "base", "write") from exception
     finally:
         await connection.stop()
@@ -147,13 +147,13 @@ async def validate_modbus_input(
 
     try:
         await connection.verify_connectivity()
-    except (CoilReadSendException, CoilWriteSendException) as exception:
+    except (ReadSendException, CoilWriteSendException) as exception:
         raise FieldError(str(exception), CONF_MODBUS_URL, "address") from exception
     except CoilNotFoundException as exception:
         raise FieldError("Coils not found", "base", "model") from exception
-    except CoilReadException as exception:
+    except ReadException as exception:
         raise FieldError("Timeout on read from pump", "base", "read") from exception
-    except CoilWriteException as exception:
+    except WriteException as exception:
         raise FieldError("Timeout on writing to pump", "base", "write") from exception
     finally:
         await connection.stop()
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
   "iot_class": "local_polling",
-  "requirements": ["nibe==1.6.0"]
+  "requirements": ["nibe==2.0.0"]
 }
@@ -1,7 +1,7 @@
 """The Nibe Heat Pump numbers."""
 from __future__ import annotations
 
-from nibe.coil import Coil
+from nibe.coil import Coil, CoilData
 
 from homeassistant.components.number import ENTITY_ID_FORMAT, NumberEntity
 from homeassistant.config_entries import ConfigEntry
@@ -58,13 +58,13 @@ class Number(CoilEntity, NumberEntity):
         self._attr_native_unit_of_measurement = coil.unit
         self._attr_native_value = None
 
-    def _async_read_coil(self, coil: Coil) -> None:
-        if coil.value is None:
+    def _async_read_coil(self, data: CoilData) -> None:
+        if data.value is None:
             self._attr_native_value = None
             return
 
         try:
-            self._attr_native_value = float(coil.value)
+            self._attr_native_value = float(data.value)
         except ValueError:
             self._attr_native_value = None
@@ -1,7 +1,7 @@
 """The Nibe Heat Pump select."""
 from __future__ import annotations
 
-from nibe.coil import Coil
+from nibe.coil import Coil, CoilData
 
 from homeassistant.components.select import ENTITY_ID_FORMAT, SelectEntity
 from homeassistant.config_entries import ConfigEntry
@@ -40,12 +40,12 @@ class Select(CoilEntity, SelectEntity):
         self._attr_options = list(coil.mappings.values())
         self._attr_current_option = None
 
-    def _async_read_coil(self, coil: Coil) -> None:
-        if not isinstance(coil.value, str):
+    def _async_read_coil(self, data: CoilData) -> None:
+        if not isinstance(data.value, str):
             self._attr_current_option = None
             return
 
-        self._attr_current_option = coil.value
+        self._attr_current_option = data.value
 
     async def async_select_option(self, option: str) -> None:
         """Support writing value."""
@@ -1,7 +1,7 @@
 """The Nibe Heat Pump sensors."""
 from __future__ import annotations
 
-from nibe.coil import Coil
+from nibe.coil import Coil, CoilData
 
 from homeassistant.components.sensor import (
     ENTITY_ID_FORMAT,
@@ -146,5 +146,5 @@ class Sensor(CoilEntity, SensorEntity):
         self._attr_native_unit_of_measurement = coil.unit
         self._attr_entity_category = EntityCategory.DIAGNOSTIC
 
-    def _async_read_coil(self, coil: Coil):
-        self._attr_native_value = coil.value
+    def _async_read_coil(self, data: CoilData):
+        self._attr_native_value = data.value
@@ -3,7 +3,7 @@ from __future__ import annotations
 
 from typing import Any
 
-from nibe.coil import Coil
+from nibe.coil import Coil, CoilData
 
 from homeassistant.components.switch import ENTITY_ID_FORMAT, SwitchEntity
 from homeassistant.config_entries import ConfigEntry
@@ -40,8 +40,8 @@ class Switch(CoilEntity, SwitchEntity):
         super().__init__(coordinator, coil, ENTITY_ID_FORMAT)
         self._attr_is_on = None
 
-    def _async_read_coil(self, coil: Coil) -> None:
-        self._attr_is_on = coil.value == "ON"
+    def _async_read_coil(self, data: CoilData) -> None:
+        self._attr_is_on = data.value == "ON"
 
     async def async_turn_on(self, **kwargs: Any) -> None:
         """Turn the entity on."""
@@ -9,11 +9,14 @@ from typing import Any, Concatenate, ParamSpec, TypeVar
 
 import aiohttp
 import python_otbr_api
+from python_otbr_api import tlv_parser
+from python_otbr_api.pskc import compute_pskc
 
 from homeassistant.components.thread import async_add_dataset
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
+from homeassistant.helpers import issue_registry as ir
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.typing import ConfigType
@@ -23,6 +26,18 @@ from .const import DOMAIN
 _R = TypeVar("_R")
 _P = ParamSpec("_P")
 
+INSECURE_NETWORK_KEYS = (
+    # Thread web UI default
+    bytes.fromhex("00112233445566778899AABBCCDDEEFF"),
+)
+
+INSECURE_PASSPHRASES = (
+    # Thread web UI default
+    "j01Nme",
+    # Thread documentation default
+    "J01NME",
+)
+
 
 def _handle_otbr_error(
     func: Callable[Concatenate[OTBRData, _P], Coroutine[Any, Any, _R]]
@@ -46,11 +61,23 @@ class OTBRData:
     url: str
     api: python_otbr_api.OTBR
 
+    @_handle_otbr_error
+    async def set_enabled(self, enabled: bool) -> None:
+        """Enable or disable the router."""
+        return await self.api.set_enabled(enabled)
+
     @_handle_otbr_error
     async def get_active_dataset_tlvs(self) -> bytes | None:
         """Get current active operational dataset in TLVS format, or None."""
         return await self.api.get_active_dataset_tlvs()
 
+    @_handle_otbr_error
+    async def create_active_dataset(
+        self, dataset: python_otbr_api.OperationalDataSet
+    ) -> None:
+        """Create an active operational dataset."""
+        return await self.api.create_active_dataset(dataset)
+
 
 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the Open Thread Border Router component."""
@@ -58,21 +85,65 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     return True
 
 
+def _warn_on_default_network_settings(
+    hass: HomeAssistant, entry: ConfigEntry, dataset_tlvs: bytes
+) -> None:
+    """Warn user if insecure default network settings are used."""
+    dataset = tlv_parser.parse_tlv(dataset_tlvs.hex())
+    insecure = False
+
+    if (
+        network_key := dataset.get(tlv_parser.MeshcopTLVType.NETWORKKEY)
+    ) is not None and bytes.fromhex(network_key) in INSECURE_NETWORK_KEYS:
+        insecure = True
+    if (
+        not insecure
+        and tlv_parser.MeshcopTLVType.EXTPANID in dataset
+        and tlv_parser.MeshcopTLVType.NETWORKNAME in dataset
+        and tlv_parser.MeshcopTLVType.PSKC in dataset
+    ):
+        ext_pan_id = dataset[tlv_parser.MeshcopTLVType.EXTPANID]
+        network_name = dataset[tlv_parser.MeshcopTLVType.NETWORKNAME]
+        pskc = bytes.fromhex(dataset[tlv_parser.MeshcopTLVType.PSKC])
+        for passphrase in INSECURE_PASSPHRASES:
+            if pskc == compute_pskc(ext_pan_id, network_name, passphrase):
+                insecure = True
+                break
+
+    if insecure:
+        ir.async_create_issue(
+            hass,
+            DOMAIN,
+            f"insecure_thread_network_{entry.entry_id}",
+            is_fixable=False,
+            is_persistent=False,
+            severity=ir.IssueSeverity.WARNING,
+            translation_key="insecure_thread_network",
+        )
+    else:
+        ir.async_delete_issue(
+            hass,
+            DOMAIN,
+            f"insecure_thread_network_{entry.entry_id}",
+        )
+
+
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up an Open Thread Border Router config entry."""
     api = python_otbr_api.OTBR(entry.data["url"], async_get_clientsession(hass), 10)
 
     otbrdata = OTBRData(entry.data["url"], api)
     try:
-        dataset = await otbrdata.get_active_dataset_tlvs()
+        dataset_tlvs = await otbrdata.get_active_dataset_tlvs()
     except (
         HomeAssistantError,
         aiohttp.ClientError,
         asyncio.TimeoutError,
     ) as err:
         raise ConfigEntryNotReady("Unable to connect") from err
-    if dataset:
-        await async_add_dataset(hass, entry.title, dataset.hex())
+    if dataset_tlvs:
+        _warn_on_default_network_settings(hass, entry, dataset_tlvs)
+        await async_add_dataset(hass, entry.title, dataset_tlvs.hex())
 
     hass.data[DOMAIN] = otbrdata
@@ -6,6 +6,7 @@ import logging
 
 import aiohttp
 import python_otbr_api
+from python_otbr_api import tlv_parser
 import voluptuous as vol
 
 from homeassistant.components.hassio import HassioServiceInfo
@@ -15,7 +16,7 @@ from homeassistant.const import CONF_URL
 from homeassistant.data_entry_flow import FlowResult
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 
-from .const import DOMAIN
+from .const import DEFAULT_CHANNEL, DOMAIN
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -29,11 +30,26 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN):
         """Connect to the OTBR and create a dataset if it doesn't have one."""
         api = python_otbr_api.OTBR(url, async_get_clientsession(self.hass), 10)
         if await api.get_active_dataset_tlvs() is None:
-            if dataset := await async_get_preferred_dataset(self.hass):
-                await api.set_active_dataset_tlvs(bytes.fromhex(dataset))
+            # We currently have no way to know which channel zha is using, assume it's
+            # the default
+            zha_channel = DEFAULT_CHANNEL
+            thread_dataset_channel = None
+            thread_dataset_tlv = await async_get_preferred_dataset(self.hass)
+            if thread_dataset_tlv:
+                dataset = tlv_parser.parse_tlv(thread_dataset_tlv)
+                if channel_str := dataset.get(tlv_parser.MeshcopTLVType.CHANNEL):
+                    thread_dataset_channel = int(channel_str, base=16)
+
+            if thread_dataset_tlv is not None and zha_channel == thread_dataset_channel:
+                await api.set_active_dataset_tlvs(bytes.fromhex(thread_dataset_tlv))
             else:
+                _LOGGER.debug(
+                    "not importing TLV with channel %s", thread_dataset_channel
+                )
                 await api.create_active_dataset(
-                    python_otbr_api.OperationalDataSet(network_name="home-assistant")
+                    python_otbr_api.OperationalDataSet(
+                        channel=zha_channel, network_name="home-assistant"
+                    )
                 )
             await api.set_enabled(True)
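The channel comparison above relies on the dataset's channel TLV being a hex string; a plain-Python illustration of that parse (the exact string format comes from python_otbr_api's tlv_parser, and the value below is only an example):

DEFAULT_CHANNEL = 15

def parse_channel(channel_str: str) -> int:
    """Interpret a hex-encoded channel TLV value as an integer channel number."""
    return int(channel_str, base=16)

# A Thread network on channel 15 would match the assumed zha default.
print(parse_channel("0f") == DEFAULT_CHANNEL)  # True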
@@ -1,3 +1,5 @@
 """Constants for the Open Thread Border Router integration."""
 
 DOMAIN = "otbr"
+
+DEFAULT_CHANNEL = 15
@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/otbr",
   "integration_type": "service",
   "iot_class": "local_polling",
-  "requirements": ["python-otbr-api==1.0.4"]
+  "requirements": ["python-otbr-api==1.0.5"]
 }
@@ -12,7 +12,13 @@
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
     },
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
+      "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]"
     }
   },
+  "issues": {
+    "insecure_thread_network": {
+      "title": "Insecure Thread network settings detected",
+      "description": "Your Thread network is using a default network key or pass phrase.\n\nThis is a security risk, please create a new Thread network."
+    }
+  }
 }
@@ -1,6 +1,8 @@
 """Websocket API for OTBR."""
 from typing import TYPE_CHECKING
 
+import python_otbr_api
+
 from homeassistant.components.websocket_api import (
     ActiveConnection,
     async_register_command,
@@ -10,7 +12,7 @@ from homeassistant.components.websocket_api import (
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
 
-from .const import DOMAIN
+from .const import DEFAULT_CHANNEL, DOMAIN
 
 if TYPE_CHECKING:
     from . import OTBRData
@@ -20,6 +22,7 @@ if TYPE_CHECKING:
 def async_setup(hass: HomeAssistant) -> None:
     """Set up the OTBR Websocket API."""
     async_register_command(hass, websocket_info)
+    async_register_command(hass, websocket_create_network)
 
 
 @websocket_command(
@@ -51,3 +54,48 @@ async def websocket_info(
             "active_dataset_tlvs": dataset.hex() if dataset else None,
         },
     )
+
+
+@websocket_command(
+    {
+        "type": "otbr/create_network",
+    }
+)
+@async_response
+async def websocket_create_network(
+    hass: HomeAssistant, connection: ActiveConnection, msg: dict
+) -> None:
+    """Create a new Thread network."""
+    if DOMAIN not in hass.data:
+        connection.send_error(msg["id"], "not_loaded", "No OTBR API loaded")
+        return
+
+    # We currently have no way to know which channel zha is using, assume it's
+    # the default
+    zha_channel = DEFAULT_CHANNEL
+
+    data: OTBRData = hass.data[DOMAIN]
+
+    try:
+        await data.set_enabled(False)
+    except HomeAssistantError as exc:
+        connection.send_error(msg["id"], "set_enabled_failed", str(exc))
+        return
+
+    try:
+        await data.create_active_dataset(
+            python_otbr_api.OperationalDataSet(
+                channel=zha_channel, network_name="home-assistant"
+            )
+        )
+    except HomeAssistantError as exc:
+        connection.send_error(msg["id"], "create_active_dataset_failed", str(exc))
+        return
+
+    try:
+        await data.set_enabled(True)
+    except HomeAssistantError as exc:
+        connection.send_error(msg["id"], "set_enabled_failed", str(exc))
+        return
+
+    connection.send_result(msg["id"])
@@ -17,6 +17,7 @@ from homeassistant.const import (
     UnitOfPower,
     UnitOfPressure,
     UnitOfTemperature,
+    UnitOfTime,
     UnitOfVolume,
 )
 from homeassistant.core import HomeAssistant
@@ -303,9 +304,9 @@ SENSORS: tuple[SensorEntityDescription, ...] = (
     SensorEntityDescription(
         key="gas_consumed_interval",
         name="Gas consumed interval",
-        native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
-        device_class=SensorDeviceClass.GAS,
-        state_class=SensorStateClass.TOTAL,
+        icon="mdi:meter-gas",
+        native_unit_of_measurement=f"{UnitOfVolume.CUBIC_METERS}/{UnitOfTime.HOURS}",
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     SensorEntityDescription(
         key="gas_consumed_cumulative",
@@ -106,9 +106,10 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
     )
     # Fetch initial data so we have data when entities subscribe
     try:
+        # If camera WAN blocked, firmware check fails, do not prevent setup
         await asyncio.gather(
             device_coordinator.async_config_entry_first_refresh(),
-            firmware_coordinator.async_config_entry_first_refresh(),
+            firmware_coordinator.async_refresh(),
         )
     except ConfigEntryNotReady:
         await host.stop()
@@ -49,7 +49,7 @@ class ReolinkUpdateEntity(ReolinkBaseCoordinatorEntity, UpdateEntity):
         """Initialize a Netgear device."""
         super().__init__(reolink_data, reolink_data.firmware_coordinator)
 
-        self._attr_unique_id = f"{self._host.unique_id}_update"
+        self._attr_unique_id = f"{self._host.unique_id}"
 
     @property
     def installed_version(self) -> str | None:
@@ -196,19 +196,30 @@ class SensorEntity(Entity):
         if self.unique_id is None or self.device_class is None:
             return
         registry = er.async_get(self.hass)
 
         # Bail out if the entity is not yet registered
         if not (
             entity_id := registry.async_get_entity_id(
                 platform.domain, platform.platform_name, self.unique_id
             )
         ):
+            # Prime _sensor_option_unit_of_measurement to ensure the correct unit
+            # is stored in the entity registry.
+            self._sensor_option_unit_of_measurement = self._get_initial_suggested_unit()
             return
 
         registry_entry = registry.async_get(entity_id)
         assert registry_entry
 
+        # Prime _sensor_option_unit_of_measurement to ensure the correct unit
+        # is stored in the entity registry.
+        self.registry_entry = registry_entry
+        self._async_read_entity_options()
+
         # If the sensor has 'unit_of_measurement' in its sensor options, the user has
         # overridden the unit.
-        # If the sensor has 'sensor.private' in its entity options, it was added after
-        # automatic unit conversion was implemented.
+        # If the sensor has 'sensor.private' in its entity options, it already has a
+        # suggested_unit.
         registry_unit = registry_entry.unit_of_measurement
         if (
             (
@@ -230,11 +241,14 @@ class SensorEntity(Entity):
 
         # Set suggested_unit_of_measurement to the old unit to enable automatic
         # conversion
-        registry.async_update_entity_options(
+        self.registry_entry = registry.async_update_entity_options(
             entity_id,
             f"{DOMAIN}.private",
             {"suggested_unit_of_measurement": registry_unit},
         )
+        # Update _sensor_option_unit_of_measurement to ensure the correct unit
+        # is stored in the entity registry.
+        self._async_read_entity_options()
 
     async def async_internal_added_to_hass(self) -> None:
         """Call when the sensor entity is added to hass."""
@@ -305,12 +319,8 @@ class SensorEntity(Entity):
 
         return None
 
-    def get_initial_entity_options(self) -> er.EntityOptionsType | None:
-        """Return initial entity options.
-
-        These will be stored in the entity registry the first time the entity is seen,
-        and then never updated.
-        """
+    def _get_initial_suggested_unit(self) -> str | UndefinedType:
+        """Return the initial unit."""
         # Unit suggested by the integration
         suggested_unit_of_measurement = self.suggested_unit_of_measurement
 
@@ -321,6 +331,19 @@ class SensorEntity(Entity):
         )
 
         if suggested_unit_of_measurement is None:
+            return UNDEFINED
+
+        return suggested_unit_of_measurement
+
+    def get_initial_entity_options(self) -> er.EntityOptionsType | None:
+        """Return initial entity options.
+
+        These will be stored in the entity registry the first time the entity is seen,
+        and then never updated.
+        """
+        suggested_unit_of_measurement = self._get_initial_suggested_unit()
+
+        if suggested_unit_of_measurement is UNDEFINED:
             return None
 
         return {
@@ -416,7 +439,7 @@ class SensorEntity(Entity):
             return self._sensor_option_unit_of_measurement
 
         # Second priority, for non registered entities: unit suggested by integration
-        if not self.registry_entry and self.suggested_unit_of_measurement:
+        if not self.unique_id and self.suggested_unit_of_measurement:
             return self.suggested_unit_of_measurement
 
         # Third priority: Legacy temperature conversion, which applies
@@ -21,7 +21,6 @@ set_climate_timer:
       description: Choose this or Overlay. Set the time period for the change if you want to be specific. Alternatively use Overlay
       required: false
       example: "01:30:00"
-      default: "01:00:00"
       selector:
         text:
     requested_overlay:
@@ -29,7 +28,6 @@ set_climate_timer:
       description: Choose this or Time Period. Allows you to choose an overlay. MANUAL:=Overlay until user removes; NEXT_TIME_BLOCK:=Overlay until next timeblock; TADO_DEFAULT:=Overlay based on tado app setting
      required: false
      example: "MANUAL"
-      default: "TADO_DEFAULT"
      selector:
        select:
          options:
@@ -233,6 +233,9 @@ class CoverTemplate(TemplateEntity, CoverEntity):
         if not self._position_template:
             self._position = None
 
+        self._is_opening = False
+        self._is_closing = False
+
     @callback
     def _update_position(self, result):
         try:
@@ -1,7 +1,9 @@
 """Config flow for the Thread integration."""
 from __future__ import annotations
 
-from homeassistant.components import zeroconf
+from typing import Any
+
+from homeassistant.components import onboarding, zeroconf
 from homeassistant.config_entries import ConfigFlow
 from homeassistant.data_entry_flow import FlowResult
@@ -13,16 +15,31 @@ class ThreadConfigFlow(ConfigFlow, domain=DOMAIN):
 
     VERSION = 1
 
-    async def async_step_zeroconf(
-        self, discovery_info: zeroconf.ZeroconfServiceInfo
-    ) -> FlowResult:
-        """Set up because the user has border routers."""
-        await self._async_handle_discovery_without_unique_id()
-        return self.async_create_entry(title="Thread", data={})
-
     async def async_step_import(
         self, import_data: dict[str, str] | None = None
     ) -> FlowResult:
         """Set up by import from async_setup."""
         await self._async_handle_discovery_without_unique_id()
         return self.async_create_entry(title="Thread", data={})
 
     async def async_step_user(
         self, user_input: dict[str, str] | None = None
     ) -> FlowResult:
         """Set up by import from async_setup."""
         await self._async_handle_discovery_without_unique_id()
         return self.async_create_entry(title="Thread", data={})
 
+    async def async_step_zeroconf(
+        self, discovery_info: zeroconf.ZeroconfServiceInfo
+    ) -> FlowResult:
+        """Set up because the user has border routers."""
+        await self._async_handle_discovery_without_unique_id()
+        return await self.async_step_confirm()
+
+    async def async_step_confirm(
+        self, user_input: dict[str, Any] | None = None
+    ) -> FlowResult:
+        """Confirm the setup."""
+        if user_input is not None or not onboarding.async_is_onboarded(self.hass):
+            return self.async_create_entry(title="Thread", data={})
+        return self.async_show_form(step_id="confirm")
@@ -7,6 +7,6 @@
   "documentation": "https://www.home-assistant.io/integrations/thread",
   "integration_type": "service",
   "iot_class": "local_polling",
-  "requirements": ["python-otbr-api==1.0.4", "pyroute2==0.7.5"],
+  "requirements": ["python-otbr-api==1.0.5", "pyroute2==0.7.5"],
   "zeroconf": ["_meshcop._udp.local."]
 }
homeassistant/components/thread/strings.json (new file, 9 lines)
@@ -0,0 +1,9 @@
+{
+  "config": {
+    "step": {
+      "confirm": {
+        "description": "[%key:common::config_flow::description::confirm_setup%]"
+      }
+    }
+  }
+}
@@ -14,6 +14,7 @@ from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
 from homeassistant.helpers import device_registry as dr, issue_registry as ir
 from homeassistant.helpers.issue_registry import IssueSeverity
+from homeassistant.helpers.typing import ConfigType
 
 from .const import (
     CONF_ALLOW_EA,
@@ -40,10 +41,15 @@ _LOGGER = logging.getLogger(__name__)
 SCAN_INTERVAL = timedelta(seconds=DEFAULT_SCAN_INTERVAL)
 
 
+async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
+    """Set up the UniFi Protect."""
+    # Only start discovery once regardless of how many entries they have
+    async_start_discovery(hass)
+    return True
+
+
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up the UniFi Protect config entries."""
 
-    async_start_discovery(hass)
     protect = async_create_api_client(hass, entry)
     _LOGGER.debug("Connect to UniFi Protect")
     data_service = ProtectData(hass, protect, SCAN_INTERVAL, entry)
@@ -29,13 +29,19 @@ def async_start_discovery(hass: HomeAssistant) -> None:
         return
     domain_data[DISCOVERY] = True
 
-    async def _async_discovery(*_: Any) -> None:
+    async def _async_discovery() -> None:
         async_trigger_discovery(hass, await async_discover_devices())
 
-    # Do not block startup since discovery takes 31s or more
-    hass.async_create_background_task(_async_discovery(), "unifiprotect-discovery")
+    @callback
+    def _async_start_background_discovery(*_: Any) -> None:
+        """Run discovery in the background."""
+        hass.async_create_background_task(_async_discovery(), "unifiprotect-discovery")
 
-    async_track_time_interval(hass, _async_discovery, DISCOVERY_INTERVAL)
+    # Do not block startup since discovery takes 31s or more
+    _async_start_background_discovery()
+    async_track_time_interval(
+        hass, _async_start_background_discovery, DISCOVERY_INTERVAL
+    )
 
 
 async def async_discover_devices() -> list[UnifiDevice]:
@@ -12,5 +12,5 @@
   "dependencies": ["bluetooth_adapters"],
   "documentation": "https://www.home-assistant.io/integrations/yalexs_ble",
   "iot_class": "local_push",
-  "requirements": ["yalexs-ble==2.0.3"]
+  "requirements": ["yalexs-ble==2.0.4"]
 }
@@ -2,18 +2,12 @@
 from __future__ import annotations
 
 import dataclasses
+from importlib.metadata import version
 from typing import Any
 
-import bellows
-import pkg_resources
-import zigpy
 from zigpy.config import CONF_NWK_EXTENDED_PAN_ID
 from zigpy.profiles import PROFILES
 from zigpy.zcl import Cluster
-import zigpy_deconz
-import zigpy_xbee
-import zigpy_zigate
-import zigpy_znp
 
 from homeassistant.components.diagnostics.util import async_redact_data
 from homeassistant.config_entries import ConfigEntry
@@ -79,13 +73,13 @@ async def async_get_config_entry_diagnostics(
             "config_entry": config_entry.as_dict(),
             "application_state": shallow_asdict(gateway.application_controller.state),
             "versions": {
-                "bellows": bellows.__version__,
-                "zigpy": zigpy.__version__,
-                "zigpy_deconz": zigpy_deconz.__version__,
-                "zigpy_xbee": zigpy_xbee.__version__,
-                "zigpy_znp": zigpy_znp.__version__,
-                "zigpy_zigate": zigpy_zigate.__version__,
-                "zhaquirks": pkg_resources.get_distribution("zha-quirks").version,
+                "bellows": version("bellows"),
+                "zigpy": version("zigpy"),
+                "zigpy_deconz": version("zigpy-deconz"),
+                "zigpy_xbee": version("zigpy-xbee"),
+                "zigpy_znp": version("zigpy_znp"),
+                "zigpy_zigate": version("zigpy-zigate"),
+                "zhaquirks": version("zha-quirks"),
             },
         },
         KEYS_TO_REDACT,
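For context, importlib.metadata ships with Python 3.8+ and reads distribution metadata without importing the packages themselves, which is what makes the switch away from pkg_resources and module-level imports possible here. A minimal, standalone illustration (the package names are only examples):

from importlib.metadata import PackageNotFoundError, version

def safe_version(distribution: str) -> str | None:
    """Return the installed version of a distribution, or None if it is missing."""
    try:
        return version(distribution)
    except PackageNotFoundError:
        return None

print(safe_version("zigpy"))          # e.g. "0.53.2" when installed
print(safe_version("not-a-package"))  # None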
@@ -1,12 +1,7 @@
 {
   "domain": "zha",
   "name": "Zigbee Home Automation",
-  "after_dependencies": [
-    "onboarding",
-    "usb",
-    "zeroconf",
-    "homeassistant_yellow"
-  ],
+  "after_dependencies": ["onboarding", "usb"],
   "codeowners": ["@dmulcahey", "@adminiuga", "@puddly"],
   "config_flow": true,
   "dependencies": ["file_upload"],
@@ -25,15 +20,15 @@
     "zigpy_znp"
   ],
   "requirements": [
-    "bellows==0.34.7",
+    "bellows==0.34.9",
     "pyserial==3.5",
     "pyserial-asyncio==0.6",
     "zha-quirks==0.0.93",
     "zigpy-deconz==0.19.2",
-    "zigpy==0.53.0",
+    "zigpy==0.53.2",
     "zigpy-xbee==0.16.2",
     "zigpy-zigate==0.10.3",
-    "zigpy-znp==0.9.2"
+    "zigpy-znp==0.9.3"
   ],
   "usb": [
     {
@@ -445,6 +445,10 @@ class ConfigEntry:
 
         async def setup_again(*_: Any) -> None:
             """Run setup again."""
+            # Check again when we fire in case shutdown
+            # has started so we do not block shutdown
+            if hass.is_stopping:
+                return
             self._async_cancel_retry_setup = None
             await self.async_setup(hass, integration=integration, tries=tries)
 
@@ -459,7 +463,8 @@ class ConfigEntry:
 
             await self._async_process_on_unload()
             return
-        except Exception:  # pylint: disable=broad-except
+        # pylint: disable-next=broad-except
+        except (asyncio.CancelledError, SystemExit, Exception):
             _LOGGER.exception(
                 "Error setting up entry %s for %s", self.title, integration.domain
             )
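Catching asyncio.CancelledError explicitly matters because, since Python 3.8, it derives from BaseException rather than Exception, so a bare "except Exception" lets cancellation skip the logging path entirely. A quick standalone illustration (not Home Assistant code):

import asyncio

async def flaky_setup() -> None:
    # Simulate a setup coroutine that gets cancelled mid-flight.
    raise asyncio.CancelledError

async def main() -> None:
    try:
        await flaky_setup()
    except Exception:
        print("caught by 'except Exception'")    # never reached for CancelledError
    except (asyncio.CancelledError, SystemExit, Exception):
        print("caught by the broadened clause")  # this branch runs

asyncio.run(main())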
@@ -8,7 +8,7 @@ from .backports.enum import StrEnum
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2023
 MINOR_VERSION: Final = 3
-PATCH_VERSION: Final = "0b3"
+PATCH_VERSION: Final = "0b7"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)
@@ -38,6 +38,7 @@ from typing import (
 )
 from urllib.parse import urlparse
 
 import async_timeout
+from typing_extensions import Self
 import voluptuous as vol
 import yarl
@@ -711,6 +712,14 @@ class HomeAssistant:
                 "Stopping Home Assistant before startup has completed may fail"
             )
 
+        # Keep holding the reference to the tasks but do not allow them
+        # to block shutdown. Only tasks created after this point will
+        # be waited for.
+        running_tasks = self._tasks
+        # Avoid clearing here since we want the remove callbacks to fire
+        # and remove the tasks from the original set which is now running_tasks
+        self._tasks = set()
+
         # Cancel all background tasks
         for task in self._background_tasks:
             self._tasks.add(task)
@@ -749,6 +758,35 @@ class HomeAssistant:
         self.state = CoreState.not_running
         self.bus.async_fire(EVENT_HOMEASSISTANT_CLOSE)
 
+        # Make a copy of running_tasks since a task can finish
+        # while we are awaiting canceled tasks to get their result
+        # which will result in the set size changing during iteration
+        for task in list(running_tasks):
+            if task.done():
+                # Since we made a copy we need to check
+                # to see if the task finished while we
+                # were awaiting another task
+                continue
+            _LOGGER.warning(
+                "Task %s was still running after stage 2 shutdown; "
+                "Integrations should cancel non-critical tasks when receiving "
+                "the stop event to prevent delaying shutdown",
+                task,
+            )
+            task.cancel()
+            try:
+                async with async_timeout.timeout(0.1):
+                    await task
+            except asyncio.CancelledError:
+                pass
+            except asyncio.TimeoutError:
+                # Task may be shielded from cancellation.
+                _LOGGER.exception(
+                    "Task %s could not be canceled during stage 3 shutdown", task
+                )
+            except Exception as ex:  # pylint: disable=broad-except
+                _LOGGER.exception("Task %s error during stage 3 shutdown: %s", task, ex)
+
         # Prevent run_callback_threadsafe from scheduling any additional
         # callbacks in the event loop as callbacks created on the futures
         # it returns will never run after the final `self.async_block_till_done`
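The cancel-then-await-with-a-short-timeout pattern used above can be exercised on its own. A minimal sketch outside Home Assistant, using the same async_timeout package; the helper names and the misbehaving task are made up for illustration:

import asyncio
import async_timeout

async def stubborn() -> None:
    try:
        await asyncio.sleep(5)
    except asyncio.CancelledError:
        # Simulate a task that swallows cancellation and keeps working briefly.
        await asyncio.sleep(0.3)

async def shutdown(task: asyncio.Task) -> None:
    task.cancel()
    try:
        async with async_timeout.timeout(0.1):
            await task
    except asyncio.CancelledError:
        print("task acknowledged cancellation")
    except asyncio.TimeoutError:
        print("task did not cancel within the timeout")  # logged and skipped, as in core.py

async def main() -> None:
    task = asyncio.create_task(stubborn())
    await asyncio.sleep(0)  # let the task start
    await shutdown(task)

asyncio.run(main())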
@@ -2202,6 +2202,12 @@
       "integration_type": "virtual",
       "supported_by": "gree"
     },
+    "heltun": {
+      "name": "HELTUN",
+      "iot_standards": [
+        "zwave"
+      ]
+    },
     "here_travel_time": {
       "name": "HERE Travel Time",
       "integration_type": "hub",
@@ -513,6 +513,16 @@ async def async_get_all_descriptions(
     return descriptions
 
 
+@callback
+def remove_entity_service_fields(call: ServiceCall) -> dict[Any, Any]:
+    """Remove entity service fields."""
+    return {
+        key: val
+        for key, val in call.data.items()
+        if key not in cv.ENTITY_SERVICE_FIELDS
+    }
+
+
 @callback
 @bind_hass
 def async_set_service_schema(
@@ -567,11 +577,7 @@ async def entity_service_call(  # noqa: C901
 
     # If the service function is a string, we'll pass it the service call data
     if isinstance(func, str):
-        data: dict | ServiceCall = {
-            key: val
-            for key, val in call.data.items()
-            if key not in cv.ENTITY_SERVICE_FIELDS
-        }
+        data: dict | ServiceCall = remove_entity_service_fields(call)
     # If the service function is not a string, we pass the service call
     else:
         data = call
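The new helper simply strips the targeting keys defined by cv.ENTITY_SERVICE_FIELDS before the remaining data is forwarded to the entity method (as the lock changes above do with **remove_entity_service_fields(service_call)). A standalone sketch of the same idea, with the field set written out by hand for illustration rather than imported from Home Assistant:

from typing import Any

# Illustrative stand-in for cv.ENTITY_SERVICE_FIELDS; the real set lives in
# homeassistant.helpers.config_validation.
ENTITY_SERVICE_FIELDS = {"entity_id", "device_id", "area_id"}

def remove_entity_service_fields(data: dict[str, Any]) -> dict[str, Any]:
    """Drop targeting fields so only service-specific parameters remain."""
    return {key: val for key, val in data.items() if key not in ENTITY_SERVICE_FIELDS}

call_data = {"entity_id": "lock.front_door", "code": "1234"}
print(remove_entity_service_fields(call_data))  # {'code': '1234'}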
@@ -21,10 +21,10 @@ cryptography==39.0.1
 dbus-fast==1.84.1
 fnvhash==0.1.0
 hass-nabucasa==0.61.0
-hassil==1.0.5
+hassil==1.0.6
 home-assistant-bluetooth==1.9.3
-home-assistant-frontend==20230224.0
-home-assistant-intents==2023.2.22
+home-assistant-frontend==20230227.0
+home-assistant-intents==2023.2.28
 httpx==0.23.3
 ifaddr==0.1.7
 janus==1.0.0
@@ -264,7 +264,8 @@ async def _async_setup_component(
            SLOW_SETUP_MAX_WAIT,
        )
        return False
    except Exception:  # pylint: disable=broad-except
    # pylint: disable-next=broad-except
    except (asyncio.CancelledError, SystemExit, Exception):
        _LOGGER.exception("Error during setup of component %s", domain)
        async_notify_setup_error(hass, domain, integration.documentation)
        return False
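The widened except clause in the setup hunk matters because asyncio.CancelledError and SystemExit derive from BaseException, so a bare "except Exception" lets a cancelled setup escape the error handler. A small illustration of that difference, independent of the Home Assistant setup code:

import asyncio


async def cancelled_setup() -> bool:
    raise asyncio.CancelledError  # e.g. the setup task was cancelled at shutdown


async def main() -> None:
    try:
        await cancelled_setup()
    except Exception:
        print("never reached: CancelledError subclasses BaseException on Python 3.8+")
    except (asyncio.CancelledError, SystemExit, Exception):
        print("reached: listing the exceptions explicitly also catches cancellation")


asyncio.run(main())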
@@ -39,7 +39,7 @@ def is_installed(package: str) -> bool:
    try:
        pkg_resources.get_distribution(package)
        return True
    except (pkg_resources.ResolutionError, pkg_resources.ExtractionError):
    except (IndexError, pkg_resources.ResolutionError, pkg_resources.ExtractionError):
        req = pkg_resources.Requirement.parse(package)
    except ValueError:
        # This is a zip file. We no longer use this in Home Assistant,
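The extra IndexError added above treats a corrupted package-metadata lookup the same as "not installed". A hedged sketch of the same defensive check, written against the newer importlib.metadata and packaging APIs rather than pkg_resources (this is not the code in the hunk, and it skips name normalization edge cases):

from importlib.metadata import PackageNotFoundError, version

from packaging.requirements import InvalidRequirement, Requirement
from packaging.version import Version


def is_installed(requirement_string: str) -> bool:
    """Return True if an installed distribution satisfies the requirement."""
    try:
        req = Requirement(requirement_string)
    except InvalidRequirement:
        return False  # malformed requirement string
    try:
        installed = Version(version(req.name))
    except PackageNotFoundError:
        return False  # not installed at all
    # An empty specifier set accepts every installed version.
    return installed in req.specifier


print(is_installed("packaging>=20.0"))  # True in any environment that can import packaging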
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "homeassistant"
version = "2023.3.0b3"
version = "2023.3.0b7"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
@@ -181,7 +181,7 @@ aiohomekit==2.6.1
aiohttp_cors==0.7.0

# homeassistant.components.hue
aiohue==4.6.1
aiohue==4.6.2

# homeassistant.components.imap
aioimaplib==1.0.1
@@ -276,7 +276,7 @@ aioskybell==22.7.0
aioslimproto==2.1.1

# homeassistant.components.honeywell
aiosomecomfort==0.0.8
aiosomecomfort==0.0.10

# homeassistant.components.steamist
aiosteamist==0.3.2
@@ -383,7 +383,7 @@ asyncsleepiq==1.2.3
atenpdu==0.3.2

# homeassistant.components.aurora
auroranoaa==0.0.2
auroranoaa==0.0.3

# homeassistant.components.aurora_abb_powerone
aurorapy==0.2.7
@@ -422,7 +422,7 @@ beautifulsoup4==4.11.1
# beewi_smartclim==0.0.10

# homeassistant.components.zha
bellows==0.34.7
bellows==0.34.9

# homeassistant.components.bmw_connected_drive
bimmer_connected==0.12.1
@@ -504,7 +504,7 @@ btsmarthub_devicelist==0.2.3
buienradar==1.0.5

# homeassistant.components.caldav
caldav==1.1.1
caldav==1.2.0

# homeassistant.components.circuit
circuit-webhook==1.0.1
@@ -661,7 +661,7 @@ enocean==0.50
enturclient==0.2.4

# homeassistant.components.environment_canada
env_canada==0.5.28
env_canada==0.5.29

# homeassistant.components.enphase_envoy
envoy_reader==0.20.1
@@ -874,7 +874,7 @@ hass-nabucasa==0.61.0
hass_splunk==0.1.1

# homeassistant.components.conversation
hassil==1.0.5
hassil==1.0.6

# homeassistant.components.tasmota
hatasmota==0.6.4
@@ -907,10 +907,10 @@ hole==0.8.0
holidays==0.18.0

# homeassistant.components.frontend
home-assistant-frontend==20230224.0
home-assistant-frontend==20230227.0

# homeassistant.components.conversation
home-assistant-intents==2023.2.22
home-assistant-intents==2023.2.28

# homeassistant.components.home_connect
homeconnect==0.7.2
@@ -1201,7 +1201,7 @@ nextcord==2.0.0a8
nextdns==1.3.0

# homeassistant.components.nibe_heatpump
nibe==1.6.0
nibe==2.0.0

# homeassistant.components.niko_home_control
niko-home-control==0.2.1
@@ -1248,7 +1248,7 @@ oauth2client==4.1.3
objgraph==3.5.0

# homeassistant.components.garages_amsterdam
odp-amsterdam==5.0.1
odp-amsterdam==5.1.0

# homeassistant.components.oem
oemthermostat==1.1.1
@@ -1430,7 +1430,7 @@ py-canary==0.5.3
py-cpuinfo==8.0.0

# homeassistant.components.dormakaba_dkey
py-dormakaba-dkey==1.0.2
py-dormakaba-dkey==1.0.3

# homeassistant.components.melissa
py-melissa-climate==2.1.4
@@ -1687,7 +1687,7 @@ pyialarm==2.2.0
pyicloud==1.0.0

# homeassistant.components.insteon
pyinsteon==1.3.2
pyinsteon==1.3.3

# homeassistant.components.intesishome
pyintesishome==1.8.0
@@ -2097,7 +2097,7 @@ python-nest==4.2.0

# homeassistant.components.otbr
# homeassistant.components.thread
python-otbr-api==1.0.4
python-otbr-api==1.0.5

# homeassistant.components.picnic
python-picnic-api==1.1.0
@@ -2653,7 +2653,7 @@ xboxapi==2.0.1
xiaomi-ble==0.16.4

# homeassistant.components.knx
xknx==2.5.0
xknx==2.6.0

# homeassistant.components.bluesound
# homeassistant.components.fritz
@@ -2670,13 +2670,13 @@ xs1-api-client==3.0.0
yalesmartalarmclient==0.3.9

# homeassistant.components.yalexs_ble
yalexs-ble==2.0.3
yalexs-ble==2.0.4

# homeassistant.components.august
yalexs==1.2.7

# homeassistant.components.august
yalexs_ble==2.0.3
yalexs_ble==2.0.4

# homeassistant.components.yeelight
yeelight==0.7.10
@@ -2724,10 +2724,10 @@ zigpy-xbee==0.16.2
zigpy-zigate==0.10.3

# homeassistant.components.zha
zigpy-znp==0.9.2
zigpy-znp==0.9.3

# homeassistant.components.zha
zigpy==0.53.0
zigpy==0.53.2

# homeassistant.components.zoneminder
zm-py==0.5.2
@@ -165,7 +165,7 @@ aiohomekit==2.6.1
aiohttp_cors==0.7.0

# homeassistant.components.hue
aiohue==4.6.1
aiohue==4.6.2

# homeassistant.components.imap
aioimaplib==1.0.1
@@ -254,7 +254,7 @@ aioskybell==22.7.0
aioslimproto==2.1.1

# homeassistant.components.honeywell
aiosomecomfort==0.0.8
aiosomecomfort==0.0.10

# homeassistant.components.steamist
aiosteamist==0.3.2
@@ -334,7 +334,7 @@ async-upnp-client==0.33.1
asyncsleepiq==1.2.3

# homeassistant.components.aurora
auroranoaa==0.0.2
auroranoaa==0.0.3

# homeassistant.components.aurora_abb_powerone
aurorapy==0.2.7
@@ -352,7 +352,7 @@ base36==0.1.1
beautifulsoup4==4.11.1

# homeassistant.components.zha
bellows==0.34.7
bellows==0.34.9

# homeassistant.components.bmw_connected_drive
bimmer_connected==0.12.1
@@ -405,7 +405,7 @@ bthome-ble==2.5.2
buienradar==1.0.5

# homeassistant.components.caldav
caldav==1.1.1
caldav==1.2.0

# homeassistant.components.co2signal
co2signal==0.4.2
@@ -514,7 +514,7 @@ energyzero==0.3.1
enocean==0.50

# homeassistant.components.environment_canada
env_canada==0.5.28
env_canada==0.5.29

# homeassistant.components.enphase_envoy
envoy_reader==0.20.1
@@ -666,7 +666,7 @@ habitipy==0.2.0
hass-nabucasa==0.61.0

# homeassistant.components.conversation
hassil==1.0.5
hassil==1.0.6

# homeassistant.components.tasmota
hatasmota==0.6.4
@@ -690,10 +690,10 @@ hole==0.8.0
holidays==0.18.0

# homeassistant.components.frontend
home-assistant-frontend==20230224.0
home-assistant-frontend==20230227.0

# homeassistant.components.conversation
home-assistant-intents==2023.2.22
home-assistant-intents==2023.2.28

# homeassistant.components.home_connect
homeconnect==0.7.2
@@ -891,7 +891,7 @@ nextcord==2.0.0a8
nextdns==1.3.0

# homeassistant.components.nibe_heatpump
nibe==1.6.0
nibe==2.0.0

# homeassistant.components.nfandroidtv
notifications-android-tv==0.1.5
@@ -923,7 +923,7 @@ oauth2client==4.1.3
objgraph==3.5.0

# homeassistant.components.garages_amsterdam
odp-amsterdam==5.0.1
odp-amsterdam==5.1.0

# homeassistant.components.omnilogic
omnilogic==0.4.5
@@ -1045,7 +1045,7 @@ py-canary==0.5.3
py-cpuinfo==8.0.0

# homeassistant.components.dormakaba_dkey
py-dormakaba-dkey==1.0.2
py-dormakaba-dkey==1.0.3

# homeassistant.components.melissa
py-melissa-climate==2.1.4
@@ -1212,7 +1212,7 @@ pyialarm==2.2.0
pyicloud==1.0.0

# homeassistant.components.insteon
pyinsteon==1.3.2
pyinsteon==1.3.3

# homeassistant.components.ipma
pyipma==3.0.6
@@ -1490,7 +1490,7 @@ python-nest==4.2.0

# homeassistant.components.otbr
# homeassistant.components.thread
python-otbr-api==1.0.4
python-otbr-api==1.0.5

# homeassistant.components.picnic
python-picnic-api==1.1.0
@@ -1881,7 +1881,7 @@ xbox-webapi==2.0.11
xiaomi-ble==0.16.4

# homeassistant.components.knx
xknx==2.5.0
xknx==2.6.0

# homeassistant.components.bluesound
# homeassistant.components.fritz
@@ -1895,13 +1895,13 @@ xmltodict==0.13.0
yalesmartalarmclient==0.3.9

# homeassistant.components.yalexs_ble
yalexs-ble==2.0.3
yalexs-ble==2.0.4

# homeassistant.components.august
yalexs==1.2.7

# homeassistant.components.august
yalexs_ble==2.0.3
yalexs_ble==2.0.4

# homeassistant.components.yeelight
yeelight==0.7.10
@@ -1934,10 +1934,10 @@ zigpy-xbee==0.16.2
zigpy-zigate==0.10.3

# homeassistant.components.zha
zigpy-znp==0.9.2
zigpy-znp==0.9.3

# homeassistant.components.zha
zigpy==0.53.0
zigpy==0.53.2

# homeassistant.components.zwave_js
zwave-js-server-python==0.46.0
@@ -2,6 +2,7 @@
from __future__ import annotations

import ast
from collections import deque
from pathlib import Path

from homeassistant.const import Platform
@@ -118,6 +119,7 @@ ALLOWED_USED_COMPONENTS = {
    "input_text",
    "media_source",
    "onboarding",
    "panel_custom",
    "persistent_notification",
    "person",
    "script",
@@ -138,22 +140,19 @@ IGNORE_VIOLATIONS = {
    # Has same requirement, gets defaults.
    ("sql", "recorder"),
    # Sharing a base class
    ("openalpr_cloud", "openalpr_local"),
    ("lutron_caseta", "lutron"),
    ("ffmpeg_noise", "ffmpeg_motion"),
    # Demo
    ("demo", "manual"),
    ("demo", "openalpr_local"),
    # This would be a circular dep
    ("http", "network"),
    # This would be a circular dep
    ("zha", "homeassistant_hardware"),
    ("zha", "homeassistant_yellow"),
    # This should become a helper method that integrations can submit data to
    ("websocket_api", "lovelace"),
    ("websocket_api", "shopping_list"),
    "logbook",
    # Migration wizard from zwave to zwave_js.
    "zwave_js",
}


@@ -231,6 +230,7 @@ def find_non_referenced_integrations(
def validate_dependencies(
    integrations: dict[str, Integration],
    integration: Integration,
    check_dependencies: bool,
) -> None:
    """Validate all dependencies."""
    # Some integrations are allowed to have violations.
@@ -252,12 +252,60 @@ def validate_dependencies(
        "or 'after_dependencies'",
    )

    if check_dependencies:
        _check_circular_deps(
            integrations, integration.domain, integration, set(), deque()
        )


def _check_circular_deps(
    integrations: dict[str, Integration],
    start_domain: str,
    integration: Integration,
    checked: set[str],
    checking: deque[str],
) -> None:
    """Check for circular dependencies pointing at starting_domain."""
    if integration.domain in checked or integration.domain in checking:
        return

    checking.append(integration.domain)
    for domain in integration.manifest.get("dependencies", []):
        if domain == start_domain:
            integrations[start_domain].add_error(
                "dependencies",
                f"Found a circular dependency with {integration.domain} ({', '.join(checking)})",
            )
            break

        _check_circular_deps(
            integrations, start_domain, integrations[domain], checked, checking
        )
    else:
        for domain in integration.manifest.get("after_dependencies", []):
            if domain == start_domain:
                integrations[start_domain].add_error(
                    "dependencies",
                    f"Found a circular dependency with after dependencies of {integration.domain} ({', '.join(checking)})",
                )
                break

            _check_circular_deps(
                integrations, start_domain, integrations[domain], checked, checking
            )
    checked.add(integration.domain)
    checking.remove(integration.domain)


def validate(integrations: dict[str, Integration], config: Config) -> None:
    """Handle dependencies for integrations."""
    # check for non-existing dependencies
    for integration in integrations.values():
        validate_dependencies(integrations, integration)
        validate_dependencies(
            integrations,
            integration,
            check_dependencies=not config.specific_integrations,
        )

        if config.specific_integrations:
            continue
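The new _check_circular_deps walk above is a depth-first search over manifest dependencies that reports any chain leading back to the starting domain. A condensed, self-contained sketch of that search, using a plain manifest dict instead of the hassfest Integration objects:

from __future__ import annotations

from collections import deque


def find_cycle(manifests: dict[str, dict], start: str) -> list[str] | None:
    """Return a dependency chain that leads back to `start`, or None."""
    checked: set[str] = set()
    checking: deque[str] = deque()

    def walk(domain: str) -> list[str] | None:
        if domain in checked or domain in checking:
            return None
        checking.append(domain)
        for dep in manifests.get(domain, {}).get("dependencies", []):
            if dep == start:
                return [*checking, start]  # found a loop back to the start domain
            if (cycle := walk(dep)) is not None:
                return cycle
        checked.add(domain)
        checking.pop()
        return None

    return walk(start)


manifests = {
    "zha": {"dependencies": ["homeassistant_hardware"]},
    "homeassistant_hardware": {"dependencies": ["zha"]},
}
print(find_cycle(manifests, "zha"))  # ['zha', 'homeassistant_hardware', 'zha']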
@@ -166,3 +166,4 @@ async def test_step_reauth(

    assert len(hass.config_entries.async_entries()) == 1
    assert hass.config_entries.async_entries()[0].data[CONF_API_KEY] == new_api_key
    await hass.async_block_till_done()
@@ -19,7 +19,7 @@ from homeassistant.components.bluetooth.active_update_coordinator import (
    _T,
    ActiveBluetoothDataUpdateCoordinator,
)
from homeassistant.core import HomeAssistant
from homeassistant.core import CoreState, HomeAssistant
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.service_info.bluetooth import BluetoothServiceInfo
from homeassistant.setup import async_setup_component
@@ -395,3 +395,58 @@ async def test_polling_rejecting_the_first_time(

    cancel()
    unregister_listener()


async def test_no_polling_after_stop_event(
    hass: HomeAssistant,
    mock_bleak_scanner_start: MagicMock,
    mock_bluetooth_adapters: None,
) -> None:
    """Test we do not poll after the stop event."""
    await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
    needs_poll_calls = 0

    def _needs_poll(
        service_info: BluetoothServiceInfoBleak, seconds_since_last_poll: float | None
    ) -> bool:
        nonlocal needs_poll_calls
        needs_poll_calls += 1
        return True

    async def _poll_method(service_info: BluetoothServiceInfoBleak) -> dict[str, Any]:
        return {"fake": "data"}

    coordinator = MyCoordinator(
        hass=hass,
        logger=_LOGGER,
        address="aa:bb:cc:dd:ee:ff",
        mode=BluetoothScanningMode.ACTIVE,
        needs_poll_method=_needs_poll,
        poll_method=_poll_method,
    )
    assert coordinator.available is False  # no data yet

    mock_listener = MagicMock()
    unregister_listener = coordinator.async_add_listener(mock_listener)

    cancel = coordinator.async_start()
    assert needs_poll_calls == 0

    inject_bluetooth_service_info(hass, GENERIC_BLUETOOTH_SERVICE_INFO)
    await hass.async_block_till_done()
    assert coordinator.passive_data == {"rssi": GENERIC_BLUETOOTH_SERVICE_INFO.rssi}
    assert coordinator.data == {"fake": "data"}

    assert needs_poll_calls == 1

    hass.state = CoreState.stopping
    await hass.async_block_till_done()
    assert needs_poll_calls == 1

    # Should not generate a poll now
    inject_bluetooth_service_info(hass, GENERIC_BLUETOOTH_SERVICE_INFO_2)
    await hass.async_block_till_done()
    assert needs_poll_calls == 1

    cancel()
    unregister_listener()
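The test above, and its twin for the processor coordinator that follows, pin the same behaviour: once Home Assistant begins shutting down, the needs_poll hook must refuse further Bluetooth polling. A minimal sketch of the kind of guard these tests exercise, with a simplified stand-in for the hass object:

from __future__ import annotations

from dataclasses import dataclass


@dataclass
class FakeHass:  # stand-in for the HomeAssistant object the real guard consults
    is_stopping: bool = False


def needs_poll(hass: FakeHass, seconds_since_last_poll: float | None) -> bool:
    """Only poll while the instance is running and the data is stale."""
    if hass.is_stopping:
        return False
    return seconds_since_last_poll is None or seconds_since_last_poll > 60.0


hass = FakeHass()
assert needs_poll(hass, None) is True
hass.is_stopping = True
assert needs_poll(hass, 3600.0) is False  # no polls once shutdown has started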
@@ -16,7 +16,7 @@ from homeassistant.components.bluetooth import (
from homeassistant.components.bluetooth.active_update_processor import (
    ActiveBluetoothProcessorCoordinator,
)
from homeassistant.core import HomeAssistant
from homeassistant.core import CoreState, HomeAssistant
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.service_info.bluetooth import BluetoothServiceInfo
from homeassistant.setup import async_setup_component
@@ -384,3 +384,65 @@ async def test_rate_limit(
    assert async_handle_update.mock_calls[-1] == call({"testdata": 1})

    cancel()


async def test_no_polling_after_stop_event(
    hass: HomeAssistant,
    mock_bleak_scanner_start: MagicMock,
    mock_bluetooth_adapters: None,
) -> None:
    """Test we do not poll after the stop event."""
    await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
    needs_poll_calls = 0

    def _update_method(service_info: BluetoothServiceInfoBleak):
        return {"testdata": 0}

    def _poll_needed(*args, **kwargs):
        nonlocal needs_poll_calls
        needs_poll_calls += 1
        return True

    async def _poll(*args, **kwargs):
        return {"testdata": 1}

    coordinator = ActiveBluetoothProcessorCoordinator(
        hass,
        _LOGGER,
        address="aa:bb:cc:dd:ee:ff",
        mode=BluetoothScanningMode.ACTIVE,
        update_method=_update_method,
        needs_poll_method=_poll_needed,
        poll_method=_poll,
    )
    assert coordinator.available is False  # no data yet

    processor = MagicMock()
    coordinator.async_register_processor(processor)
    async_handle_update = processor.async_handle_update

    cancel = coordinator.async_start()

    inject_bluetooth_service_info(hass, GENERIC_BLUETOOTH_SERVICE_INFO)
    await hass.async_block_till_done()
    assert needs_poll_calls == 1

    assert coordinator.available is True

    # async_handle_update should have been called twice
    # The first time, it was passed the data from parsing the advertisement
    # The second time, it was passed the data from polling
    assert len(async_handle_update.mock_calls) == 2
    assert async_handle_update.mock_calls[0] == call({"testdata": 0})
    assert async_handle_update.mock_calls[1] == call({"testdata": 1})

    hass.state = CoreState.stopping
    await hass.async_block_till_done()
    assert needs_poll_calls == 1

    # Should not generate a poll now that CoreState is stopping
    inject_bluetooth_service_info(hass, GENERIC_BLUETOOTH_SERVICE_INFO_2)
    await hass.async_block_till_done()
    assert needs_poll_calls == 1

    cancel()
@@ -49,7 +49,17 @@ async def test_ok_requests(
|
||||
("/", {"test": "test/../../api"}, True),
|
||||
("/", {"test": "/test/%2E%2E%2f%2E%2E%2fapi"}, True),
|
||||
("/", {"test": "test/%2E%2E%2f%2E%2E%2fapi"}, True),
|
||||
("/", {"test": "test/%252E%252E/api"}, True),
|
||||
("/", {"test": "test/%252E%252E%2fapi"}, True),
|
||||
(
|
||||
"/",
|
||||
{"test": "test/%2525252E%2525252E%2525252f%2525252E%2525252E%2525252fapi"},
|
||||
True,
|
||||
),
|
||||
("/test/.%252E/api", {}, False),
|
||||
("/test/%252E%252E/api", {}, False),
|
||||
("/test/%2E%2E%2f%2E%2E%2fapi", {}, False),
|
||||
("/test/%2525252E%2525252E%2525252f%2525252E%2525252E/api", {}, False),
|
||||
("/", {"sql": ";UNION SELECT (a, b"}, True),
|
||||
("/", {"sql": "UNION%20SELECT%20%28a%2C%20b"}, True),
|
||||
("/UNION%20SELECT%20%28a%2C%20b", {}, False),
|
||||
@@ -87,7 +97,7 @@ async def test_bad_requests(
|
||||
None,
|
||||
http.request,
|
||||
"GET",
|
||||
f"http://{mock_api_client.host}:{mock_api_client.port}/{request_path}{man_params}",
|
||||
f"http://{mock_api_client.host}:{mock_api_client.port}{request_path}{man_params}",
|
||||
request_params,
|
||||
)
|
||||
|
||||
|
||||
@@ -84,6 +84,7 @@ async def test_get_triggers(
    }
    for event_type in (
        ButtonEvent.INITIAL_PRESS,
        ButtonEvent.LONG_PRESS,
        ButtonEvent.LONG_RELEASE,
        ButtonEvent.REPEAT,
        ButtonEvent.SHORT_RELEASE,
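The hue trigger test now iterates one more ButtonEvent member, which is easier to keep in sync when the expected trigger list is generated from the enum itself. A small sketch of that pattern with a stand-in enum (the real ButtonEvent comes from aiohue, and the trigger dict keys here are illustrative):

from enum import Enum


class ButtonEvent(Enum):  # stand-in for aiohue's ButtonEvent
    INITIAL_PRESS = "initial_press"
    LONG_PRESS = "long_press"
    LONG_RELEASE = "long_release"
    REPEAT = "repeat"
    SHORT_RELEASE = "short_release"


expected_triggers = [
    {"platform": "device", "domain": "hue", "type": event.value, "subtype": 1}
    for event in ButtonEvent
]
assert len(expected_triggers) == len(ButtonEvent)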
@@ -4,9 +4,9 @@ from contextlib import ExitStack
|
||||
from typing import Any
|
||||
from unittest.mock import AsyncMock, Mock, patch
|
||||
|
||||
from nibe.coil import Coil
|
||||
from nibe.coil import Coil, CoilData
|
||||
from nibe.connection import Connection
|
||||
from nibe.exceptions import CoilReadException
|
||||
from nibe.exceptions import ReadException
|
||||
import pytest
|
||||
|
||||
|
||||
@@ -39,12 +39,11 @@ async def fixture_coils(mock_connection):
|
||||
"""Return a dict with coil data."""
|
||||
coils: dict[int, Any] = {}
|
||||
|
||||
async def read_coil(coil: Coil, timeout: float = 0) -> Coil:
|
||||
async def read_coil(coil: Coil, timeout: float = 0) -> CoilData:
|
||||
nonlocal coils
|
||||
if (data := coils.get(coil.address, None)) is None:
|
||||
raise CoilReadException()
|
||||
coil.value = data
|
||||
return coil
|
||||
raise ReadException()
|
||||
return CoilData(coil, data)
|
||||
|
||||
async def read_coils(
|
||||
coils: Iterable[Coil], timeout: float = 0
|
||||
|
||||
@@ -3,7 +3,7 @@ from typing import Any
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from freezegun.api import FrozenDateTimeFactory
|
||||
from nibe.coil import Coil
|
||||
from nibe.coil import CoilData
|
||||
from nibe.coil_groups import UNIT_COILGROUPS
|
||||
from nibe.heatpump import Model
|
||||
import pytest
|
||||
@@ -91,6 +91,6 @@ async def test_reset_button(
|
||||
# Verify reset was written
|
||||
args = mock_connection.write_coil.call_args
|
||||
assert args
|
||||
coil: Coil = args.args[0]
|
||||
assert coil.address == unit.alarm_reset
|
||||
coil: CoilData = args.args[0]
|
||||
assert coil.coil.address == unit.alarm_reset
|
||||
assert coil.value == 1
|
||||
|
||||
@@ -5,9 +5,9 @@ from nibe.coil import Coil
|
||||
from nibe.exceptions import (
|
||||
AddressInUseException,
|
||||
CoilNotFoundException,
|
||||
CoilReadException,
|
||||
CoilReadSendException,
|
||||
CoilWriteException,
|
||||
ReadException,
|
||||
ReadSendException,
|
||||
WriteException,
|
||||
)
|
||||
import pytest
|
||||
|
||||
@@ -169,7 +169,7 @@ async def test_read_timeout(
|
||||
"""Test we handle cannot connect error."""
|
||||
result = await _get_connection_form(hass, connection_type)
|
||||
|
||||
mock_connection.verify_connectivity.side_effect = CoilReadException()
|
||||
mock_connection.verify_connectivity.side_effect = ReadException()
|
||||
|
||||
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data)
|
||||
|
||||
@@ -190,7 +190,7 @@ async def test_write_timeout(
|
||||
"""Test we handle cannot connect error."""
|
||||
result = await _get_connection_form(hass, connection_type)
|
||||
|
||||
mock_connection.verify_connectivity.side_effect = CoilWriteException()
|
||||
mock_connection.verify_connectivity.side_effect = WriteException()
|
||||
|
||||
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data)
|
||||
|
||||
@@ -232,7 +232,7 @@ async def test_nibegw_invalid_host(
|
||||
"""Test we handle cannot connect error."""
|
||||
result = await _get_connection_form(hass, connection_type)
|
||||
|
||||
mock_connection.verify_connectivity.side_effect = CoilReadSendException()
|
||||
mock_connection.verify_connectivity.side_effect = ReadSendException()
|
||||
|
||||
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data)
|
||||
|
||||
|
||||
@@ -1,8 +1,27 @@
|
||||
"""Tests for the Open Thread Border Router integration."""
|
||||
BASE_URL = "http://core-silabs-multiprotocol:8081"
|
||||
CONFIG_ENTRY_DATA = {"url": "http://core-silabs-multiprotocol:8081"}
|
||||
DATASET = bytes.fromhex(
|
||||
|
||||
DATASET_CH15 = bytes.fromhex(
|
||||
"0E080000000000010000000300000F35060004001FFFE00208F642646DA209B1C00708FDF57B5A"
|
||||
"0FE2AAF60510DE98B5BA1A528FEE049D4B4B01835375030D4F70656E5468726561642048410102"
|
||||
"25A40410F5DD18371BFD29E1A601EF6FFAD94C030C0402A0F7F8"
|
||||
)
|
||||
|
||||
DATASET_CH16 = bytes.fromhex(
|
||||
"0E080000000000010000000300001035060004001FFFE00208F642646DA209B1C00708FDF57B5A"
|
||||
"0FE2AAF60510DE98B5BA1A528FEE049D4B4B01835375030D4F70656E5468726561642048410102"
|
||||
"25A40410F5DD18371BFD29E1A601EF6FFAD94C030C0402A0F7F8"
|
||||
)
|
||||
|
||||
DATASET_INSECURE_NW_KEY = bytes.fromhex(
|
||||
"0E080000000000010000000300000F35060004001FFFE0020811111111222222220708FDD24657"
|
||||
"0A336069051000112233445566778899AABBCCDDEEFF030E4F70656E54687265616444656D6F01"
|
||||
"0212340410445F2B5CA6F2A93A55CE570A70EFEECB0C0402A0F7F8"
|
||||
)
|
||||
|
||||
DATASET_INSECURE_PASSPHRASE = bytes.fromhex(
|
||||
"0E080000000000010000000300000F35060004001FFFE0020811111111222222220708FDD24657"
|
||||
"0A336069051000112233445566778899AABBCCDDEEFA030E4F70656E54687265616444656D6F01"
|
||||
"0212340410445F2B5CA6F2A93A55CE570A70EFEECB0C0402A0F7F8"
|
||||
)
|
||||
|
||||
@@ -5,7 +5,7 @@ import pytest
|
||||
|
||||
from homeassistant.components import otbr
|
||||
|
||||
from . import CONFIG_ENTRY_DATA, DATASET
|
||||
from . import CONFIG_ENTRY_DATA, DATASET_CH16
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
@@ -20,7 +20,11 @@ async def otbr_config_entry_fixture(hass):
|
||||
title="Open Thread Border Router",
|
||||
)
|
||||
config_entry.add_to_hass(hass)
|
||||
with patch("python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET):
|
||||
with patch(
|
||||
"python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16
|
||||
), patch(
|
||||
"homeassistant.components.otbr.compute_pskc"
|
||||
): # Patch to speed up tests
|
||||
assert await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
|
||||
|
||||
|
||||
@@ -11,6 +11,8 @@ from homeassistant.components import hassio, otbr
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
|
||||
from . import DATASET_CH15, DATASET_CH16
|
||||
|
||||
from tests.common import MockConfigEntry, MockModule, mock_integration
|
||||
from tests.test_util.aiohttp import AiohttpClientMocker
|
||||
|
||||
@@ -94,7 +96,10 @@ async def test_user_flow_router_not_setup(
|
||||
# Check we create a dataset and enable the router
|
||||
assert aioclient_mock.mock_calls[-2][0] == "POST"
|
||||
assert aioclient_mock.mock_calls[-2][1].path == "/node/dataset/active"
|
||||
assert aioclient_mock.mock_calls[-2][2] == {"NetworkName": "home-assistant"}
|
||||
assert aioclient_mock.mock_calls[-2][2] == {
|
||||
"Channel": 15,
|
||||
"NetworkName": "home-assistant",
|
||||
}
|
||||
|
||||
assert aioclient_mock.mock_calls[-1][0] == "POST"
|
||||
assert aioclient_mock.mock_calls[-1][1].path == "/node/state"
|
||||
@@ -226,7 +231,10 @@ async def test_hassio_discovery_flow_router_not_setup(
|
||||
# Check we create a dataset and enable the router
|
||||
assert aioclient_mock.mock_calls[-2][0] == "POST"
|
||||
assert aioclient_mock.mock_calls[-2][1].path == "/node/dataset/active"
|
||||
assert aioclient_mock.mock_calls[-2][2] == {"NetworkName": "home-assistant"}
|
||||
assert aioclient_mock.mock_calls[-2][2] == {
|
||||
"Channel": 15,
|
||||
"NetworkName": "home-assistant",
|
||||
}
|
||||
|
||||
assert aioclient_mock.mock_calls[-1][0] == "POST"
|
||||
assert aioclient_mock.mock_calls[-1][1].path == "/node/state"
|
||||
@@ -263,7 +271,7 @@ async def test_hassio_discovery_flow_router_not_setup_has_preferred(
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.otbr.config_flow.async_get_preferred_dataset",
|
||||
return_value="aa",
|
||||
return_value=DATASET_CH15.hex(),
|
||||
), patch(
|
||||
"homeassistant.components.otbr.async_setup_entry",
|
||||
return_value=True,
|
||||
@@ -275,7 +283,60 @@ async def test_hassio_discovery_flow_router_not_setup_has_preferred(
|
||||
# Check we create a dataset and enable the router
|
||||
assert aioclient_mock.mock_calls[-2][0] == "PUT"
|
||||
assert aioclient_mock.mock_calls[-2][1].path == "/node/dataset/active"
|
||||
assert aioclient_mock.mock_calls[-2][2] == "aa"
|
||||
assert aioclient_mock.mock_calls[-2][2] == DATASET_CH15.hex()
|
||||
|
||||
assert aioclient_mock.mock_calls[-1][0] == "POST"
|
||||
assert aioclient_mock.mock_calls[-1][1].path == "/node/state"
|
||||
assert aioclient_mock.mock_calls[-1][2] == "enable"
|
||||
|
||||
expected_data = {
|
||||
"url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}",
|
||||
}
|
||||
|
||||
assert result["type"] == FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == "Open Thread Border Router"
|
||||
assert result["data"] == expected_data
|
||||
assert result["options"] == {}
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0]
|
||||
assert config_entry.data == expected_data
|
||||
assert config_entry.options == {}
|
||||
assert config_entry.title == "Open Thread Border Router"
|
||||
assert config_entry.unique_id == otbr.DOMAIN
|
||||
|
||||
|
||||
async def test_hassio_discovery_flow_router_not_setup_has_preferred_2(
|
||||
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test the hassio discovery flow when the border router has no dataset.
|
||||
|
||||
This tests the behavior when the thread integration has a preferred dataset, but
|
||||
the preferred dataset is not using channel 15.
|
||||
"""
|
||||
url = "http://core-silabs-multiprotocol:8081"
|
||||
aioclient_mock.get(f"{url}/node/dataset/active", status=HTTPStatus.NO_CONTENT)
|
||||
aioclient_mock.post(f"{url}/node/dataset/active", status=HTTPStatus.ACCEPTED)
|
||||
aioclient_mock.post(f"{url}/node/state", status=HTTPStatus.OK)
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.otbr.config_flow.async_get_preferred_dataset",
|
||||
return_value=DATASET_CH16.hex(),
|
||||
), patch(
|
||||
"homeassistant.components.otbr.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA
|
||||
)
|
||||
|
||||
# Check we create a dataset and enable the router
|
||||
assert aioclient_mock.mock_calls[-2][0] == "POST"
|
||||
assert aioclient_mock.mock_calls[-2][1].path == "/node/dataset/active"
|
||||
assert aioclient_mock.mock_calls[-2][2] == {
|
||||
"Channel": 15,
|
||||
"NetworkName": "home-assistant",
|
||||
}
|
||||
|
||||
assert aioclient_mock.mock_calls[-1][0] == "POST"
|
||||
assert aioclient_mock.mock_calls[-1][1].path == "/node/state"
|
||||
|
||||
@@ -10,8 +10,15 @@ import python_otbr_api
|
||||
from homeassistant.components import otbr
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import issue_registry as ir
|
||||
|
||||
from . import BASE_URL, CONFIG_ENTRY_DATA, DATASET
|
||||
from . import (
|
||||
BASE_URL,
|
||||
CONFIG_ENTRY_DATA,
|
||||
DATASET_CH16,
|
||||
DATASET_INSECURE_NW_KEY,
|
||||
DATASET_INSECURE_PASSPHRASE,
|
||||
)
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
from tests.test_util.aiohttp import AiohttpClientMocker
|
||||
@@ -19,6 +26,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker
|
||||
|
||||
async def test_import_dataset(hass: HomeAssistant) -> None:
|
||||
"""Test the active dataset is imported at setup."""
|
||||
issue_registry = ir.async_get(hass)
|
||||
|
||||
config_entry = MockConfigEntry(
|
||||
data=CONFIG_ENTRY_DATA,
|
||||
@@ -28,13 +36,46 @@ async def test_import_dataset(hass: HomeAssistant) -> None:
|
||||
)
|
||||
config_entry.add_to_hass(hass)
|
||||
with patch(
|
||||
"python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET
|
||||
"python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16
|
||||
), patch(
|
||||
"homeassistant.components.thread.dataset_store.DatasetStore.async_add"
|
||||
) as mock_add:
|
||||
assert await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
|
||||
mock_add.assert_called_once_with(config_entry.title, DATASET.hex())
|
||||
mock_add.assert_called_once_with(config_entry.title, DATASET_CH16.hex())
|
||||
assert not issue_registry.async_get_issue(
|
||||
domain=otbr.DOMAIN, issue_id=f"insecure_thread_network_{config_entry.entry_id}"
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"dataset", [DATASET_INSECURE_NW_KEY, DATASET_INSECURE_PASSPHRASE]
|
||||
)
|
||||
async def test_import_insecure_dataset(hass: HomeAssistant, dataset: bytes) -> None:
|
||||
"""Test the active dataset is imported at setup.
|
||||
|
||||
This imports a dataset with insecure settings.
|
||||
"""
|
||||
issue_registry = ir.async_get(hass)
|
||||
|
||||
config_entry = MockConfigEntry(
|
||||
data=CONFIG_ENTRY_DATA,
|
||||
domain=otbr.DOMAIN,
|
||||
options={},
|
||||
title="My OTBR",
|
||||
)
|
||||
config_entry.add_to_hass(hass)
|
||||
with patch(
|
||||
"python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=dataset
|
||||
), patch(
|
||||
"homeassistant.components.thread.dataset_store.DatasetStore.async_add"
|
||||
) as mock_add:
|
||||
assert await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
|
||||
mock_add.assert_called_once_with(config_entry.title, dataset.hex())
|
||||
assert issue_registry.async_get_issue(
|
||||
domain=otbr.DOMAIN, issue_id=f"insecure_thread_network_{config_entry.entry_id}"
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
|
||||
@@ -96,3 +96,141 @@ async def test_get_info_fetch_fails(
|
||||
assert msg["id"] == 5
|
||||
assert not msg["success"]
|
||||
assert msg["error"]["code"] == "get_dataset_failed"
|
||||
|
||||
|
||||
async def test_create_network(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
otbr_config_entry,
|
||||
websocket_client,
|
||||
) -> None:
|
||||
"""Test create network."""
|
||||
|
||||
with patch(
|
||||
"python_otbr_api.OTBR.create_active_dataset"
|
||||
) as create_dataset_mock, patch(
|
||||
"python_otbr_api.OTBR.set_enabled"
|
||||
) as set_enabled_mock:
|
||||
await websocket_client.send_json(
|
||||
{
|
||||
"id": 5,
|
||||
"type": "otbr/create_network",
|
||||
}
|
||||
)
|
||||
|
||||
msg = await websocket_client.receive_json()
|
||||
assert msg["id"] == 5
|
||||
assert msg["success"]
|
||||
assert msg["result"] is None
|
||||
|
||||
create_dataset_mock.assert_called_once_with(
|
||||
python_otbr_api.models.OperationalDataSet(
|
||||
channel=15, network_name="home-assistant"
|
||||
)
|
||||
)
|
||||
assert len(set_enabled_mock.mock_calls) == 2
|
||||
assert set_enabled_mock.mock_calls[0][1][0] is False
|
||||
assert set_enabled_mock.mock_calls[1][1][0] is True
|
||||
|
||||
|
||||
async def test_create_network_no_entry(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
) -> None:
|
||||
"""Test create network."""
|
||||
await async_setup_component(hass, "otbr", {})
|
||||
websocket_client = await hass_ws_client(hass)
|
||||
await websocket_client.send_json(
|
||||
{
|
||||
"id": 5,
|
||||
"type": "otbr/create_network",
|
||||
}
|
||||
)
|
||||
|
||||
msg = await websocket_client.receive_json()
|
||||
assert msg["id"] == 5
|
||||
assert not msg["success"]
|
||||
assert msg["error"]["code"] == "not_loaded"
|
||||
|
||||
|
||||
async def test_get_info_fetch_fails_1(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
otbr_config_entry,
|
||||
websocket_client,
|
||||
) -> None:
|
||||
"""Test create network."""
|
||||
await async_setup_component(hass, "otbr", {})
|
||||
|
||||
with patch(
|
||||
"python_otbr_api.OTBR.set_enabled",
|
||||
side_effect=python_otbr_api.OTBRError,
|
||||
):
|
||||
await websocket_client.send_json(
|
||||
{
|
||||
"id": 5,
|
||||
"type": "otbr/create_network",
|
||||
}
|
||||
)
|
||||
msg = await websocket_client.receive_json()
|
||||
|
||||
assert msg["id"] == 5
|
||||
assert not msg["success"]
|
||||
assert msg["error"]["code"] == "set_enabled_failed"
|
||||
|
||||
|
||||
async def test_get_info_fetch_fails_2(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
otbr_config_entry,
|
||||
websocket_client,
|
||||
) -> None:
|
||||
"""Test create network."""
|
||||
await async_setup_component(hass, "otbr", {})
|
||||
|
||||
with patch(
|
||||
"python_otbr_api.OTBR.set_enabled",
|
||||
), patch(
|
||||
"python_otbr_api.OTBR.create_active_dataset",
|
||||
side_effect=python_otbr_api.OTBRError,
|
||||
):
|
||||
await websocket_client.send_json(
|
||||
{
|
||||
"id": 5,
|
||||
"type": "otbr/create_network",
|
||||
}
|
||||
)
|
||||
msg = await websocket_client.receive_json()
|
||||
|
||||
assert msg["id"] == 5
|
||||
assert not msg["success"]
|
||||
assert msg["error"]["code"] == "create_active_dataset_failed"
|
||||
|
||||
|
||||
async def test_get_info_fetch_fails_3(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
otbr_config_entry,
|
||||
websocket_client,
|
||||
) -> None:
|
||||
"""Test create network."""
|
||||
await async_setup_component(hass, "otbr", {})
|
||||
|
||||
with patch(
|
||||
"python_otbr_api.OTBR.set_enabled",
|
||||
side_effect=[None, python_otbr_api.OTBRError],
|
||||
), patch(
|
||||
"python_otbr_api.OTBR.create_active_dataset",
|
||||
):
|
||||
await websocket_client.send_json(
|
||||
{
|
||||
"id": 5,
|
||||
"type": "otbr/create_network",
|
||||
}
|
||||
)
|
||||
msg = await websocket_client.receive_json()
|
||||
|
||||
assert msg["id"] == 5
|
||||
assert not msg["success"]
|
||||
assert msg["error"]["code"] == "set_enabled_failed"
|
||||
|
||||
@@ -915,6 +915,7 @@ async def test_unit_conversion_priority(
|
||||
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == automatic_unit
|
||||
# Assert the automatic unit conversion is stored in the registry
|
||||
entry = entity_registry.async_get(entity0.entity_id)
|
||||
assert entry.unit_of_measurement == automatic_unit
|
||||
assert entry.options == {
|
||||
"sensor.private": {"suggested_unit_of_measurement": automatic_unit}
|
||||
}
|
||||
@@ -930,6 +931,7 @@ async def test_unit_conversion_priority(
|
||||
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == suggested_unit
|
||||
# Assert the suggested unit is stored in the registry
|
||||
entry = entity_registry.async_get(entity2.entity_id)
|
||||
assert entry.unit_of_measurement == suggested_unit
|
||||
assert entry.options == {
|
||||
"sensor.private": {"suggested_unit_of_measurement": suggested_unit}
|
||||
}
|
||||
@@ -1065,6 +1067,7 @@ async def test_unit_conversion_priority_precision(
|
||||
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == automatic_unit
|
||||
# Assert the automatic unit conversion is stored in the registry
|
||||
entry = entity_registry.async_get(entity0.entity_id)
|
||||
assert entry.unit_of_measurement == automatic_unit
|
||||
assert entry.options == {
|
||||
"sensor": {"suggested_display_precision": 2},
|
||||
"sensor.private": {"suggested_unit_of_measurement": automatic_unit},
|
||||
@@ -1081,6 +1084,7 @@ async def test_unit_conversion_priority_precision(
|
||||
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == suggested_unit
|
||||
# Assert the suggested unit is stored in the registry
|
||||
entry = entity_registry.async_get(entity2.entity_id)
|
||||
assert entry.unit_of_measurement == suggested_unit
|
||||
assert entry.options == {
|
||||
"sensor": {"suggested_display_precision": 2},
|
||||
"sensor.private": {"suggested_unit_of_measurement": suggested_unit},
|
||||
@@ -1154,13 +1158,17 @@ async def test_unit_conversion_priority_suggested_unit_change(
|
||||
platform.init(empty=True)
|
||||
|
||||
# Pre-register entities
|
||||
entry = entity_registry.async_get_or_create("sensor", "test", "very_unique")
|
||||
entry = entity_registry.async_get_or_create(
|
||||
"sensor", "test", "very_unique", unit_of_measurement=original_unit
|
||||
)
|
||||
entity_registry.async_update_entity_options(
|
||||
entry.entity_id,
|
||||
"sensor.private",
|
||||
{"suggested_unit_of_measurement": original_unit},
|
||||
)
|
||||
entry = entity_registry.async_get_or_create("sensor", "test", "very_unique_2")
|
||||
entry = entity_registry.async_get_or_create(
|
||||
"sensor", "test", "very_unique_2", unit_of_measurement=original_unit
|
||||
)
|
||||
entity_registry.async_update_entity_options(
|
||||
entry.entity_id,
|
||||
"sensor.private",
|
||||
@@ -1193,11 +1201,124 @@ async def test_unit_conversion_priority_suggested_unit_change(
|
||||
state = hass.states.get(entity0.entity_id)
|
||||
assert float(state.state) == pytest.approx(float(original_value))
|
||||
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == original_unit
|
||||
# Assert the suggested unit is stored in the registry
|
||||
entry = entity_registry.async_get(entity0.entity_id)
|
||||
assert entry.unit_of_measurement == original_unit
|
||||
assert entry.options == {
|
||||
"sensor.private": {"suggested_unit_of_measurement": original_unit},
|
||||
}
|
||||
|
||||
# Registered entity -> Follow suggested unit the first time the entity was seen
|
||||
state = hass.states.get(entity1.entity_id)
|
||||
assert float(state.state) == pytest.approx(float(original_value))
|
||||
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == original_unit
|
||||
# Assert the suggested unit is stored in the registry
|
||||
entry = entity_registry.async_get(entity1.entity_id)
|
||||
assert entry.unit_of_measurement == original_unit
|
||||
assert entry.options == {
|
||||
"sensor.private": {"suggested_unit_of_measurement": original_unit},
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
(
|
||||
"native_unit_1",
|
||||
"native_unit_2",
|
||||
"suggested_unit",
|
||||
"native_value",
|
||||
"original_value",
|
||||
"device_class",
|
||||
),
|
||||
[
|
||||
# Distance
|
||||
(
|
||||
UnitOfLength.KILOMETERS,
|
||||
UnitOfLength.METERS,
|
||||
UnitOfLength.KILOMETERS,
|
||||
1000000,
|
||||
1000,
|
||||
SensorDeviceClass.DISTANCE,
|
||||
),
|
||||
# Energy
|
||||
(
|
||||
UnitOfEnergy.KILO_WATT_HOUR,
|
||||
UnitOfEnergy.WATT_HOUR,
|
||||
UnitOfEnergy.KILO_WATT_HOUR,
|
||||
1000000,
|
||||
1000,
|
||||
SensorDeviceClass.ENERGY,
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_unit_conversion_priority_suggested_unit_change_2(
|
||||
hass: HomeAssistant,
|
||||
enable_custom_integrations: None,
|
||||
native_unit_1,
|
||||
native_unit_2,
|
||||
suggested_unit,
|
||||
native_value,
|
||||
original_value,
|
||||
device_class,
|
||||
) -> None:
|
||||
"""Test priority of unit conversion."""
|
||||
|
||||
hass.config.units = METRIC_SYSTEM
|
||||
|
||||
entity_registry = er.async_get(hass)
|
||||
platform = getattr(hass.components, "test.sensor")
|
||||
platform.init(empty=True)
|
||||
|
||||
# Pre-register entities
|
||||
entity_registry.async_get_or_create(
|
||||
"sensor", "test", "very_unique", unit_of_measurement=native_unit_1
|
||||
)
|
||||
entity_registry.async_get_or_create(
|
||||
"sensor", "test", "very_unique_2", unit_of_measurement=native_unit_1
|
||||
)
|
||||
|
||||
platform.ENTITIES["0"] = platform.MockSensor(
|
||||
name="Test",
|
||||
device_class=device_class,
|
||||
native_unit_of_measurement=native_unit_2,
|
||||
native_value=str(native_value),
|
||||
unique_id="very_unique",
|
||||
)
|
||||
entity0 = platform.ENTITIES["0"]
|
||||
|
||||
platform.ENTITIES["1"] = platform.MockSensor(
|
||||
name="Test",
|
||||
device_class=device_class,
|
||||
native_unit_of_measurement=native_unit_2,
|
||||
native_value=str(native_value),
|
||||
suggested_unit_of_measurement=suggested_unit,
|
||||
unique_id="very_unique_2",
|
||||
)
|
||||
entity1 = platform.ENTITIES["1"]
|
||||
|
||||
assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Registered entity -> Follow unit in entity registry
|
||||
state = hass.states.get(entity0.entity_id)
|
||||
assert float(state.state) == pytest.approx(float(original_value))
|
||||
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == native_unit_1
|
||||
# Assert the suggested unit is stored in the registry
|
||||
entry = entity_registry.async_get(entity0.entity_id)
|
||||
assert entry.unit_of_measurement == native_unit_1
|
||||
assert entry.options == {
|
||||
"sensor.private": {"suggested_unit_of_measurement": native_unit_1},
|
||||
}
|
||||
|
||||
# Registered entity -> Follow unit in entity registry
|
||||
state = hass.states.get(entity1.entity_id)
|
||||
assert float(state.state) == pytest.approx(float(original_value))
|
||||
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == native_unit_1
|
||||
# Assert the suggested unit is stored in the registry
|
||||
entry = entity_registry.async_get(entity0.entity_id)
|
||||
assert entry.unit_of_measurement == native_unit_1
|
||||
assert entry.options == {
|
||||
"sensor.private": {"suggested_unit_of_measurement": native_unit_1},
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
|
||||
@@ -72,7 +72,7 @@ OPEN_CLOSE_COVER_CONFIG = {
    (
        "cover.test_state",
        "dog",
        STATE_CLOSING,
        STATE_UNKNOWN,
        {},
        -1,
        "Received invalid cover is_on state: dog",
@@ -78,14 +78,14 @@ async def test_import_then_zeroconf(hass: HomeAssistant) -> None:
|
||||
assert len(mock_setup_entry.mock_calls) == 0
|
||||
|
||||
|
||||
async def test_zeroconf(hass: HomeAssistant) -> None:
|
||||
"""Test the zeroconf flow."""
|
||||
async def test_user(hass: HomeAssistant) -> None:
|
||||
"""Test the user flow."""
|
||||
with patch(
|
||||
"homeassistant.components.thread.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
thread.DOMAIN, context={"source": "zeroconf"}, data=TEST_ZEROCONF_RECORD
|
||||
thread.DOMAIN, context={"source": "user"}
|
||||
)
|
||||
|
||||
assert result["type"] == FlowResultType.CREATE_ENTRY
|
||||
@@ -101,16 +101,61 @@ async def test_zeroconf(hass: HomeAssistant) -> None:
|
||||
assert config_entry.unique_id is None
|
||||
|
||||
|
||||
async def test_zeroconf_then_import(hass: HomeAssistant) -> None:
|
||||
"""Test the import flow."""
|
||||
async def test_zeroconf(hass: HomeAssistant) -> None:
|
||||
"""Test the zeroconf flow."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
thread.DOMAIN, context={"source": "zeroconf"}, data=TEST_ZEROCONF_RECORD
|
||||
)
|
||||
assert result["type"] == FlowResultType.FORM
|
||||
assert result["errors"] is None
|
||||
assert result["step_id"] == "confirm"
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.thread.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
|
||||
assert result["type"] == FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == "Thread"
|
||||
assert result["data"] == {}
|
||||
assert result["options"] == {}
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
config_entry = hass.config_entries.async_entries(thread.DOMAIN)[0]
|
||||
assert config_entry.data == {}
|
||||
assert config_entry.options == {}
|
||||
assert config_entry.title == "Thread"
|
||||
assert config_entry.unique_id is None
|
||||
|
||||
|
||||
async def test_zeroconf_setup_onboarding(hass: HomeAssistant) -> None:
|
||||
"""Test we automatically finish a zeroconf flow during onboarding."""
|
||||
with patch(
|
||||
"homeassistant.components.onboarding.async_is_onboarded", return_value=False
|
||||
), patch(
|
||||
"homeassistant.components.thread.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
thread.DOMAIN, context={"source": "zeroconf"}, data=TEST_ZEROCONF_RECORD
|
||||
)
|
||||
assert result["type"] == FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == "Thread"
|
||||
assert result["data"] == {}
|
||||
assert result["options"] == {}
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_zeroconf_then_import(hass: HomeAssistant) -> None:
|
||||
"""Test the import flow."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
thread.DOMAIN, context={"source": "zeroconf"}, data=TEST_ZEROCONF_RECORD
|
||||
)
|
||||
with patch(
|
||||
"homeassistant.components.thread.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
|
||||
assert result["type"] == FlowResultType.CREATE_ENTRY
|
||||
|
||||
with patch(
|
||||
|
||||
@@ -71,7 +71,10 @@ async def test_form(hass: HomeAssistant, nvr: NVR) -> None:
|
||||
), patch(
|
||||
"homeassistant.components.unifiprotect.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
) as mock_setup_entry, patch(
|
||||
"homeassistant.components.unifiprotect.async_setup",
|
||||
return_value=True,
|
||||
) as mock_setup:
|
||||
result2 = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
@@ -93,6 +96,7 @@ async def test_form(hass: HomeAssistant, nvr: NVR) -> None:
|
||||
"verify_ssl": False,
|
||||
}
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
assert len(mock_setup.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_form_version_too_old(hass: HomeAssistant, old_nvr: NVR) -> None:
|
||||
@@ -214,7 +218,10 @@ async def test_form_reauth_auth(hass: HomeAssistant, nvr: NVR) -> None:
|
||||
with patch(
|
||||
"homeassistant.components.unifiprotect.config_flow.ProtectApiClient.get_nvr",
|
||||
return_value=nvr,
|
||||
):
|
||||
), patch(
|
||||
"homeassistant.components.unifiprotect.async_setup",
|
||||
return_value=True,
|
||||
) as mock_setup:
|
||||
result3 = await hass.config_entries.flow.async_configure(
|
||||
result2["flow_id"],
|
||||
{
|
||||
@@ -225,6 +232,7 @@ async def test_form_reauth_auth(hass: HomeAssistant, nvr: NVR) -> None:
|
||||
|
||||
assert result3["type"] == FlowResultType.ABORT
|
||||
assert result3["reason"] == "reauth_successful"
|
||||
assert len(mock_setup.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_form_options(hass: HomeAssistant, ufp_client: ProtectApiClient) -> None:
|
||||
@@ -332,7 +340,10 @@ async def test_discovered_by_unifi_discovery_direct_connect(
|
||||
), patch(
|
||||
"homeassistant.components.unifiprotect.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
) as mock_setup_entry, patch(
|
||||
"homeassistant.components.unifiprotect.async_setup",
|
||||
return_value=True,
|
||||
) as mock_setup:
|
||||
result2 = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
@@ -353,6 +364,7 @@ async def test_discovered_by_unifi_discovery_direct_connect(
|
||||
"verify_ssl": True,
|
||||
}
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
assert len(mock_setup.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_discovered_by_unifi_discovery_direct_connect_updated(
|
||||
@@ -515,7 +527,10 @@ async def test_discovered_by_unifi_discovery(hass: HomeAssistant, nvr: NVR) -> N
|
||||
), patch(
|
||||
"homeassistant.components.unifiprotect.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
) as mock_setup_entry, patch(
|
||||
"homeassistant.components.unifiprotect.async_setup",
|
||||
return_value=True,
|
||||
) as mock_setup:
|
||||
result2 = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
@@ -536,6 +551,7 @@ async def test_discovered_by_unifi_discovery(hass: HomeAssistant, nvr: NVR) -> N
|
||||
"verify_ssl": False,
|
||||
}
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
assert len(mock_setup.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_discovered_by_unifi_discovery_partial(
|
||||
@@ -567,7 +583,10 @@ async def test_discovered_by_unifi_discovery_partial(
|
||||
), patch(
|
||||
"homeassistant.components.unifiprotect.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
) as mock_setup_entry, patch(
|
||||
"homeassistant.components.unifiprotect.async_setup",
|
||||
return_value=True,
|
||||
) as mock_setup:
|
||||
result2 = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
@@ -588,6 +607,7 @@ async def test_discovered_by_unifi_discovery_partial(
|
||||
"verify_ssl": False,
|
||||
}
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
assert len(mock_setup.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_discovered_by_unifi_discovery_direct_connect_on_different_interface(
|
||||
@@ -736,7 +756,10 @@ async def test_discovered_by_unifi_discovery_direct_connect_on_different_interfa
|
||||
), patch(
|
||||
"homeassistant.components.unifiprotect.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
) as mock_setup_entry, patch(
|
||||
"homeassistant.components.unifiprotect.async_setup",
|
||||
return_value=True,
|
||||
) as mock_setup:
|
||||
result2 = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
@@ -757,6 +780,7 @@ async def test_discovered_by_unifi_discovery_direct_connect_on_different_interfa
|
||||
"verify_ssl": True,
|
||||
}
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
assert len(mock_setup.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_discovered_by_unifi_discovery_direct_connect_on_different_interface_resolver_no_result(
|
||||
|
||||
@@ -173,4 +173,4 @@ async def test_cant_turn_on_lock(hass: HomeAssistant) -> None:
    )

    assert result.response.response_type == intent.IntentResponseType.ERROR
    assert result.response.error_code == intent.IntentResponseErrorCode.FAILED_TO_HANDLE
    assert result.response.error_code == intent.IntentResponseErrorCode.NO_INTENT_MATCH
@@ -29,6 +29,7 @@ from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.setup import async_set_domains_to_be_loaded, async_setup_component
from homeassistant.util import dt
import homeassistant.util.dt as dt_util

from .common import (
    MockConfigEntry,
@@ -999,6 +1000,27 @@ async def test_setup_retrying_during_unload_before_started(hass: HomeAssistant)
    )


async def test_setup_does_not_retry_during_shutdown(hass: HomeAssistant) -> None:
    """Test we do not retry when HASS is shutting down."""
    entry = MockConfigEntry(domain="test")

    mock_setup_entry = AsyncMock(side_effect=ConfigEntryNotReady)
    mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.test", None)

    await entry.async_setup(hass)

    assert entry.state is config_entries.ConfigEntryState.SETUP_RETRY
    assert len(mock_setup_entry.mock_calls) == 1

    hass.state = CoreState.stopping
    async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=5))
    await hass.async_block_till_done()

    assert entry.state is config_entries.ConfigEntryState.SETUP_RETRY
    assert len(mock_setup_entry.mock_calls) == 1


async def test_create_entry_options(hass: HomeAssistant) -> None:
    """Test a config entry being created with options."""
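The new test above asserts that a config entry stuck in SETUP_RETRY stops rescheduling setup attempts once the core is shutting down. A small sketch of that "no more retries while stopping" guard, with hypothetical names rather than the config-entries implementation:

from __future__ import annotations

import asyncio
from collections.abc import Awaitable, Callable


async def retry_setup(
    is_stopping: Callable[[], bool],
    attempt_setup: Callable[[], Awaitable[bool]],
    delay: float = 5.0,
) -> None:
    """Keep retrying setup until it succeeds or shutdown begins."""
    while not is_stopping():
        if await attempt_setup():
            return
        await asyncio.sleep(delay)  # back off before the next attempt


async def main() -> None:
    attempts = 0

    async def attempt_setup() -> bool:
        nonlocal attempts
        attempts += 1
        return False  # simulate ConfigEntryNotReady forever

    stopping = False
    task = asyncio.create_task(retry_setup(lambda: stopping, attempt_setup, delay=0.01))
    await asyncio.sleep(0.05)
    stopping = True  # shutdown begins: the loop exits instead of rescheduling
    await task
    print(f"setup attempted {attempts} times before shutdown")


asyncio.run(main())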
@@ -9,6 +9,7 @@ import gc
import logging
import os
from tempfile import TemporaryDirectory
import time
from typing import Any
from unittest.mock import MagicMock, Mock, PropertyMock, patch

@@ -2003,3 +2004,49 @@ async def test_background_task(hass: HomeAssistant) -> None:
    await asyncio.sleep(0)
    await hass.async_stop()
    assert result.result() == ha.CoreState.stopping


async def test_shutdown_does_not_block_on_normal_tasks(
    hass: HomeAssistant,
) -> None:
    """Ensure shutdown does not block on normal tasks."""
    result = asyncio.Future()
    unshielded_task = asyncio.sleep(10)

    async def test_task():
        try:
            await unshielded_task
        except asyncio.CancelledError:
            result.set_result(hass.state)

    start = time.monotonic()
    task = hass.async_create_task(test_task())
    await asyncio.sleep(0)
    await hass.async_stop()
    await asyncio.sleep(0)
    assert result.done()
    assert task.done()
    assert time.monotonic() - start < 0.5


async def test_shutdown_does_not_block_on_shielded_tasks(
    hass: HomeAssistant,
) -> None:
    """Ensure shutdown does not block on shielded tasks."""
    result = asyncio.Future()
    shielded_task = asyncio.shield(asyncio.sleep(10))

    async def test_task():
        try:
            await shielded_task
        except asyncio.CancelledError:
            result.set_result(hass.state)

    start = time.monotonic()
    task = hass.async_create_task(test_task())
    await asyncio.sleep(0)
    await hass.async_stop()
    await asyncio.sleep(0)
    assert result.done()
    assert task.done()
    assert time.monotonic() - start < 0.5
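The two regression tests above put a hard time bound on shutdown even when a task hides behind asyncio.shield. The snippet below is a self-contained illustration of why shielded work needs the explicit cancel-or-timeout path: cancelling the shield only cancels the outer await, never the inner task.

import asyncio


async def main() -> None:
    inner = asyncio.ensure_future(asyncio.sleep(10))
    outer = asyncio.ensure_future(asyncio.shield(inner))
    await asyncio.sleep(0)

    outer.cancel()  # cancelling the shield does not cancel the inner sleep
    try:
        await outer
    except asyncio.CancelledError:
        pass

    print(f"inner still running: {not inner.done()}")  # True: it must be handled separately
    inner.cancel()  # either cancel it explicitly or bound the wait with a timeout
    try:
        await inner
    except asyncio.CancelledError:
        pass


asyncio.run(main())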