Mirror of https://github.com/home-assistant/core.git (synced 2026-01-06 23:57:17 +01:00)

Compare commits: 2022.2.0b6 ... 2022.2.1 (41 commits)
| SHA1 |
|---|
| f44f1f0c4a |
| b450a41d7b |
| 88c3ab1113 |
| f1c8fc241a |
| e5b9d5baa3 |
| 3c43089cc2 |
| c8827e00b3 |
| 3a1a12b13e |
| 2928935838 |
| b9a37e2c3e |
| 92f4f99d41 |
| e32a54eecc |
| 2bb65ecf38 |
| afbc55b181 |
| 931c27f452 |
| 689133976a |
| faa8ac692e |
| ec0b0e41a1 |
| 6550d04313 |
| 8d33964e4d |
| d195e8a1b4 |
| a8b29c4be9 |
| f7ec373aab |
| 1ae2bfcc89 |
| 1155d229f3 |
| 2f638a6b5e |
| 580573fcb3 |
| 8851af7dba |
| ec2e450442 |
| dacf5957d2 |
| 5190282b4d |
| 51c6cac74d |
| 1809489421 |
| 690764ec84 |
| 2b0e828736 |
| 91023cf132 |
| fcd14e2830 |
| 40a174cc70 |
| 95d4be375c |
| 37f9c833c0 |
| b902c59504 |
```
@@ -62,6 +62,9 @@ class AccuWeatherEntity(CoordinatorEntity, WeatherEntity):
"""Initialize."""
super().__init__(coordinator)
self._unit_system = API_METRIC if coordinator.is_metric else API_IMPERIAL
self._attr_wind_speed_unit = self.coordinator.data["Wind"]["Speed"][
self._unit_system
]["Unit"]
self._attr_name = name
self._attr_unique_id = coordinator.location_key
self._attr_temperature_unit = (

@@ -2,7 +2,7 @@
"domain": "aladdin_connect",
"name": "Aladdin Connect",
"documentation": "https://www.home-assistant.io/integrations/aladdin_connect",
"requirements": ["aladdin_connect==0.3"],
"requirements": ["aladdin_connect==0.4"],
"codeowners": [],
"iot_class": "cloud_polling"
}

@@ -179,7 +179,6 @@ class AppleTVManager:
def _handle_disconnect(self):
"""Handle that the device disconnected and restart connect loop."""
if self.atv:
self.atv.listener = None
self.atv.close()
self.atv = None
self._dispatch_send(SIGNAL_DISCONNECTED)
@@ -196,8 +195,6 @@ class AppleTVManager:
self._is_on = False
try:
if self.atv:
self.atv.push_updater.listener = None
self.atv.push_updater.stop()
self.atv.close()
self.atv = None
if self._task:

@@ -162,15 +162,15 @@ class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):
except exceptions.ProtocolError:
_LOGGER.exception("Failed to update app list")
else:
self._app_list = {app.name: app.identifier for app in apps}
self._app_list = {
app.name: app.identifier
for app in sorted(apps, key=lambda app: app.name.lower())
}
self.async_write_ha_state()

@callback
def async_device_disconnected(self):
"""Handle when connection was lost to device."""
self.atv.push_updater.stop()
self.atv.push_updater.listener = None
self.atv.power.listener = None
self._attr_supported_features = SUPPORT_APPLE_TV

@property
```
```
@@ -3,6 +3,7 @@ from __future__ import annotations

from http import HTTPStatus

from aiohttp import web
import aiohttp.web_exceptions
import voluptuous as vol

@@ -11,7 +12,7 @@ from homeassistant.auth.permissions.const import CAT_CONFIG_ENTRIES, POLICY_EDIT
from homeassistant.components import websocket_api
from homeassistant.components.http import HomeAssistantView
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import Unauthorized
from homeassistant.exceptions import DependencyError, Unauthorized
from homeassistant.helpers.data_entry_flow import (
FlowManagerIndexView,
FlowManagerResourceView,
@@ -127,7 +128,13 @@ class ConfigManagerFlowIndexView(FlowManagerIndexView):
raise Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission="add")

# pylint: disable=no-value-for-parameter
return await super().post(request)
try:
return await super().post(request)
except DependencyError as exc:
return web.Response(
text=f"Failed dependencies {', '.join(exc.failed_dependencies)}",
status=HTTPStatus.BAD_REQUEST,
)

def _prepare_result_json(self, result):
"""Convert result to JSON."""
```
```
@@ -33,6 +33,13 @@ DATA_SCHEMA = vol.Schema(
vol.Required(CONF_HOSTNAME, default=DEFAULT_HOSTNAME): cv.string,
}
)
DATA_SCHEMA_ADV = vol.Schema(
{
vol.Required(CONF_HOSTNAME, default=DEFAULT_HOSTNAME): cv.string,
vol.Optional(CONF_RESOLVER, default=DEFAULT_RESOLVER): cv.string,
vol.Optional(CONF_RESOLVER_IPV6, default=DEFAULT_RESOLVER_IPV6): cv.string,
}
)


async def async_validate_hostname(
@@ -94,8 +101,8 @@ class DnsIPConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):

hostname = user_input[CONF_HOSTNAME]
name = DEFAULT_NAME if hostname == DEFAULT_HOSTNAME else hostname
resolver = DEFAULT_RESOLVER
resolver_ipv6 = DEFAULT_RESOLVER_IPV6
resolver = user_input.get(CONF_RESOLVER, DEFAULT_RESOLVER)
resolver_ipv6 = user_input.get(CONF_RESOLVER_IPV6, DEFAULT_RESOLVER_IPV6)

validate = await async_validate_hostname(hostname, resolver, resolver_ipv6)

@@ -119,6 +126,12 @@ class DnsIPConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
},
)

if self.show_advanced_options is True:
return self.async_show_form(
step_id="user",
data_schema=DATA_SCHEMA_ADV,
errors=errors,
)
return self.async_show_form(
step_id="user",
data_schema=DATA_SCHEMA,

@@ -3,7 +3,9 @@
"step": {
"user": {
"data": {
"hostname": "The hostname for which to perform the DNS query"
"hostname": "The hostname for which to perform the DNS query",
"resolver": "Resolver for IPV4 lookup",
"resolver_ipv6": "Resolver for IPV6 lookup"
}
}
},

@@ -6,7 +6,9 @@
"step": {
"user": {
"data": {
"hostname": "The hostname for which to perform the DNS query"
"hostname": "The hostname for which to perform the DNS query",
"resolver": "Resolver for IPV4 lookup",
"resolver_ipv6": "Resolver for IPV6 lookup"
}
}
}

@@ -59,7 +59,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
update_interval=DEFAULT_UPDATE_INTERVAL,
update_method=partial(async_update, api_category),
)
data_init_tasks.append(coordinator.async_refresh())
data_init_tasks.append(coordinator.async_config_entry_first_refresh())

await asyncio.gather(*data_init_tasks)
hass.data.setdefault(DOMAIN, {})
```
```
@@ -220,8 +220,8 @@ class FritzBoxTools(update_coordinator.DataUpdateCoordinator):
"""Update FritzboxTools data."""
try:
await self.async_scan_devices()
except (FritzSecurityError, FritzConnectionException) as ex:
raise update_coordinator.UpdateFailed from ex
except FRITZ_EXCEPTIONS as ex:
raise update_coordinator.UpdateFailed(ex) from ex

@property
def unique_id(self) -> str:
@@ -294,11 +294,19 @@ class FritzBoxTools(update_coordinator.DataUpdateCoordinator):

def _get_wan_access(self, ip_address: str) -> bool | None:
"""Get WAN access rule for given IP address."""
return not self.connection.call_action(
"X_AVM-DE_HostFilter:1",
"GetWANAccessByIP",
NewIPv4Address=ip_address,
).get("NewDisallow")
try:
return not self.connection.call_action(
"X_AVM-DE_HostFilter:1",
"GetWANAccessByIP",
NewIPv4Address=ip_address,
).get("NewDisallow")
except FRITZ_EXCEPTIONS as ex:
_LOGGER.debug(
"could not get WAN access rule for client device with IP '%s', error: %s",
ip_address,
ex,
)
return None

async def async_scan_devices(self, now: datetime | None = None) -> None:
"""Wrap up FritzboxTools class scan."""
@@ -566,6 +574,13 @@ class AvmWrapper(FritzBoxTools):
partial(self.get_wan_dsl_interface_config)
)

async def async_get_wan_link_properties(self) -> dict[str, Any]:
"""Call WANCommonInterfaceConfig service."""

return await self.hass.async_add_executor_job(
partial(self.get_wan_link_properties)
)

async def async_get_port_mapping(self, con_type: str, index: int) -> dict[str, Any]:
"""Call GetGenericPortMappingEntry action."""

@@ -668,6 +683,13 @@ class AvmWrapper(FritzBoxTools):

return self._service_call_action("WANDSLInterfaceConfig", "1", "GetInfo")

def get_wan_link_properties(self) -> dict[str, Any]:
"""Call WANCommonInterfaceConfig service."""

return self._service_call_action(
"WANCommonInterfaceConfig", "1", "GetCommonLinkProperties"
)

def set_wlan_configuration(self, index: int, turn_on: bool) -> dict[str, Any]:
"""Call SetEnable action from WLANConfiguration service."""

@@ -29,6 +29,19 @@ async def async_get_config_entry_diagnostics(
"mesh_role": avm_wrapper.mesh_role,
"last_update success": avm_wrapper.last_update_success,
"last_exception": avm_wrapper.last_exception,
"discovered_services": list(avm_wrapper.connection.services),
"client_devices": [
{
"connected_to": device.connected_to,
"connection_type": device.connection_type,
"hostname": device.hostname,
"is_connected": device.is_connected,
"last_activity": device.last_activity,
"wan_access": device.wan_access,
}
for _, device in avm_wrapper.devices.items()
],
"wan_link_properties": await avm_wrapper.async_get_wan_link_properties(),
},
}

@@ -3,7 +3,7 @@
"name": "Home Assistant Frontend",
"documentation": "https://www.home-assistant.io/integrations/frontend",
"requirements": [
"home-assistant-frontend==20220201.0"
"home-assistant-frontend==20220203.0"
],
"dependencies": [
"api",
```
```
@@ -227,7 +227,7 @@ def _async_subscribe_pico_remote_events(
action = ACTION_RELEASE

type_ = device["type"]
name = device["name"]
area, name = device["name"].split("_", 1)
button_number = device["button_number"]
# The original implementation used LIP instead of LEAP
# so we need to convert the button number to maintain compat
@@ -252,7 +252,7 @@ def _async_subscribe_pico_remote_events(
ATTR_BUTTON_NUMBER: lip_button_number,
ATTR_LEAP_BUTTON_NUMBER: button_number,
ATTR_DEVICE_NAME: name,
ATTR_AREA_NAME: name.split("_")[0],
ATTR_AREA_NAME: area,
ATTR_ACTION: action,
},
)

@@ -2,7 +2,7 @@
"domain": "lutron_caseta",
"name": "Lutron Cas\u00e9ta",
"documentation": "https://www.home-assistant.io/integrations/lutron_caseta",
"requirements": ["pylutron-caseta==0.13.0"],
"requirements": ["pylutron-caseta==0.13.1"],
"config_flow": true,
"zeroconf": ["_leap._tcp.local."],
"homekit": {
```
```
@@ -20,6 +20,8 @@ from homeassistant.const import (
CONF_PAYLOAD_OFF,
CONF_PAYLOAD_ON,
CONF_VALUE_TEMPLATE,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
@@ -27,6 +29,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
import homeassistant.helpers.event as evt
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util

@@ -95,7 +98,7 @@ async def _async_setup_entity(
async_add_entities([MqttBinarySensor(hass, config, config_entry, discovery_data)])


class MqttBinarySensor(MqttEntity, BinarySensorEntity):
class MqttBinarySensor(MqttEntity, BinarySensorEntity, RestoreEntity):
"""Representation a binary sensor that is updated by MQTT."""

_entity_id_format = binary_sensor.ENTITY_ID_FORMAT
@@ -113,6 +116,42 @@ class MqttBinarySensor(MqttEntity, BinarySensorEntity):

MqttEntity.__init__(self, hass, config, config_entry, discovery_data)

async def async_added_to_hass(self) -> None:
"""Restore state for entities with expire_after set."""
await super().async_added_to_hass()
if (
(expire_after := self._config.get(CONF_EXPIRE_AFTER)) is not None
and expire_after > 0
and (last_state := await self.async_get_last_state()) is not None
and last_state.state not in [STATE_UNKNOWN, STATE_UNAVAILABLE]
):
expiration_at = last_state.last_changed + timedelta(seconds=expire_after)
if expiration_at < (time_now := dt_util.utcnow()):
# Skip reactivating the binary_sensor
_LOGGER.debug("Skip state recovery after reload for %s", self.entity_id)
return
self._expired = False
self._state = last_state.state

self._expiration_trigger = async_track_point_in_utc_time(
self.hass, self._value_is_expired, expiration_at
)
_LOGGER.debug(
"State recovered after reload for %s, remaining time before expiring %s",
self.entity_id,
expiration_at - time_now,
)

async def async_will_remove_from_hass(self) -> None:
"""Remove exprire triggers."""
# Clean up expire triggers
if self._expiration_trigger:
_LOGGER.debug("Clean up expire after trigger for %s", self.entity_id)
self._expiration_trigger()
self._expiration_trigger = None
self._expired = False
await MqttEntity.async_will_remove_from_hass(self)

@staticmethod
def config_schema():
"""Return the config schema."""
```
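The restore logic above hinges on one check: a saved state is only reinstated if `last_changed + expire_after` is still in the future. A minimal, self-contained sketch of that check (illustrative names only, not the integration's actual API):

```python
# Standalone sketch of the expire_after restore check; `should_restore` is a
# hypothetical helper, not part of Home Assistant.
from __future__ import annotations

from datetime import datetime, timedelta, timezone


def should_restore(last_changed: datetime, expire_after: int | None) -> bool:
    """Return True if a stored state is still inside its expire_after window."""
    if expire_after is None or expire_after <= 0:
        return False
    expiration_at = last_changed + timedelta(seconds=expire_after)
    return expiration_at >= datetime.now(timezone.utc)


# A state saved 10 seconds ago survives expire_after=30 but not expire_after=5.
saved = datetime.now(timezone.utc) - timedelta(seconds=10)
assert should_restore(saved, 30) is True
assert should_restore(saved, 5) is False
```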
```
@@ -139,7 +139,7 @@ async def info_for_device(hass, device_id):
"topic": topic,
"messages": [
{
"payload": msg.payload,
"payload": str(msg.payload),
"qos": msg.qos,
"retain": msg.retain,
"time": msg.timestamp,

@@ -523,6 +523,11 @@ class MqttDiscoveryUpdate(Entity):
async def async_removed_from_registry(self) -> None:
"""Clear retained discovery topic in broker."""
if not self._removed_from_hass:
# Stop subscribing to discovery updates to not trigger when we clear the
# discovery topic
self._cleanup_discovery_on_remove()

# Clear the discovery topic so the entity is not rediscovered after a restart
discovery_topic = self._discovery_data[ATTR_DISCOVERY_TOPIC]
publish(self.hass, discovery_topic, "", retain=True)
```
```
@@ -23,12 +23,15 @@ from homeassistant.const import (
CONF_NAME,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util

@@ -140,7 +143,7 @@ async def _async_setup_entity(
async_add_entities([MqttSensor(hass, config, config_entry, discovery_data)])


class MqttSensor(MqttEntity, SensorEntity):
class MqttSensor(MqttEntity, SensorEntity, RestoreEntity):
"""Representation of a sensor that can be updated using MQTT."""

_entity_id_format = ENTITY_ID_FORMAT
@@ -160,6 +163,42 @@ class MqttSensor(MqttEntity, SensorEntity):

MqttEntity.__init__(self, hass, config, config_entry, discovery_data)

async def async_added_to_hass(self) -> None:
"""Restore state for entities with expire_after set."""
await super().async_added_to_hass()
if (
(expire_after := self._config.get(CONF_EXPIRE_AFTER)) is not None
and expire_after > 0
and (last_state := await self.async_get_last_state()) is not None
and last_state.state not in [STATE_UNKNOWN, STATE_UNAVAILABLE]
):
expiration_at = last_state.last_changed + timedelta(seconds=expire_after)
if expiration_at < (time_now := dt_util.utcnow()):
# Skip reactivating the sensor
_LOGGER.debug("Skip state recovery after reload for %s", self.entity_id)
return
self._expired = False
self._state = last_state.state

self._expiration_trigger = async_track_point_in_utc_time(
self.hass, self._value_is_expired, expiration_at
)
_LOGGER.debug(
"State recovered after reload for %s, remaining time before expiring %s",
self.entity_id,
expiration_at - time_now,
)

async def async_will_remove_from_hass(self) -> None:
"""Remove exprire triggers."""
# Clean up expire triggers
if self._expiration_trigger:
_LOGGER.debug("Clean up expire after trigger for %s", self.entity_id)
self._expiration_trigger()
self._expiration_trigger = None
self._expired = False
await MqttEntity.async_will_remove_from_hass(self)

@staticmethod
def config_schema():
"""Return the config schema."""
@@ -197,7 +236,7 @@ class MqttSensor(MqttEntity, SensorEntity):
self.hass, self._value_is_expired, expiration_at
)

payload = self._template(msg.payload)
payload = self._template(msg.payload, default=self._state)

if payload is not None and self.device_class in (
SensorDeviceClass.DATE,
```
```
@@ -70,7 +70,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

try:
await openuv.async_update()
except OpenUvError as err:
except HomeAssistantError as err:
LOGGER.error("Config entry failed: %s", err)
raise ConfigEntryNotReady from err

@@ -4,7 +4,7 @@
"documentation": "https://www.home-assistant.io/integrations/pvoutput",
"config_flow": true,
"codeowners": ["@fabaff", "@frenck"],
"requirements": ["pvo==0.2.0"],
"requirements": ["pvo==0.2.1"],
"iot_class": "cloud_polling",
"quality_scale": "platinum"
}

@@ -51,7 +51,8 @@ class HASensemeLight(SensemeEntity, LightEntity):
def _async_update_attrs(self) -> None:
"""Update attrs from device."""
self._attr_is_on = self._device.light_on
self._attr_brightness = int(min(255, self._device.light_brightness * 16))
if self._device.light_brightness is not None:
self._attr_brightness = int(min(255, self._device.light_brightness * 16))

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn on the light."""

@@ -264,7 +264,8 @@ def get_model_name(info: dict[str, Any]) -> str:

def get_rpc_channel_name(device: RpcDevice, key: str) -> str:
"""Get name based on device and channel name."""
key = key.replace("input", "switch")
if device.config.get("switch:0"):
key = key.replace("input", "switch")
device_name = get_rpc_device_name(device)
entity_name: str | None = device.config[key].get("name", device_name)

@@ -67,7 +67,7 @@ class ShodanSensor(SensorEntity):
def update(self) -> None:
"""Get the latest data and updates the states."""
data = self.data.update()
self._attr_native_value = data.details["total"]
self._attr_native_value = data["total"]


class ShodanData:

@@ -38,3 +38,4 @@ KEY_MOISTURE: Final = "moisture"
KEY_POWER: Final = "power"

PREVIOUS_STATE: Final = "previous_state"
AVAILABILITY_EVENT_CODE: Final = "RP"

@@ -14,7 +14,7 @@ from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import StateType

from .const import DOMAIN, SIA_EVENT, SIA_HUB_ZONE
from .const import AVAILABILITY_EVENT_CODE, DOMAIN, SIA_EVENT, SIA_HUB_ZONE
from .utils import get_attr_from_sia_event, get_unavailability_interval

_LOGGER = logging.getLogger(__name__)
@@ -105,7 +105,7 @@ class SIABaseEntity(RestoreEntity):
return
self._attr_extra_state_attributes.update(get_attr_from_sia_event(sia_event))
state_changed = self.update_state(sia_event)
if state_changed:
if state_changed or sia_event.code == AVAILABILITY_EVENT_CODE:
self.async_reset_availability_cb()
self.async_write_ha_state()
```
```
@@ -820,17 +820,6 @@ class SimpliSafeEntity(CoordinatorEntity):
):
return

if event.event_type in (EVENT_CONNECTION_LOST, EVENT_POWER_OUTAGE):
self._online = False
elif event.event_type in (EVENT_CONNECTION_RESTORED, EVENT_POWER_RESTORED):
self._online = True

# It's uncertain whether SimpliSafe events will still propagate down the
# websocket when the base station is offline. Just in case, we guard against
# further action until connection is restored:
if not self._online:
return

sensor_type: str | None
if event.sensor_type:
sensor_type = event.sensor_type.name
@@ -846,6 +835,19 @@ class SimpliSafeEntity(CoordinatorEntity):
}
)

# It's unknown whether these events reach the base station (since the connection
# is lost); we include this for completeness and coverage:
if event.event_type in (EVENT_CONNECTION_LOST, EVENT_POWER_OUTAGE):
self._online = False
return

# If the base station comes back online, set entities to available, but don't
# instruct the entities to update their state (since there won't be anything new
# until the next websocket event or REST API update:
if event.event_type in (EVENT_CONNECTION_RESTORED, EVENT_POWER_RESTORED):
self._online = True
return

self.async_update_from_websocket_event(event)
self.async_write_ha_state()
```
```
@@ -160,6 +160,7 @@ SONOS_SPEAKER_ACTIVITY = "sonos_speaker_activity"
SONOS_SPEAKER_ADDED = "sonos_speaker_added"
SONOS_STATE_UPDATED = "sonos_state_updated"
SONOS_REBOOTED = "sonos_rebooted"
SONOS_VANISHED = "sonos_vanished"

SOURCE_LINEIN = "Line-in"
SOURCE_TV = "TV"

@@ -91,9 +91,13 @@ async def async_generate_media_info(
payload[attrib] = getattr(speaker.media, attrib)

def poll_current_track_info():
return speaker.soco.avTransport.GetPositionInfo(
[("InstanceID", 0), ("Channel", "Master")]
)
try:
return speaker.soco.avTransport.GetPositionInfo(
[("InstanceID", 0), ("Channel", "Master")],
timeout=3,
)
except OSError as ex:
return f"Error retrieving: {ex}"

payload["current_track_poll"] = await hass.async_add_executor_job(
poll_current_track_info

@@ -12,6 +12,7 @@ from typing import Any
import urllib.parse

import async_timeout
import defusedxml.ElementTree as ET
from soco.core import MUSIC_SRC_LINE_IN, MUSIC_SRC_RADIO, MUSIC_SRC_TV, SoCo
from soco.data_structures import DidlAudioBroadcast, DidlPlaylistContainer
from soco.events_base import Event as SonosEvent, SubscriptionBase
@@ -56,6 +57,7 @@ from .const import (
SONOS_STATE_PLAYING,
SONOS_STATE_TRANSITIONING,
SONOS_STATE_UPDATED,
SONOS_VANISHED,
SOURCE_LINEIN,
SOURCE_TV,
SUBSCRIPTION_TIMEOUT,
@@ -225,6 +227,7 @@ class SonosSpeaker:
(SONOS_SPEAKER_ADDED, self.update_group_for_uid),
(f"{SONOS_REBOOTED}-{self.soco.uid}", self.async_rebooted),
(f"{SONOS_SPEAKER_ACTIVITY}-{self.soco.uid}", self.speaker_activity),
(f"{SONOS_VANISHED}-{self.soco.uid}", self.async_vanished),
)

for (signal, target) in dispatch_pairs:
@@ -388,6 +391,8 @@ class SonosSpeaker:

async def async_unsubscribe(self) -> None:
"""Cancel all subscriptions."""
if not self._subscriptions:
return
_LOGGER.debug("Unsubscribing from events for %s", self.zone_name)
results = await asyncio.gather(
*(subscription.unsubscribe() for subscription in self._subscriptions),
@@ -395,7 +400,12 @@ class SonosSpeaker:
)
for result in results:
if isinstance(result, Exception):
_LOGGER.debug("Unsubscribe failed for %s: %s", self.zone_name, result)
_LOGGER.debug(
"Unsubscribe failed for %s: %s",
self.zone_name,
result,
exc_info=result,
)
self._subscriptions = []

@callback
@@ -572,6 +582,15 @@ class SonosSpeaker:
self.hass.data[DATA_SONOS].discovery_known.discard(self.soco.uid)
self.async_write_entity_states()

async def async_vanished(self, reason: str) -> None:
"""Handle removal of speaker when marked as vanished."""
if not self.available:
return
_LOGGER.debug(
"%s has vanished (%s), marking unavailable", self.zone_name, reason
)
await self.async_offline()

async def async_rebooted(self, soco: SoCo) -> None:
"""Handle a detected speaker reboot."""
_LOGGER.warning(
@@ -685,7 +704,25 @@ class SonosSpeaker:
@callback
def async_update_groups(self, event: SonosEvent) -> None:
"""Handle callback for topology change event."""
if not hasattr(event, "zone_player_uui_ds_in_group"):
if xml := event.variables.get("zone_group_state"):
zgs = ET.fromstring(xml)
for vanished_device in zgs.find("VanishedDevices") or []:
if (reason := vanished_device.get("Reason")) != "sleeping":
_LOGGER.debug(
"Ignoring %s marked %s as vanished with reason: %s",
self.zone_name,
vanished_device.get("ZoneName"),
reason,
)
continue
uid = vanished_device.get("UUID")
async_dispatcher_send(
self.hass,
f"{SONOS_VANISHED}-{uid}",
reason,
)

if "zone_player_uui_ds_in_group" not in event.variables:
return
self.event_stats.process(event)
self.hass.async_create_task(self.create_update_groups_coro(event))

@@ -3,7 +3,7 @@
"name": "Tile",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/tile",
"requirements": ["pytile==2022.01.0"],
"requirements": ["pytile==2022.02.0"],
"codeowners": ["@bachya"],
"iot_class": "cloud_polling"
}
```
```
@@ -72,9 +72,11 @@ class IntegerTypeData:
return remap_value(value, from_min, from_max, self.min, self.max, reverse)

@classmethod
def from_json(cls, dpcode: DPCode, data: str) -> IntegerTypeData:
def from_json(cls, dpcode: DPCode, data: str) -> IntegerTypeData | None:
"""Load JSON string and return a IntegerTypeData object."""
parsed = json.loads(data)
if not (parsed := json.loads(data)):
return None

return cls(
dpcode,
min=int(parsed["min"]),
@@ -94,9 +96,11 @@ class EnumTypeData:
range: list[str]

@classmethod
def from_json(cls, dpcode: DPCode, data: str) -> EnumTypeData:
def from_json(cls, dpcode: DPCode, data: str) -> EnumTypeData | None:
"""Load JSON string and return a EnumTypeData object."""
return cls(dpcode, **json.loads(data))
if not (parsed := json.loads(data)):
return None
return cls(dpcode, **parsed)


@dataclass
@@ -222,17 +226,25 @@ class TuyaEntity(Entity):
dptype == DPType.ENUM
and getattr(self.device, key)[dpcode].type == DPType.ENUM
):
return EnumTypeData.from_json(
dpcode, getattr(self.device, key)[dpcode].values
)
if not (
enum_type := EnumTypeData.from_json(
dpcode, getattr(self.device, key)[dpcode].values
)
):
continue
return enum_type

if (
dptype == DPType.INTEGER
and getattr(self.device, key)[dpcode].type == DPType.INTEGER
):
return IntegerTypeData.from_json(
dpcode, getattr(self.device, key)[dpcode].values
)
if not (
integer_type := IntegerTypeData.from_json(
dpcode, getattr(self.device, key)[dpcode].values
)
):
continue
return integer_type

if dptype not in (DPType.ENUM, DPType.INTEGER):
return dpcode

@@ -223,7 +223,9 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):

# Determine fan modes
if enum_type := self.find_dpcode(
DPCode.FAN_SPEED_ENUM, dptype=DPType.ENUM, prefer_function=True
(DPCode.FAN_SPEED_ENUM, DPCode.WINDSPEED),
dptype=DPType.ENUM,
prefer_function=True,
):
self._attr_supported_features |= SUPPORT_FAN_MODE
self._attr_fan_modes = enum_type.range

@@ -360,6 +360,7 @@ class DPCode(StrEnum):
WATER_SET = "water_set" # Water level
WATERSENSOR_STATE = "watersensor_state"
WET = "wet" # Humidification
WINDSPEED = "windspeed"
WIRELESS_BATTERYLOCK = "wireless_batterylock"
WIRELESS_ELECTRICITY = "wireless_electricity"
WORK_MODE = "work_mode" # Working mode

@@ -37,6 +37,7 @@ TUYA_MODE_RETURN_HOME = "chargego"
TUYA_STATUS_TO_HA = {
"charge_done": STATE_DOCKED,
"chargecompleted": STATE_DOCKED,
"chargego": STATE_DOCKED,
"charging": STATE_DOCKED,
"cleaning": STATE_CLEANING,
"docking": STATE_RETURNING,
@@ -48,11 +49,14 @@ TUYA_STATUS_TO_HA = {
"pick_zone_clean": STATE_CLEANING,
"pos_arrived": STATE_CLEANING,
"pos_unarrive": STATE_CLEANING,
"random": STATE_CLEANING,
"sleep": STATE_IDLE,
"smart_clean": STATE_CLEANING,
"smart": STATE_CLEANING,
"spot_clean": STATE_CLEANING,
"standby": STATE_IDLE,
"wall_clean": STATE_CLEANING,
"wall_follow": STATE_CLEANING,
"zone_clean": STATE_CLEANING,
}
```
```
@@ -24,6 +24,7 @@ from homeassistant.data_entry_flow import FlowResult
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.typing import DiscoveryInfoType
from homeassistant.loader import async_get_integration
from homeassistant.util.network import is_ip_address

from .const import (
CONF_ALL_UPDATES,
@@ -90,7 +91,11 @@ class ProtectFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
await self.async_set_unique_id(mac)
source_ip = discovery_info["source_ip"]
direct_connect_domain = discovery_info["direct_connect_domain"]
for entry in self._async_current_entries(include_ignore=False):
for entry in self._async_current_entries():
if entry.source == config_entries.SOURCE_IGNORE:
if entry.unique_id == mac:
return self.async_abort(reason="already_configured")
continue
entry_host = entry.data[CONF_HOST]
entry_has_direct_connect = _host_is_direct_connect(entry_host)
if entry.unique_id == mac:
@@ -101,7 +106,11 @@ class ProtectFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
and entry_host != direct_connect_domain
):
new_host = direct_connect_domain
elif not entry_has_direct_connect and entry_host != source_ip:
elif (
not entry_has_direct_connect
and is_ip_address(entry_host)
and entry_host != source_ip
):
new_host = source_ip
if new_host:
self.hass.config_entries.async_update_entry(

@@ -2,7 +2,7 @@
"domain": "velbus",
"name": "Velbus",
"documentation": "https://www.home-assistant.io/integrations/velbus",
"requirements": ["velbus-aio==2021.11.7"],
"requirements": ["velbus-aio==2022.2.1"],
"config_flow": true,
"codeowners": ["@Cereal2nd", "@brefra"],
"dependencies": ["usb"],
```
```
@@ -7,7 +7,7 @@ from .backports.enum import StrEnum

MAJOR_VERSION: Final = 2022
MINOR_VERSION: Final = 2
PATCH_VERSION: Final = "0b6"
PATCH_VERSION: Final = "1"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)

@@ -199,3 +199,15 @@ class RequiredParameterMissing(HomeAssistantError):
),
)
self.parameter_names = parameter_names


class DependencyError(HomeAssistantError):
"""Raised when dependencies can not be setup."""

def __init__(self, failed_dependencies: list[str]) -> None:
"""Initialize error."""
super().__init__(
self,
f"Could not setup dependencies: {', '.join(failed_dependencies)}",
)
self.failed_dependencies = failed_dependencies
```
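For context, the new `DependencyError` simply carries the list of failed dependencies so callers such as the config-entries flow view above can report them. A minimal, self-contained sketch of the pattern (stand-in base class, not an import of Home Assistant):

```python
# Illustrative re-implementation of the DependencyError pattern shown above.
class HomeAssistantError(Exception):
    """Stand-in for homeassistant.exceptions.HomeAssistantError."""


class DependencyError(HomeAssistantError):
    """Raised when dependencies cannot be set up."""

    def __init__(self, failed_dependencies: list[str]) -> None:
        super().__init__(
            f"Could not setup dependencies: {', '.join(failed_dependencies)}"
        )
        self.failed_dependencies = failed_dependencies


try:
    raise DependencyError(["dependency_1"])
except DependencyError as exc:
    # Mirrors the "Failed dependencies ..." body returned by the flow view.
    print(f"Failed dependencies {', '.join(exc.failed_dependencies)}")
```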
```
@@ -742,7 +742,7 @@ class _ScriptRun:
if saved_repeat_vars:
self._variables["repeat"] = saved_repeat_vars
else:
del self._variables["repeat"]
self._variables.pop("repeat", None) # Not set if count = 0

async def _async_choose_step(self) -> None:
"""Choose a sequence."""

@@ -15,7 +15,7 @@ ciso8601==2.2.0
cryptography==35.0.0
emoji==1.6.3
hass-nabucasa==0.52.0
home-assistant-frontend==20220201.0
home-assistant-frontend==20220203.0
httpx==0.21.3
ifaddr==0.1.7
jinja2==3.0.3
```
```
@@ -18,7 +18,7 @@ from .const import (
Platform,
)
from .core import CALLBACK_TYPE
from .exceptions import HomeAssistantError
from .exceptions import DependencyError, HomeAssistantError
from .helpers.typing import ConfigType
from .util import dt as dt_util, ensure_unique_string

@@ -83,8 +83,11 @@ async def async_setup_component(

async def _async_process_dependencies(
hass: core.HomeAssistant, config: ConfigType, integration: loader.Integration
) -> bool:
"""Ensure all dependencies are set up."""
) -> list[str]:
"""Ensure all dependencies are set up.

Returns a list of dependencies which failed to set up.
"""
dependencies_tasks = {
dep: hass.loop.create_task(async_setup_component(hass, dep, config))
for dep in integration.dependencies
@@ -104,7 +107,7 @@ async def _async_process_dependencies(
)

if not dependencies_tasks and not after_dependencies_tasks:
return True
return []

if dependencies_tasks:
_LOGGER.debug(
@@ -135,8 +138,7 @@ async def _async_process_dependencies(
", ".join(failed),
)

return False
return True
return failed


async def _async_setup_component(
@@ -341,8 +343,8 @@ async def async_process_deps_reqs(
elif integration.domain in processed:
return

if not await _async_process_dependencies(hass, config, integration):
raise HomeAssistantError("Could not set up all dependencies.")
if failed_deps := await _async_process_dependencies(hass, config, integration):
raise DependencyError(failed_deps)

if not hass.config.skip_pip and integration.requirements:
async with hass.timeout.async_freeze(integration.domain):
```
```
@@ -88,7 +88,7 @@ def run_callback_threadsafe(
return future


def check_loop(strict: bool = True) -> None:
def check_loop(func: Callable, strict: bool = True) -> None:
"""Warn if called inside the event loop. Raise if `strict` is True."""
try:
get_running_loop()
@@ -101,7 +101,18 @@ def check_loop(strict: bool = True) -> None:

found_frame = None

for frame in reversed(extract_stack()):
stack = extract_stack()

if (
func.__name__ == "sleep"
and len(stack) >= 3
and stack[-3].filename.endswith("pydevd.py")
):
# Don't report `time.sleep` injected by the debugger (pydevd.py)
# stack[-1] is us, stack[-2] is protected_loop_func, stack[-3] is the offender
return

for frame in reversed(stack):
for path in ("custom_components/", "homeassistant/components/"):
try:
index = frame.filename.index(path)
@@ -152,7 +163,7 @@ def protect_loop(func: Callable, strict: bool = True) -> Callable:

@functools.wraps(func)
def protected_loop_func(*args, **kwargs): # type: ignore
check_loop(strict=strict)
check_loop(func, strict=strict)
return func(*args, **kwargs)

return protected_loop_func
```
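The change above threads the wrapped function into `check_loop` so the debugger's injected `time.sleep` can be ignored. A minimal sketch of the underlying wrap-and-check pattern, stripped of the stack inspection (standalone illustration, not the real helper):

```python
# Illustrative sketch: warn or raise when a wrapped blocking callable is
# invoked from inside a running asyncio event loop.
import asyncio
import functools
import time
from collections.abc import Callable


def check_loop(func: Callable, strict: bool = True) -> None:
    """Complain if called inside the running event loop."""
    try:
        asyncio.get_running_loop()
    except RuntimeError:
        return  # No running loop: the blocking call is harmless.
    message = f"Detected blocking call to {func.__name__} inside the event loop"
    if strict:
        raise RuntimeError(message)
    print(message)


def protect_loop(func: Callable, strict: bool = True) -> Callable:
    """Wrap func so every call first runs check_loop."""

    @functools.wraps(func)
    def protected_loop_func(*args, **kwargs):
        check_loop(func, strict=strict)
        return func(*args, **kwargs)

    return protected_loop_func


# Example: a guarded time.sleep that only warns when misused in the loop.
safe_sleep = protect_loop(time.sleep, strict=False)
safe_sleep(0.01)
```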
```
@@ -293,7 +293,7 @@ airthings_cloud==0.1.0
airtouch4pyapi==1.0.5

# homeassistant.components.aladdin_connect
aladdin_connect==0.3
aladdin_connect==0.4

# homeassistant.components.alpha_vantage
alpha_vantage==2.3.1
@@ -842,7 +842,7 @@ hole==0.7.0
holidays==0.12

# homeassistant.components.frontend
home-assistant-frontend==20220201.0
home-assistant-frontend==20220203.0

# homeassistant.components.zwave
homeassistant-pyozw==0.1.10
@@ -1310,7 +1310,7 @@ pushbullet.py==0.11.0
pushover_complete==1.1.1

# homeassistant.components.pvoutput
pvo==0.2.0
pvo==0.2.1

# homeassistant.components.rpi_gpio_pwm
pwmled==1.6.7
@@ -1648,7 +1648,7 @@ pylitejet==0.3.0
pylitterbot==2021.12.0

# homeassistant.components.lutron_caseta
pylutron-caseta==0.13.0
pylutron-caseta==0.13.1

# homeassistant.components.lutron
pylutron==0.2.8
@@ -1999,7 +1999,7 @@ python_opendata_transport==0.3.0
pythonegardia==1.0.40

# homeassistant.components.tile
pytile==2022.01.0
pytile==2022.02.0

# homeassistant.components.touchline
pytouchline==0.7
@@ -2423,7 +2423,7 @@ vallox-websocket-api==2.9.0
vehicle==0.3.1

# homeassistant.components.velbus
velbus-aio==2021.11.7
velbus-aio==2022.2.1

# homeassistant.components.venstar
venstarcolortouch==0.15

@@ -543,7 +543,7 @@ hole==0.7.0
holidays==0.12

# homeassistant.components.frontend
home-assistant-frontend==20220201.0
home-assistant-frontend==20220203.0

# homeassistant.components.zwave
homeassistant-pyozw==0.1.10
@@ -814,7 +814,7 @@ pure-python-adb[async]==0.3.0.dev0
pushbullet.py==0.11.0

# homeassistant.components.pvoutput
pvo==0.2.0
pvo==0.2.1

# homeassistant.components.canary
py-canary==0.5.1
@@ -1032,7 +1032,7 @@ pylitejet==0.3.0
pylitterbot==2021.12.0

# homeassistant.components.lutron_caseta
pylutron-caseta==0.13.0
pylutron-caseta==0.13.1

# homeassistant.components.mailgun
pymailgunner==1.4
@@ -1230,7 +1230,7 @@ python-twitch-client==0.6.0
python_awair==0.2.1

# homeassistant.components.tile
pytile==2022.01.0
pytile==2022.02.0

# homeassistant.components.traccar
pytraccar==0.10.0
@@ -1481,7 +1481,7 @@ vallox-websocket-api==2.9.0
vehicle==0.3.1

# homeassistant.components.velbus
velbus-aio==2021.11.7
velbus-aio==2022.2.1

# homeassistant.components.venstar
venstarcolortouch==0.15

@@ -1,6 +1,6 @@
[metadata]
name = homeassistant
version = 2022.2.0b6
version = 2022.2.1
author = The Home Assistant Authors
author_email = hello@home-assistant.io
license = Apache-2.0
```
```
@@ -46,7 +46,7 @@ async def test_weather_without_forecast(hass):
assert state.attributes.get(ATTR_WEATHER_TEMPERATURE) == 22.6
assert state.attributes.get(ATTR_WEATHER_VISIBILITY) == 16.1
assert state.attributes.get(ATTR_WEATHER_WIND_BEARING) == 180
assert state.attributes.get(ATTR_WEATHER_WIND_SPEED) == 14.5
assert state.attributes.get(ATTR_WEATHER_WIND_SPEED) == 4.03
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION

entry = registry.async_get("weather.home")
@@ -68,7 +68,7 @@ async def test_weather_with_forecast(hass):
assert state.attributes.get(ATTR_WEATHER_TEMPERATURE) == 22.6
assert state.attributes.get(ATTR_WEATHER_VISIBILITY) == 16.1
assert state.attributes.get(ATTR_WEATHER_WIND_BEARING) == 180
assert state.attributes.get(ATTR_WEATHER_WIND_SPEED) == 14.5
assert state.attributes.get(ATTR_WEATHER_WIND_SPEED) == 4.03
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
forecast = state.attributes.get(ATTR_FORECAST)[0]
assert forecast.get(ATTR_FORECAST_CONDITION) == "lightning-rainy"
@@ -78,7 +78,7 @@ async def test_weather_with_forecast(hass):
assert forecast.get(ATTR_FORECAST_TEMP_LOW) == 15.4
assert forecast.get(ATTR_FORECAST_TIME) == "2020-07-26T05:00:00+00:00"
assert forecast.get(ATTR_FORECAST_WIND_BEARING) == 166
assert forecast.get(ATTR_FORECAST_WIND_SPEED) == 13.0
assert forecast.get(ATTR_FORECAST_WIND_SPEED) == 3.61

entry = registry.async_get("weather.home")
assert entry
```
```
@@ -252,6 +252,35 @@ async def test_initialize_flow(hass, client):
}


async def test_initialize_flow_unmet_dependency(hass, client):
"""Test unmet dependencies are listed."""
mock_entity_platform(hass, "config_flow.test", None)

config_schema = vol.Schema({"comp_conf": {"hello": str}}, required=True)
mock_integration(
hass, MockModule(domain="dependency_1", config_schema=config_schema)
)
# The test2 config flow should fail because dependency_1 can't be automatically setup
mock_integration(
hass,
MockModule(domain="test2", partial_manifest={"dependencies": ["dependency_1"]}),
)

class TestFlow(core_ce.ConfigFlow):
async def async_step_user(self, user_input=None):
pass

with patch.dict(HANDLERS, {"test2": TestFlow}):
resp = await client.post(
"/api/config/config_entries/flow",
json={"handler": "test2", "show_advanced_options": True},
)

assert resp.status == HTTPStatus.BAD_REQUEST
data = await resp.text()
assert data == "Failed dependencies dependency_1"


async def test_initialize_flow_unauth(hass, client, hass_admin_user):
"""Test we can initialize a flow."""
hass_admin_user.groups = []
```
```
@@ -7,6 +7,7 @@ from aiodns.error import DNSError
import pytest

from homeassistant import config_entries
from homeassistant.components.dnsip.config_flow import DATA_SCHEMA, DATA_SCHEMA_ADV
from homeassistant.components.dnsip.const import (
CONF_HOSTNAME,
CONF_IPV4,
@@ -47,6 +48,7 @@ async def test_form(hass: HomeAssistant) -> None:
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["data_schema"] == DATA_SCHEMA
assert result["errors"] == {}

with patch(
@@ -79,6 +81,48 @@ async def test_form(hass: HomeAssistant) -> None:
assert len(mock_setup_entry.mock_calls) == 1


async def test_form_adv(hass: HomeAssistant) -> None:
"""Test we get the form with advanced options on."""

result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER, "show_advanced_options": True},
)

assert result["data_schema"] == DATA_SCHEMA_ADV

with patch(
"homeassistant.components.dnsip.config_flow.aiodns.DNSResolver",
return_value=RetrieveDNS(),
), patch(
"homeassistant.components.dnsip.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_HOSTNAME: "home-assistant.io",
CONF_RESOLVER: "8.8.8.8",
CONF_RESOLVER_IPV6: "2620:0:ccc::2",
},
)
await hass.async_block_till_done()

assert result2["type"] == RESULT_TYPE_CREATE_ENTRY
assert result2["title"] == "home-assistant.io"
assert result2["data"] == {
"hostname": "home-assistant.io",
"name": "home-assistant.io",
"ipv4": True,
"ipv6": True,
}
assert result2["options"] == {
"resolver": "8.8.8.8",
"resolver_ipv6": "2620:0:ccc::2",
}
assert len(mock_setup_entry.mock_calls) == 1


async def test_form_error(hass: HomeAssistant) -> None:
"""Test validate url fails."""
result = await hass.config_entries.flow.async_init(
```
```
@@ -36,6 +36,7 @@ from .test_common import (
help_test_entity_device_info_with_identifier,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
help_test_reload_with_config,
help_test_reloadable,
help_test_setting_attribute_via_mqtt_json_message,
help_test_setting_attribute_with_template,
@@ -44,7 +45,11 @@ from .test_common import (
help_test_update_with_json_attrs_not_dict,
)

from tests.common import async_fire_mqtt_message, async_fire_time_changed
from tests.common import (
assert_setup_component,
async_fire_mqtt_message,
async_fire_time_changed,
)

DEFAULT_CONFIG = {
binary_sensor.DOMAIN: {
@@ -868,3 +873,87 @@ async def test_reloadable(hass, mqtt_mock, caplog, tmp_path):
domain = binary_sensor.DOMAIN
config = DEFAULT_CONFIG[domain]
await help_test_reloadable(hass, mqtt_mock, caplog, tmp_path, domain, config)


async def test_cleanup_triggers_and_restoring_state(
hass, mqtt_mock, caplog, tmp_path, freezer
):
"""Test cleanup old triggers at reloading and restoring the state."""
domain = binary_sensor.DOMAIN
config1 = copy.deepcopy(DEFAULT_CONFIG[domain])
config1["name"] = "test1"
config1["expire_after"] = 30
config1["state_topic"] = "test-topic1"
config2 = copy.deepcopy(DEFAULT_CONFIG[domain])
config2["name"] = "test2"
config2["expire_after"] = 5
config2["state_topic"] = "test-topic2"

freezer.move_to("2022-02-02 12:01:00+01:00")

assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{binary_sensor.DOMAIN: [config1, config2]},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "test-topic1", "ON")
state = hass.states.get("binary_sensor.test1")
assert state.state == "on"

async_fire_mqtt_message(hass, "test-topic2", "ON")
state = hass.states.get("binary_sensor.test2")
assert state.state == "on"

freezer.move_to("2022-02-02 12:01:10+01:00")

await help_test_reload_with_config(
hass, caplog, tmp_path, domain, [config1, config2]
)
assert "Clean up expire after trigger for binary_sensor.test1" in caplog.text
assert "Clean up expire after trigger for binary_sensor.test2" not in caplog.text
assert (
"State recovered after reload for binary_sensor.test1, remaining time before expiring"
in caplog.text
)
assert "State recovered after reload for binary_sensor.test2" not in caplog.text

state = hass.states.get("binary_sensor.test1")
assert state.state == "on"

state = hass.states.get("binary_sensor.test2")
assert state.state == STATE_UNAVAILABLE

async_fire_mqtt_message(hass, "test-topic1", "OFF")
state = hass.states.get("binary_sensor.test1")
assert state.state == "off"

async_fire_mqtt_message(hass, "test-topic2", "OFF")
state = hass.states.get("binary_sensor.test2")
assert state.state == "off"


async def test_skip_restoring_state_with_over_due_expire_trigger(
hass, mqtt_mock, caplog, freezer
):
"""Test restoring a state with over due expire timer."""

freezer.move_to("2022-02-02 12:02:00+01:00")
domain = binary_sensor.DOMAIN
config3 = copy.deepcopy(DEFAULT_CONFIG[domain])
config3["name"] = "test3"
config3["expire_after"] = 10
config3["state_topic"] = "test-topic3"
fake_state = ha.State(
"binary_sensor.test3",
"on",
{},
last_changed=datetime.fromisoformat("2022-02-02 12:01:35+01:00"),
)
with patch(
"homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state",
return_value=fake_state,
), assert_setup_component(1, domain):
assert await async_setup_component(hass, domain, {domain: config3})
await hass.async_block_till_done()
assert "Skip state recovery after reload for binary_sensor.test3" in caplog.text
```
```
@@ -1222,7 +1222,7 @@ async def help_test_entity_debug_info_message(
"topic": topic,
"messages": [
{
"payload": payload,
"payload": str(payload),
"qos": 0,
"retain": False,
"time": start_dt,
@@ -1525,6 +1525,25 @@ async def help_test_publishing_with_custom_encoding(
mqtt_mock.async_publish.reset_mock()


async def help_test_reload_with_config(hass, caplog, tmp_path, domain, config):
"""Test reloading with supplied config."""
new_yaml_config_file = tmp_path / "configuration.yaml"
new_yaml_config = yaml.dump({domain: config})
new_yaml_config_file.write_text(new_yaml_config)
assert new_yaml_config_file.read_text() == new_yaml_config

with patch.object(hass_config, "YAML_CONFIG_FILE", new_yaml_config_file):
await hass.services.async_call(
"mqtt",
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()

assert "<Event event_mqtt_reloaded[L]>" in caplog.text


async def help_test_reloadable(hass, mqtt_mock, caplog, tmp_path, domain, config):
"""Test reloading an MQTT platform."""
# Create and test an old config of 2 entities based on the config supplied
@@ -1549,21 +1568,10 @@ async def help_test_reloadable(hass, mqtt_mock, caplog, tmp_path, domain, config
new_config_2["name"] = "test_new_2"
new_config_3 = copy.deepcopy(config)
new_config_3["name"] = "test_new_3"
new_yaml_config_file = tmp_path / "configuration.yaml"
new_yaml_config = yaml.dump({domain: [new_config_1, new_config_2, new_config_3]})
new_yaml_config_file.write_text(new_yaml_config)
assert new_yaml_config_file.read_text() == new_yaml_config

with patch.object(hass_config, "YAML_CONFIG_FILE", new_yaml_config_file):
await hass.services.async_call(
"mqtt",
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()

assert "<Event event_mqtt_reloaded[L]>" in caplog.text
await help_test_reload_with_config(
hass, caplog, tmp_path, domain, [new_config_1, new_config_2, new_config_3]
)

assert len(hass.states.async_all(domain)) == 3
```
```
@@ -3,7 +3,7 @@ import asyncio
from datetime import datetime, timedelta
import json
import ssl
from unittest.mock import AsyncMock, MagicMock, call, mock_open, patch
from unittest.mock import ANY, AsyncMock, MagicMock, call, mock_open, patch

import pytest
import voluptuous as vol
@@ -1540,6 +1540,68 @@ async def test_mqtt_ws_get_device_debug_info(
assert response["result"] == expected_result


async def test_mqtt_ws_get_device_debug_info_binary(
hass, device_reg, hass_ws_client, mqtt_mock
):
"""Test MQTT websocket device debug info."""
config = {
"device": {"identifiers": ["0AFFD2"]},
"platform": "mqtt",
"topic": "foobar/image",
"unique_id": "unique",
}
data = json.dumps(config)

async_fire_mqtt_message(hass, "homeassistant/camera/bla/config", data)
await hass.async_block_till_done()

# Verify device entry is created
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")})
assert device_entry is not None

small_png = (
b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x04\x00\x00\x00\x04\x08\x06"
b"\x00\x00\x00\xa9\xf1\x9e~\x00\x00\x00\x13IDATx\xdac\xfc\xcf\xc0P\xcf\x80\x04"
b"\x18I\x17\x00\x00\xf2\xae\x05\xfdR\x01\xc2\xde\x00\x00\x00\x00IEND\xaeB`\x82"
)
async_fire_mqtt_message(hass, "foobar/image", small_png)
await hass.async_block_till_done()

client = await hass_ws_client(hass)
await client.send_json(
{"id": 5, "type": "mqtt/device/debug_info", "device_id": device_entry.id}
)
response = await client.receive_json()
assert response["success"]
expected_result = {
"entities": [
{
"entity_id": "camera.mqtt_camera",
"subscriptions": [
{
"topic": "foobar/image",
"messages": [
{
"payload": str(small_png),
"qos": 0,
"retain": False,
"time": ANY,
"topic": "foobar/image",
}
],
}
],
"discovery_data": {
"payload": config,
"topic": "homeassistant/camera/bla/config",
},
}
],
"triggers": [],
}
assert response["result"] == expected_result


async def test_debug_info_multiple_devices(hass, mqtt_mock):
"""Test we get correct debug_info when multiple devices are present."""
devices = [
```
@@ -43,6 +43,7 @@ from .test_common import (
|
||||
help_test_entity_disabled_by_default,
|
||||
help_test_entity_id_update_discovery_update,
|
||||
help_test_entity_id_update_subscriptions,
|
||||
help_test_reload_with_config,
|
||||
help_test_reloadable,
|
||||
help_test_setting_attribute_via_mqtt_json_message,
|
||||
help_test_setting_attribute_with_template,
|
||||
@@ -52,7 +53,11 @@ from .test_common import (
    help_test_update_with_json_attrs_not_dict,
)

from tests.common import async_fire_mqtt_message, async_fire_time_changed
from tests.common import (
    assert_setup_component,
    async_fire_mqtt_message,
    async_fire_time_changed,
)

DEFAULT_CONFIG = {
    sensor.DOMAIN: {"platform": "mqtt", "name": "test", "state_topic": "test-topic"}
@@ -263,6 +268,38 @@ async def test_setting_sensor_value_via_mqtt_json_message(hass, mqtt_mock):
    assert state.state == "100"


async def test_setting_sensor_value_via_mqtt_json_message_and_default_current_state(
    hass, mqtt_mock
):
    """Test the setting of the value via MQTT with fall back to current state."""
    assert await async_setup_component(
        hass,
        sensor.DOMAIN,
        {
            sensor.DOMAIN: {
                "platform": "mqtt",
                "name": "test",
                "state_topic": "test-topic",
                "unit_of_measurement": "fav unit",
                "value_template": "{{ value_json.val | is_defined }}-{{ value_json.par }}",
            }
        },
    )
    await hass.async_block_till_done()

    async_fire_mqtt_message(
        hass, "test-topic", '{ "val": "valcontent", "par": "parcontent" }'
    )
    state = hass.states.get("sensor.test")

    assert state.state == "valcontent-parcontent"

    async_fire_mqtt_message(hass, "test-topic", '{ "par": "invalidcontent" }')
    state = hass.states.get("sensor.test")

    assert state.state == "valcontent-parcontent"


async def test_setting_sensor_last_reset_via_mqtt_message(hass, mqtt_mock, caplog):
    """Test the setting of the last_reset property via MQTT."""
    assert await async_setup_component(
@@ -935,6 +972,92 @@ async def test_reloadable(hass, mqtt_mock, caplog, tmp_path):
    await help_test_reloadable(hass, mqtt_mock, caplog, tmp_path, domain, config)


async def test_cleanup_triggers_and_restoring_state(
    hass, mqtt_mock, caplog, tmp_path, freezer
):
    """Test cleanup old triggers at reloading and restoring the state."""
    domain = sensor.DOMAIN
    config1 = copy.deepcopy(DEFAULT_CONFIG[domain])
    config1["name"] = "test1"
    config1["expire_after"] = 30
    config1["state_topic"] = "test-topic1"
    config2 = copy.deepcopy(DEFAULT_CONFIG[domain])
    config2["name"] = "test2"
    config2["expire_after"] = 5
    config2["state_topic"] = "test-topic2"

    freezer.move_to("2022-02-02 12:01:00+01:00")

    assert await async_setup_component(
        hass,
        domain,
        {domain: [config1, config2]},
    )
    await hass.async_block_till_done()
    async_fire_mqtt_message(hass, "test-topic1", "100")
    state = hass.states.get("sensor.test1")
    assert state.state == "100"

    async_fire_mqtt_message(hass, "test-topic2", "200")
    state = hass.states.get("sensor.test2")
    assert state.state == "200"

    freezer.move_to("2022-02-02 12:01:10+01:00")

    await help_test_reload_with_config(
        hass, caplog, tmp_path, domain, [config1, config2]
    )
    await hass.async_block_till_done()

    assert "Clean up expire after trigger for sensor.test1" in caplog.text
    assert "Clean up expire after trigger for sensor.test2" not in caplog.text
    assert (
        "State recovered after reload for sensor.test1, remaining time before expiring"
        in caplog.text
    )
    assert "State recovered after reload for sensor.test2" not in caplog.text

    state = hass.states.get("sensor.test1")
    assert state.state == "100"

    state = hass.states.get("sensor.test2")
    assert state.state == STATE_UNAVAILABLE

    async_fire_mqtt_message(hass, "test-topic1", "101")
    state = hass.states.get("sensor.test1")
    assert state.state == "101"

    async_fire_mqtt_message(hass, "test-topic2", "201")
    state = hass.states.get("sensor.test2")
    assert state.state == "201"


async def test_skip_restoring_state_with_over_due_expire_trigger(
    hass, mqtt_mock, caplog, freezer
):
    """Test restoring a state with over due expire timer."""

    freezer.move_to("2022-02-02 12:02:00+01:00")
    domain = sensor.DOMAIN
    config3 = copy.deepcopy(DEFAULT_CONFIG[domain])
    config3["name"] = "test3"
    config3["expire_after"] = 10
    config3["state_topic"] = "test-topic3"
    fake_state = ha.State(
        "sensor.test3",
        "300",
        {},
        last_changed=datetime.fromisoformat("2022-02-02 12:01:35+01:00"),
    )
    with patch(
        "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state",
        return_value=fake_state,
    ), assert_setup_component(1, domain):
        assert await async_setup_component(hass, domain, {domain: config3})
        await hass.async_block_till_done()
    assert "Skip state recovery after reload for sensor.test3" in caplog.text


@pytest.mark.parametrize(
    "topic,value,attribute,attribute_value",
    [
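
The two expire_after tests above rely on simple timestamp arithmetic. The following standalone check reproduces it using only the values that appear in the tests; the remaining() helper is illustrative and not part of the test suite:

# Worked check of the expire_after arithmetic used by the three sensors above.
# Pure datetime math on the timestamps from the tests; it does not import Home Assistant.
from datetime import datetime, timedelta

def remaining(last_changed: str, expire_after: int, now: str) -> timedelta:
    """Return how much expire_after time is left at `now` (negative if overdue)."""
    return (
        datetime.fromisoformat(last_changed)
        + timedelta(seconds=expire_after)
        - datetime.fromisoformat(now)
    )

# sensor.test1: 30 s window, reload 10 s after the message -> 20 s left, state is restored
assert remaining("2022-02-02 12:01:00+01:00", 30, "2022-02-02 12:01:10+01:00") == timedelta(seconds=20)

# sensor.test2: 5 s window, reload 10 s after the message -> overdue, becomes unavailable
assert remaining("2022-02-02 12:01:00+01:00", 5, "2022-02-02 12:01:10+01:00") < timedelta(0)

# sensor.test3: last state at 12:01:35, 10 s window, startup at 12:02:00 -> 15 s overdue, restore is skipped
assert remaining("2022-02-02 12:01:35+01:00", 10, "2022-02-02 12:02:00+01:00") == timedelta(seconds=-15)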
@@ -74,6 +74,21 @@ async def test_fan_light(hass: HomeAssistant) -> None:
    assert device.light_on is True


async def test_fan_light_no_brightness(hass: HomeAssistant) -> None:
    """Test a fan light without brightness."""
    device = _mock_device()
    device.brightness = None
    await _setup_mocked_entry(hass, device)
    entity_id = "light.haiku_fan"

    state = hass.states.get(entity_id)
    assert state.state == STATE_ON
    attributes = state.attributes
    assert attributes[ATTR_BRIGHTNESS] == 255
    assert attributes[ATTR_COLOR_MODE] == COLOR_MODE_BRIGHTNESS
    assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [COLOR_MODE_BRIGHTNESS]


async def test_standalone_light(hass: HomeAssistant) -> None:
    """Test a standalone light."""
    device = _mock_device()
@@ -418,6 +418,37 @@ async def test_discovered_by_unifi_discovery_direct_connect_updated_but_not_usin
    assert mock_config.data[CONF_HOST] == "127.0.0.1"


async def test_discovered_host_not_updated_if_existing_is_a_hostname(
    hass: HomeAssistant, mock_nvr: NVR
) -> None:
    """Test we only update the host if its an ip address from discovery."""
    mock_config = MockConfigEntry(
        domain=DOMAIN,
        data={
            "host": "a.hostname",
            "username": "test-username",
            "password": "test-password",
            "id": "UnifiProtect",
            "port": 443,
            "verify_ssl": True,
        },
        unique_id=DEVICE_MAC_ADDRESS.upper().replace(":", ""),
    )
    mock_config.add_to_hass(hass)

    with _patch_discovery():
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_DISCOVERY},
            data=UNIFI_DISCOVERY_DICT,
        )
    await hass.async_block_till_done()

    assert result["type"] == RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"
    assert mock_config.data[CONF_HOST] == "a.hostname"


async def test_discovered_by_unifi_discovery(
    hass: HomeAssistant, mock_nvr: NVR
) -> None:
@@ -723,3 +754,24 @@ async def test_discovered_by_unifi_discovery_direct_connect_on_different_interfa

    assert result["type"] == RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"


async def test_discovery_can_be_ignored(hass: HomeAssistant, mock_nvr: NVR) -> None:
    """Test a discovery can be ignored."""
    mock_config = MockConfigEntry(
        domain=DOMAIN,
        data={},
        unique_id=DEVICE_MAC_ADDRESS.upper().replace(":", ""),
        source=config_entries.SOURCE_IGNORE,
    )
    mock_config.add_to_hass(hass)
    with _patch_discovery():
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_DISCOVERY},
            data=UNIFI_DISCOVERY_DICT,
        )
    await hass.async_block_till_done()

    assert result["type"] == RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"
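
The new hostname test above asserts that a discovered IP such as 127.0.0.1 must not overwrite a configured hostname such as a.hostname. One plausible shape for such a guard, using only the standard ipaddress module, is sketched below; the helper names are hypothetical and the integration's actual config flow may implement this differently:

# Sketch of an "only overwrite the stored host when it is an IP address" guard,
# matching the behaviour the test asserts. Hypothetical helpers, not Home Assistant code.
import contextlib
import ipaddress

def _is_ip_address(value: str) -> bool:
    """Return True if value parses as an IPv4 or IPv6 address."""
    with contextlib.suppress(ValueError):
        ipaddress.ip_address(value)
        return True
    return False

def updated_host(stored_host: str, discovered_ip: str) -> str:
    """Keep a configured hostname; only replace a stored IP address with the discovered one."""
    return discovered_ip if _is_ip_address(stored_host) else stored_host

assert updated_host("a.hostname", "127.0.0.1") == "a.hostname"    # matches the test expectation
assert updated_host("192.168.1.10", "127.0.0.1") == "127.0.0.1"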
@@ -1742,6 +1742,44 @@ async def test_repeat_count(hass, caplog, count):
    )


async def test_repeat_count_0(hass, caplog):
    """Test repeat action w/ count option."""
    event = "test_event"
    events = async_capture_events(hass, event)
    count = 0

    alias = "condition step"
    sequence = cv.SCRIPT_SCHEMA(
        {
            "alias": alias,
            "repeat": {
                "count": count,
                "sequence": {
                    "event": event,
                    "event_data_template": {
                        "first": "{{ repeat.first }}",
                        "index": "{{ repeat.index }}",
                        "last": "{{ repeat.last }}",
                    },
                },
            },
        }
    )

    script_obj = script.Script(hass, sequence, "Test Name", "test_domain")

    await script_obj.async_run(context=Context())
    await hass.async_block_till_done()

    assert len(events) == count
    assert caplog.text.count(f"Repeating {alias}") == count
    assert_action_trace(
        {
            "0": [{}],
        }
    )


@pytest.mark.parametrize("condition", ["while", "until"])
async def test_repeat_condition_warning(hass, caplog, condition):
    """Test warning on repeat conditions."""
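
The test_repeat_count_0 hunk above pins down that a repeat with count 0 runs its inner sequence zero times while still recording a trace node for the repeat step. A minimal standalone sketch of that semantics, treating the count like an empty range (illustrative only; the real engine lives in homeassistant/helpers/script.py):

# A repeat with count 0 behaves like iterating an empty range: the inner event never fires.
count = 0
fired_events = []

for index in range(count):  # zero iterations when count == 0
    fired_events.append(
        {"first": index == 0, "index": index + 1, "last": index == count - 1}
    )

assert len(fired_events) == count  # mirrors `assert len(events) == count` in the test above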
@@ -5,6 +5,7 @@ from unittest.mock import MagicMock, Mock, patch

import pytest

from homeassistant import block_async_io
from homeassistant.util import async_ as hasync
@@ -70,10 +71,14 @@ def test_run_callback_threadsafe_from_inside_event_loop(mock_ident, _):
    assert len(loop.call_soon_threadsafe.mock_calls) == 2


def banned_function():
    """Mock banned function."""


async def test_check_loop_async():
    """Test check_loop detects when called from event loop without integration context."""
    with pytest.raises(RuntimeError):
        hasync.check_loop()
        hasync.check_loop(banned_function)


async def test_check_loop_async_integration(caplog):
@@ -98,7 +103,7 @@ async def test_check_loop_async_integration(caplog):
            ),
        ],
    ):
        hasync.check_loop()
        hasync.check_loop(banned_function)
    assert (
        "Detected blocking call inside the event loop. This is causing stability issues. "
        "Please report issue for hue doing blocking calls at "
@@ -129,7 +134,7 @@ async def test_check_loop_async_integration_non_strict(caplog):
            ),
        ],
    ):
        hasync.check_loop(strict=False)
        hasync.check_loop(banned_function, strict=False)
    assert (
        "Detected blocking call inside the event loop. This is causing stability issues. "
        "Please report issue for hue doing blocking calls at "
@@ -160,7 +165,7 @@ async def test_check_loop_async_custom(caplog):
            ),
        ],
    ):
        hasync.check_loop()
        hasync.check_loop(banned_function)
    assert (
        "Detected blocking call inside the event loop. This is causing stability issues. "
        "Please report issue to the custom component author for hue doing blocking calls "
@@ -170,7 +175,7 @@ async def test_check_loop_async_custom(caplog):

def test_check_loop_sync(caplog):
    """Test check_loop does nothing when called from thread."""
    hasync.check_loop()
    hasync.check_loop(banned_function)
    assert "Detected blocking call inside the event loop" not in caplog.text
@@ -179,10 +184,38 @@ def test_protect_loop_sync():
    func = Mock()
    with patch("homeassistant.util.async_.check_loop") as mock_check_loop:
        hasync.protect_loop(func)(1, test=2)
    mock_check_loop.assert_called_once_with(strict=True)
    mock_check_loop.assert_called_once_with(func, strict=True)
    func.assert_called_once_with(1, test=2)


async def test_protect_loop_debugger_sleep(caplog):
    """Test time.sleep injected by the debugger is not reported."""
    block_async_io.enable()

    with patch(
        "homeassistant.util.async_.extract_stack",
        return_value=[
            Mock(
                filename="/home/paulus/homeassistant/.venv/blah/pydevd.py",
                lineno="23",
                line="do_something()",
            ),
            Mock(
                filename="/home/paulus/homeassistant/util/async.py",
                lineno="123",
                line="protected_loop_func",
            ),
            Mock(
                filename="/home/paulus/homeassistant/util/async.py",
                lineno="123",
                line="check_loop()",
            ),
        ],
    ):
        time.sleep(0)
    assert "Detected blocking call inside the event loop" not in caplog.text


async def test_gather_with_concurrency():
    """Test gather_with_concurrency limits the number of running tasks."""
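
The test_async.py hunks above change every call site from check_loop() to check_loop(banned_function), and test_protect_loop_sync now expects check_loop(func, strict=True). The sketch below shows a wrapper consistent with that call pattern, written from the diff alone; the names and bodies are illustrative, not the actual homeassistant.util.async_ implementation:

# Sketch of a protect_loop-style wrapper matching the asserted call pattern: the wrapped
# function object is passed to check_loop so a warning can name the offending call.
import functools
from collections.abc import Callable
from typing import Any

def check_loop(func: Callable[..., Any], strict: bool = True) -> None:
    """Placeholder: the real check warns or raises if `func` is about to block the event loop."""

def protect_loop(func: Callable[..., Any], strict: bool = True) -> Callable[..., Any]:
    """Return `func` wrapped so each call is preceded by check_loop(func, strict=...)."""

    @functools.wraps(func)
    def protected_loop_func(*args: Any, **kwargs: Any) -> Any:
        check_loop(func, strict=strict)  # mirrors mock_check_loop.assert_called_once_with(func, strict=True)
        return func(*args, **kwargs)

    return protected_loop_func

# Usage mirroring test_protect_loop_sync: protect_loop(func)(1, test=2)
doubled = protect_loop(lambda x: 2 * x)(21)
assert doubled == 42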