forked from home-assistant/core
Compare commits
40 Commits
30db51a49c
a537534880
ea8ee02403
d244af6df1
74d38e00e4
e01faa7a8f
8bdce8ef68
31df67a4c1
fe7c3a7ba5
276e8f185b
741252a32d
f8db38c0b6
4ce6b6dd22
de0126c880
7bd60bf0fb
69828da4bc
261ae2ef33
814cbcd13a
398c7be850
25fc64a9e0
a543160070
51bfe53444
cc6afdba3c
8a8ee3c732
27721d5b84
fee80a9d4a
e49da79d1b
ec541ca7ed
f5bb9e6047
242bd921df
ba16156a79
84d8a7857d
9607dfe57c
aeb8dc2c07
71fb2d09b7
fd8fb59f7a
49bf1d6bff
8bd07bcff2
85bc863830
094c185dee
@@ -28,6 +28,7 @@ TYPE_BATT6 = "batt6"
TYPE_BATT7 = "batt7"
TYPE_BATT8 = "batt8"
TYPE_BATT9 = "batt9"
TYPE_BATTIN = "battin"
TYPE_BATTOUT = "battout"
TYPE_BATT_CO2 = "batt_co2"
TYPE_BATT_LIGHTNING = "batt_lightning"
@@ -140,6 +141,13 @@ BINARY_SENSOR_DESCRIPTIONS = (
entity_category=EntityCategory.DIAGNOSTIC,
on_state=0,
),
AmbientBinarySensorDescription(
key=TYPE_BATTIN,
name="Interior Battery",
device_class=BinarySensorDeviceClass.BATTERY,
entity_category=EntityCategory.DIAGNOSTIC,
on_state=0,
),
AmbientBinarySensorDescription(
key=TYPE_BATT10,
name="Soil Monitor Battery 10",

@@ -108,7 +108,7 @@ class BackupManager:
size=round(backup_path.stat().st_size / 1_048_576, 2),
)
backups[backup.slug] = backup
except (OSError, TarError, json.JSONDecodeError) as err:
except (OSError, TarError, json.JSONDecodeError, KeyError) as err:
LOGGER.warning("Unable to read backup %s: %s", backup_path, err)
return backups

@@ -469,7 +469,8 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):

# The only way we can turn the Chromecast is on is by launching an app
if self._chromecast.cast_type == pychromecast.const.CAST_TYPE_CHROMECAST:
self._chromecast.play_media(CAST_SPLASH, "image/png")
app_data = {"media_id": CAST_SPLASH, "media_type": "image/png"}
quick_play(self._chromecast, "default_media_receiver", app_data)
else:
self._chromecast.start_app(pychromecast.config.APP_MEDIA_RECEIVER)

@@ -75,15 +75,19 @@ def async_condition_from_config(
hass: HomeAssistant, config: ConfigType
) -> condition.ConditionCheckerType:
"""Create a function to test a device condition."""
if config[CONF_TYPE] == "is_hvac_mode":
attribute = const.ATTR_HVAC_MODE
else:
attribute = const.ATTR_PRESET_MODE

def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool:
"""Test if an entity is a certain state."""
state = hass.states.get(config[ATTR_ENTITY_ID])
return state.attributes.get(attribute) == config[attribute] if state else False
if (state := hass.states.get(config[ATTR_ENTITY_ID])) is None:
return False

if config[CONF_TYPE] == "is_hvac_mode":
return state.state == config[const.ATTR_HVAC_MODE]

return (
state.attributes.get(const.ATTR_PRESET_MODE)
== config[const.ATTR_PRESET_MODE]
)

return test_is_state

@@ -66,9 +66,9 @@ class DevoloBinaryDeviceEntity(DevoloDeviceEntity, BinarySensorEntity):
self, homecontrol: HomeControl, device_instance: Zwave, element_uid: str
) -> None:
"""Initialize a devolo binary sensor."""
self._binary_sensor_property = device_instance.binary_sensor_property.get(
self._binary_sensor_property = device_instance.binary_sensor_property[
element_uid
)
]

super().__init__(
homecontrol=homecontrol,
@@ -82,10 +82,12 @@ class DevoloBinaryDeviceEntity(DevoloDeviceEntity, BinarySensorEntity):
)

if self._attr_device_class is None:
if device_instance.binary_sensor_property.get(element_uid).sub_type != "":
self._attr_name += f" {device_instance.binary_sensor_property.get(element_uid).sub_type}"
if device_instance.binary_sensor_property[element_uid].sub_type != "":
self._attr_name += (
f" {device_instance.binary_sensor_property[element_uid].sub_type}"
)
else:
self._attr_name += f" {device_instance.binary_sensor_property.get(element_uid).sensor_type}"
self._attr_name += f" {device_instance.binary_sensor_property[element_uid].sensor_type}"

self._value = self._binary_sensor_property.state

@@ -114,9 +116,9 @@ class DevoloRemoteControl(DevoloDeviceEntity, BinarySensorEntity):
key: int,
) -> None:
"""Initialize a devolo remote control."""
self._remote_control_property = device_instance.remote_control_property.get(
self._remote_control_property = device_instance.remote_control_property[
element_uid
)
]

super().__init__(
homecontrol=homecontrol,

@@ -63,7 +63,7 @@ class DevoloCoverDeviceEntity(DevoloMultiLevelSwitchDeviceEntity, CoverEntity):
@property
def current_cover_position(self) -> int:
"""Return the current position. 0 is closed. 100 is open."""
return self._value
return int(self._value)

@property
def is_closed(self) -> bool:

@@ -46,7 +46,7 @@ class DevoloDeviceEntity(Entity):

self.subscriber: Subscriber | None = None
self.sync_callback = self._sync
self._value: int
self._value: float

async def async_added_to_hass(self) -> None:
"""Call when entity is added to hass."""

@@ -2,7 +2,7 @@
"domain": "devolo_home_control",
"name": "devolo Home Control",
"documentation": "https://www.home-assistant.io/integrations/devolo_home_control",
"requirements": ["devolo-home-control-api==0.17.4"],
"requirements": ["devolo-home-control-api==0.18.1"],
"after_dependencies": ["zeroconf"],
"config_flow": true,
"codeowners": ["@2Fake", "@Shutgun"],

@@ -83,7 +83,7 @@ class DevoloMultiLevelDeviceEntity(DevoloDeviceEntity, SensorEntity):
"""Abstract representation of a multi level sensor within devolo Home Control."""

@property
def native_value(self) -> int:
def native_value(self) -> float:
"""Return the state of the sensor."""
return self._value

@@ -54,8 +54,8 @@ class DevoloSirenDeviceEntity(DevoloMultiLevelSwitchDeviceEntity, SirenEntity):
)
self._attr_available_tones = [
*range(
self._multi_level_switch_property.min,
self._multi_level_switch_property.max + 1,
int(self._multi_level_switch_property.min),
int(self._multi_level_switch_property.max) + 1,
)
]
self._attr_supported_features = (

@@ -50,9 +50,9 @@ class DevoloSwitch(DevoloDeviceEntity, SwitchEntity):
device_instance=device_instance,
element_uid=element_uid,
)
self._binary_switch_property = self._device_instance.binary_switch_property.get(
self._attr_unique_id
)
self._binary_switch_property = self._device_instance.binary_switch_property[
self._attr_unique_id  # type: ignore[index]
]
self._attr_is_on = self._binary_switch_property.state

def turn_on(self, **kwargs: Any) -> None:

@@ -134,10 +134,16 @@ class DlnaDmrFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
discovery_service_list = discovery_info.upnp.get(ssdp.ATTR_UPNP_SERVICE_LIST)
if not discovery_service_list:
return self.async_abort(reason="not_dmr")
discovery_service_ids = {
service.get("serviceId")
for service in discovery_service_list.get("service") or []
}

services = discovery_service_list.get("service")
if not services:
discovery_service_ids: set[str] = set()
elif isinstance(services, list):
discovery_service_ids = {service.get("serviceId") for service in services}
else:
# Only one service defined (etree_to_dict failed to make a list)
discovery_service_ids = {services.get("serviceId")}

if not DmrDevice.SERVICE_IDS.issubset(discovery_service_ids):
return self.async_abort(reason="not_dmr")

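The same list-or-single-dict normalization appears again in the DMS flow below. A minimal standalone sketch of the pattern, with an illustrative function name that is not part of the integration's API:

```python
from typing import Any


def extract_service_ids(service_list: dict[str, Any]) -> set:
    """Collect serviceId values whether "service" is a list, a single dict, or missing."""
    services = service_list.get("service")
    if not services:
        return set()
    if isinstance(services, list):
        return {service.get("serviceId") for service in services}
    # A single service was flattened into one dict instead of a one-element list
    return {services.get("serviceId")}


# Both shapes yield the same set of service ids
single = {"service": {"serviceId": "urn:upnp-org:serviceId:AVTransport"}}
listed = {"service": [{"serviceId": "urn:upnp-org:serviceId:AVTransport"}]}
assert extract_service_ids(single) == extract_service_ids(listed)
```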
@@ -77,10 +77,16 @@ class DlnaDmsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
discovery_service_list = discovery_info.upnp.get(ssdp.ATTR_UPNP_SERVICE_LIST)
if not discovery_service_list:
return self.async_abort(reason="not_dms")
discovery_service_ids = {
service.get("serviceId")
for service in discovery_service_list.get("service") or []
}

services = discovery_service_list.get("service")
if not services:
discovery_service_ids: set[str] = set()
elif isinstance(services, list):
discovery_service_ids = {service.get("serviceId") for service in services}
else:
# Only one service defined (etree_to_dict failed to make a list)
discovery_service_ids = {services.get("serviceId")}

if not DmsDevice.SERVICE_IDS.issubset(discovery_service_ids):
return self.async_abort(reason="not_dms")

@@ -2,6 +2,7 @@
from __future__ import annotations

import asyncio
from contextlib import suppress
from functools import partial

from homeassistant.components.light import (
@@ -198,16 +199,21 @@ class FibaroLight(FibaroDevice, LightEntity):

Dimmable and RGB lights can be on based on different
properties, so we need to check here several values.

JSON for HC2 uses always string, HC3 uses int for integers.
"""
props = self.fibaro_device.properties
if self.current_binary_state:
return True
if "brightness" in props and props.brightness != "0":
return True
if "currentProgram" in props and props.currentProgram != "0":
return True
if "currentProgramID" in props and props.currentProgramID != "0":
return True
with suppress(ValueError, TypeError):
if "brightness" in props and int(props.brightness) != 0:
return True
with suppress(ValueError, TypeError):
if "currentProgram" in props and int(props.currentProgram) != 0:
return True
with suppress(ValueError, TypeError):
if "currentProgramID" in props and int(props.currentProgramID) != 0:
return True

return False

@@ -2,7 +2,7 @@
"domain": "generic",
"name": "Generic Camera",
"config_flow": true,
"requirements": ["av==8.1.0", "pillow==9.0.1"],
"requirements": ["ha-av==9.1.1-3", "pillow==9.0.1"],
"documentation": "https://www.home-assistant.io/integrations/generic",
"codeowners": ["@davet2001"],
"iot_class": "local_push"

@@ -193,9 +193,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
# Force a token refresh to fix a bug where tokens were persisted with
# expires_in (relative time delta) and expires_at (absolute time) swapped.
if session.token["expires_at"] >= datetime(2070, 1, 1).timestamp():
# A google session token typically only lasts a few days between refresh.
now = datetime.now()
if session.token["expires_at"] >= (now + timedelta(days=365)).timestamp():
session.token["expires_in"] = 0
session.token["expires_at"] = datetime.now().timestamp()
session.token["expires_at"] = now.timestamp()
try:
await session.async_ensure_token_valid()
except aiohttp.ClientResponseError as err:

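The replacement check above treats any token whose expires_at lies more than a year in the future as the swapped-field bug and forces a refresh. A small standard-library illustration of that cutoff (the token dict here is a made-up example, not the OAuth2Session structure):

```python
from datetime import datetime, timedelta


def needs_forced_refresh(token: dict) -> bool:
    """Flag an expiry more than a year out as corrupted (expires_in/expires_at swapped)."""
    now = datetime.now()
    return token["expires_at"] >= (now + timedelta(days=365)).timestamp()


# A token that claims to be valid until 2070 is clearly bogus and gets refreshed
bogus = {"expires_at": datetime(2070, 1, 1).timestamp(), "expires_in": 0}
assert needs_forced_refresh(bogus)
```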
@@ -711,7 +711,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
dev_reg = await async_get_registry(hass)
coordinator = HassioDataUpdateCoordinator(hass, entry, dev_reg)
hass.data[ADDONS_COORDINATOR] = coordinator
await coordinator.async_refresh()
await coordinator.async_config_entry_first_refresh()

hass.config_entries.async_setup_platforms(entry, PLATFORMS)

@@ -848,8 +848,8 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
new_data[DATA_KEY_ADDONS] = {
addon[ATTR_SLUG]: {
**addon,
**((addons_stats or {}).get(addon[ATTR_SLUG], {})),
ATTR_AUTO_UPDATE: addons_info.get(addon[ATTR_SLUG], {}).get(
**((addons_stats or {}).get(addon[ATTR_SLUG]) or {}),
ATTR_AUTO_UPDATE: (addons_info.get(addon[ATTR_SLUG]) or {}).get(
ATTR_AUTO_UPDATE, False
),
ATTR_CHANGELOG: (addons_changelogs or {}).get(addon[ATTR_SLUG]),
@@ -952,15 +952,27 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):

async def _update_addon_stats(self, slug):
"""Update single addon stats."""
stats = await self.hassio.get_addon_stats(slug)
return (slug, stats)
try:
stats = await self.hassio.get_addon_stats(slug)
return (slug, stats)
except HassioAPIError as err:
_LOGGER.warning("Could not fetch stats for %s: %s", slug, err)
return (slug, None)

async def _update_addon_changelog(self, slug):
"""Return the changelog for an add-on."""
changelog = await self.hassio.get_addon_changelog(slug)
return (slug, changelog)
try:
changelog = await self.hassio.get_addon_changelog(slug)
return (slug, changelog)
except HassioAPIError as err:
_LOGGER.warning("Could not fetch changelog for %s: %s", slug, err)
return (slug, None)

async def _update_addon_info(self, slug):
"""Return the info for an add-on."""
info = await self.hassio.get_addon_info(slug)
return (slug, info)
try:
info = await self.hassio.get_addon_info(slug)
return (slug, info)
except HassioAPIError as err:
_LOGGER.warning("Could not fetch info for %s: %s", slug, err)
return (slug, None)

@@ -90,7 +90,7 @@ class HassioSupervisorEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
"""Return True if entity is available."""
return (
super().available
and DATA_KEY_OS in self.coordinator.data
and DATA_KEY_SUPERVISOR in self.coordinator.data
and self.entity_description.key
in self.coordinator.data[DATA_KEY_SUPERVISOR]
)

@@ -312,7 +312,6 @@ class Dishwasher(
"""Dishwasher class."""

PROGRAMS = [
{"name": "Dishcare.Dishwasher.Program.PreRinse"},
{"name": "Dishcare.Dishwasher.Program.Auto1"},
{"name": "Dishcare.Dishwasher.Program.Auto2"},
{"name": "Dishcare.Dishwasher.Program.Auto3"},

@@ -63,7 +63,7 @@ class HomeKitSmokeSensor(HomeKitEntity, BinarySensorEntity):
class HomeKitCarbonMonoxideSensor(HomeKitEntity, BinarySensorEntity):
"""Representation of a Homekit BO sensor."""

_attr_device_class = BinarySensorDeviceClass.GAS
_attr_device_class = BinarySensorDeviceClass.CO

def get_characteristic_types(self) -> list[str]:
"""Define the homekit characteristics the entity is tracking."""

@@ -293,7 +293,10 @@ class HomekitControllerFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
self._abort_if_unique_id_configured(updates=updated_ip_port)

for progress in self._async_in_progress(include_uninitialized=True):
if progress["context"].get("unique_id") == normalized_hkid:
context = progress["context"]
if context.get("unique_id") == normalized_hkid and not context.get(
"pairing"
):
if paired:
# If the device gets paired, we want to dismiss
# an existing discovery since we can no longer
@@ -350,6 +353,7 @@ class HomekitControllerFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
await self._async_setup_controller()

if pair_info and self.finish_pairing:
self.context["pairing"] = True
code = pair_info["pairing_code"]
try:
code = ensure_pin_format(

@@ -296,9 +296,3 @@ class KNXClimate(KnxEntity, ClimateEntity):
await super().async_added_to_hass()
if self._device.mode is not None:
self._device.mode.register_device_updated_cb(self.after_update_callback)

async def async_will_remove_from_hass(self) -> None:
"""Disconnect device object when removed."""
await super().async_will_remove_from_hass()
if self._device.mode is not None:
self._device.mode.unregister_device_updated_cb(self.after_update_callback)

@@ -45,4 +45,5 @@ class KnxEntity(Entity):

async def async_will_remove_from_hass(self) -> None:
"""Disconnect device object when removed."""
self._device.unregister_device_updated_cb(self.after_update_callback)
# will also remove callbacks
self._device.shutdown()

@@ -3,7 +3,7 @@
"name": "KNX",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/knx",
"requirements": ["xknx==0.20.1"],
"requirements": ["xknx==0.20.2"],
"codeowners": ["@Julius2342", "@farmio", "@marvin-w"],
"quality_scale": "silver",
"iot_class": "local_push",

@@ -32,6 +32,9 @@ def async_process_play_media_url(
"""Update a media URL with authentication if it points at Home Assistant."""
parsed = yarl.URL(media_content_id)

if parsed.scheme and parsed.scheme not in ("http", "https"):
return media_content_id

if parsed.is_absolute():
if not is_hass_url(hass, media_content_id):
return media_content_id

@@ -1,4 +1,6 @@
"""Provides the constants needed for component."""
from enum import IntEnum

# How long our auth signature on the content should be valid for
CONTENT_AUTH_EXPIRY_TIME = 3600 * 24

@@ -90,6 +92,32 @@ REPEAT_MODE_OFF = "off"
REPEAT_MODE_ONE = "one"
REPEAT_MODES = [REPEAT_MODE_OFF, REPEAT_MODE_ALL, REPEAT_MODE_ONE]


class MediaPlayerEntityFeature(IntEnum):
"""Supported features of the media player entity."""

PAUSE = 1
SEEK = 2
VOLUME_SET = 4
VOLUME_MUTE = 8
PREVIOUS_TRACK = 16
NEXT_TRACK = 32

TURN_ON = 128
TURN_OFF = 256
PLAY_MEDIA = 512
VOLUME_STEP = 1024
SELECT_SOURCE = 2048
STOP = 4096
CLEAR_PLAYLIST = 8192
PLAY = 16384
SHUFFLE_SET = 32768
SELECT_SOUND_MODE = 65536
BROWSE_MEDIA = 131072
REPEAT_SET = 262144
GROUPING = 524288


SUPPORT_PAUSE = 1
SUPPORT_SEEK = 2
SUPPORT_VOLUME_SET = 4

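MediaPlayerEntityFeature mirrors the legacy SUPPORT_* bit values, so a supported-features bitmask can be tested with a plain bitwise AND, which is what the reproduce_state changes below do. A minimal sketch, using only a subset of the flags for illustration:

```python
from enum import IntEnum


class MediaPlayerEntityFeature(IntEnum):
    """Subset of the flags above, for illustration only."""

    PAUSE = 1
    VOLUME_SET = 4
    TURN_ON = 128
    TURN_OFF = 256


# A bitmask as an entity would expose via ATTR_SUPPORTED_FEATURES
features = MediaPlayerEntityFeature.VOLUME_SET | MediaPlayerEntityFeature.TURN_ON

assert features & MediaPlayerEntityFeature.TURN_ON       # supported
assert not features & MediaPlayerEntityFeature.TURN_OFF  # not supported
```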
@@ -6,6 +6,7 @@ from collections.abc import Iterable
from typing import Any

from homeassistant.const import (
ATTR_SUPPORTED_FEATURES,
SERVICE_MEDIA_PAUSE,
SERVICE_MEDIA_PLAY,
SERVICE_MEDIA_STOP,
@@ -33,6 +34,7 @@ from .const import (
SERVICE_PLAY_MEDIA,
SERVICE_SELECT_SOUND_MODE,
SERVICE_SELECT_SOURCE,
MediaPlayerEntityFeature,
)

# mypy: allow-untyped-defs
@@ -46,6 +48,8 @@ async def _async_reproduce_states(
reproduce_options: dict[str, Any] | None = None,
) -> None:
"""Reproduce component states."""
cur_state = hass.states.get(state.entity_id)
features = cur_state.attributes[ATTR_SUPPORTED_FEATURES] if cur_state else 0

async def call_service(service: str, keys: Iterable) -> None:
"""Call service with set of attributes given."""
@@ -59,28 +63,48 @@ async def _async_reproduce_states(
)

if state.state == STATE_OFF:
await call_service(SERVICE_TURN_OFF, [])
if features & MediaPlayerEntityFeature.TURN_OFF:
await call_service(SERVICE_TURN_OFF, [])
# entities that are off have no other attributes to restore
return

if state.state in (
STATE_ON,
STATE_PLAYING,
STATE_IDLE,
STATE_PAUSED,
if (
state.state
in (
STATE_ON,
STATE_PLAYING,
STATE_IDLE,
STATE_PAUSED,
)
and features & MediaPlayerEntityFeature.TURN_ON
):
await call_service(SERVICE_TURN_ON, [])

if ATTR_MEDIA_VOLUME_LEVEL in state.attributes:
cur_state = hass.states.get(state.entity_id)
features = cur_state.attributes[ATTR_SUPPORTED_FEATURES] if cur_state else 0

if (
ATTR_MEDIA_VOLUME_LEVEL in state.attributes
and features & MediaPlayerEntityFeature.VOLUME_SET
):
await call_service(SERVICE_VOLUME_SET, [ATTR_MEDIA_VOLUME_LEVEL])

if ATTR_MEDIA_VOLUME_MUTED in state.attributes:
if (
ATTR_MEDIA_VOLUME_MUTED in state.attributes
and features & MediaPlayerEntityFeature.VOLUME_MUTE
):
await call_service(SERVICE_VOLUME_MUTE, [ATTR_MEDIA_VOLUME_MUTED])

if ATTR_INPUT_SOURCE in state.attributes:
if (
ATTR_INPUT_SOURCE in state.attributes
and features & MediaPlayerEntityFeature.SELECT_SOURCE
):
await call_service(SERVICE_SELECT_SOURCE, [ATTR_INPUT_SOURCE])

if ATTR_SOUND_MODE in state.attributes:
if (
ATTR_SOUND_MODE in state.attributes
and features & MediaPlayerEntityFeature.SELECT_SOUND_MODE
):
await call_service(SERVICE_SELECT_SOUND_MODE, [ATTR_SOUND_MODE])

already_playing = False
@@ -88,18 +112,25 @@ async def _async_reproduce_states(
if (ATTR_MEDIA_CONTENT_TYPE in state.attributes) and (
ATTR_MEDIA_CONTENT_ID in state.attributes
):
await call_service(
SERVICE_PLAY_MEDIA,
[ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_ENQUEUE],
)
if features & MediaPlayerEntityFeature.PLAY_MEDIA:
await call_service(
SERVICE_PLAY_MEDIA,
[ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_ENQUEUE],
)
already_playing = True

if state.state == STATE_PLAYING and not already_playing:
if (
not already_playing
and state.state == STATE_PLAYING
and features & MediaPlayerEntityFeature.PLAY
):
await call_service(SERVICE_MEDIA_PLAY, [])
elif state.state == STATE_IDLE:
await call_service(SERVICE_MEDIA_STOP, [])
if features & MediaPlayerEntityFeature.STOP:
await call_service(SERVICE_MEDIA_STOP, [])
elif state.state == STATE_PAUSED:
await call_service(SERVICE_MEDIA_PAUSE, [])
if features & MediaPlayerEntityFeature.PAUSE:
await call_service(SERVICE_MEDIA_PAUSE, [])


async def async_reproduce_states(

@@ -155,7 +155,7 @@ async def async_setup_entry(
platform.async_register_entity_service(
SERVICE_SET_ABSOLUTE_POSITION,
SET_ABSOLUTE_POSITION_SCHEMA,
SERVICE_SET_ABSOLUTE_POSITION,
"async_set_absolute_position",
)

@@ -463,7 +463,7 @@ class MpdDevice(MediaPlayerEntity):
if media_source.is_media_source_id(media_id):
media_type = MEDIA_TYPE_MUSIC
play_item = await media_source.async_resolve_media(self.hass, media_id)
media_id = play_item.url
media_id = async_process_play_media_url(self.hass, play_item.url)

if media_type == MEDIA_TYPE_PLAYLIST:
_LOGGER.debug("Playing playlist: %s", media_id)
@@ -476,8 +476,6 @@ class MpdDevice(MediaPlayerEntity):
await self._client.load(media_id)
await self._client.play()
else:
media_id = async_process_play_media_url(self.hass, media_id)

await self._client.clear()
self._currentplaylist = None
await self._client.add(media_id)

@@ -2,7 +2,7 @@
"domain": "openhome",
"name": "Linn / OpenHome",
"documentation": "https://www.home-assistant.io/integrations/openhome",
"requirements": ["openhomedevice==2.0.1"],
"requirements": ["openhomedevice==2.0.2"],
"codeowners": ["@bazwilliams"],
"iot_class": "local_polling",
"loggers": ["async_upnp_client", "openhomedevice"]

@@ -39,8 +39,7 @@ def _select_option_open_closed_pedestrian(
OverkizCommandParam.CLOSED: OverkizCommand.CLOSE,
OverkizCommandParam.OPEN: OverkizCommand.OPEN,
OverkizCommandParam.PEDESTRIAN: OverkizCommand.SET_PEDESTRIAN_POSITION,
}[OverkizCommandParam(option)],
None,
}[OverkizCommandParam(option)]
)

@@ -51,6 +51,7 @@ class ProsegurAlarm(alarm.AlarmControlPanelEntity):
self.contract = contract
self._auth = auth

self._attr_code_arm_required = False
self._attr_name = f"contract {self.contract}"
self._attr_unique_id = self.contract
self._attr_supported_features = SUPPORT_ALARM_ARM_AWAY | SUPPORT_ALARM_ARM_HOME

@@ -12,7 +12,7 @@ import logging
import os
import re
from statistics import mean
from typing import TYPE_CHECKING, Any, Literal
from typing import TYPE_CHECKING, Any, Literal, overload

from sqlalchemy import bindparam, func
from sqlalchemy.exc import SQLAlchemyError, StatementError
@@ -125,9 +125,9 @@ STATISTICS_META_BAKERY = "recorder_statistics_meta_bakery"
STATISTICS_SHORT_TERM_BAKERY = "recorder_statistics_short_term_bakery"


# Convert pressure and temperature statistics from the native unit used for statistics
# to the units configured by the user
UNIT_CONVERSIONS = {
# Convert pressure, temperature and volume statistics from the normalized unit used for
# statistics to the unit configured by the user
STATISTIC_UNIT_TO_DISPLAY_UNIT_CONVERSIONS = {
PRESSURE_PA: lambda x, units: pressure_util.convert(
x, PRESSURE_PA, units.pressure_unit
)
@@ -145,6 +145,17 @@ UNIT_CONVERSIONS = {
else None,
}

# Convert volume statistics from the display unit configured by the user
# to the normalized unit used for statistics
# This is used to support adjusting statistics in the display unit
DISPLAY_UNIT_TO_STATISTIC_UNIT_CONVERSIONS: dict[
str, Callable[[float, UnitSystem], float]
] = {
VOLUME_CUBIC_FEET: lambda x, units: volume_util.convert(
x, _configured_unit(VOLUME_CUBIC_METERS, units), VOLUME_CUBIC_METERS
),
}

_LOGGER = logging.getLogger(__name__)


@@ -721,7 +732,17 @@ def get_metadata(
)


@overload
def _configured_unit(unit: None, units: UnitSystem) -> None:
...


@overload
def _configured_unit(unit: str, units: UnitSystem) -> str:
...


def _configured_unit(unit: str | None, units: UnitSystem) -> str | None:
"""Return the pressure and temperature units configured by the user."""
if unit == PRESSURE_PA:
return units.pressure_unit
@@ -1163,7 +1184,7 @@ def _sorted_statistics_to_dict(
statistic_id = metadata[meta_id]["statistic_id"]
convert: Callable[[Any, Any], float | None]
if convert_units:
convert = UNIT_CONVERSIONS.get(unit, lambda x, units: x)  # type: ignore[arg-type,no-any-return]
convert = STATISTIC_UNIT_TO_DISPLAY_UNIT_CONVERSIONS.get(unit, lambda x, units: x)  # type: ignore[arg-type,no-any-return]
else:
convert = no_conversion
ent_results = result[meta_id]
@@ -1323,17 +1344,26 @@ def adjust_statistics(
if statistic_id not in metadata:
return True

tables: tuple[type[Statistics | StatisticsShortTerm], ...] = (
Statistics,
units = instance.hass.config.units
statistic_unit = metadata[statistic_id][1]["unit_of_measurement"]
display_unit = _configured_unit(statistic_unit, units)
convert = DISPLAY_UNIT_TO_STATISTIC_UNIT_CONVERSIONS.get(display_unit, lambda x, units: x)  # type: ignore[arg-type]
sum_adjustment = convert(sum_adjustment, units)

_adjust_sum_statistics(
session,
StatisticsShortTerm,
metadata[statistic_id][0],
start_time,
sum_adjustment,
)

_adjust_sum_statistics(
session,
Statistics,
metadata[statistic_id][0],
start_time.replace(minute=0),
sum_adjustment,
)
for table in tables:
_adjust_sum_statistics(
session,
table,
metadata[statistic_id][0],
start_time,
sum_adjustment,
)

return True

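The @overload declarations added above let a type checker know that _configured_unit returns None only when it receives None, and a plain str otherwise. A self-contained sketch of that pattern, with placeholder unit names rather than the recorder's constants:

```python
from __future__ import annotations

from typing import overload


@overload
def configured_unit(unit: None) -> None: ...
@overload
def configured_unit(unit: str) -> str: ...
def configured_unit(unit: str | None) -> str | None:
    """Map a normalized statistics unit to a display unit (placeholder mapping)."""
    if unit == "Pa":
        return "hPa"
    return unit


assert configured_unit("Pa") == "hPa"  # type checkers infer str here, not str | None
assert configured_unit(None) is None   # and None here
```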
@@ -3,7 +3,7 @@
"name": "Renault",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/renault",
"requirements": ["renault-api==0.1.10"],
"requirements": ["renault-api==0.1.11"],
"codeowners": ["@epenet"],
"iot_class": "cloud_polling",
"loggers": ["renault_api"],

@@ -363,9 +363,8 @@ class SamsungTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
if not entry:
return None
entry_kw_args: dict = {}
if (
self.unique_id
and entry.unique_id is None
if self.unique_id and (
entry.unique_id is None
or (is_unique_match and self.unique_id != entry.unique_id)
):
entry_kw_args["unique_id"] = self.unique_id

@@ -224,9 +224,7 @@ class SamsungTVDevice(MediaPlayerEntity):
startup_tasks.append(self._async_startup_app_list())

if self._dmr_device and not self._dmr_device.is_subscribed:
startup_tasks.append(
self._dmr_device.async_subscribe_services(auto_resubscribe=True)
)
startup_tasks.append(self._async_resubscribe_dmr())
if not self._dmr_device and self._ssdp_rendering_control_location:
startup_tasks.append(self._async_startup_dmr())

@@ -284,7 +282,7 @@ class SamsungTVDevice(MediaPlayerEntity):
# NETWORK,NONE
upnp_factory = UpnpFactory(upnp_requester, non_strict=True)
upnp_device: UpnpDevice | None = None
with contextlib.suppress(UpnpConnectionError):
with contextlib.suppress(UpnpConnectionError, UpnpResponseError):
upnp_device = await upnp_factory.async_create_device(
self._ssdp_rendering_control_location
)
@@ -319,6 +317,11 @@ class SamsungTVDevice(MediaPlayerEntity):
LOGGER.debug("Error while subscribing during device connect: %r", err)
raise

async def _async_resubscribe_dmr(self) -> None:
assert self._dmr_device
with contextlib.suppress(UpnpConnectionError):
await self._dmr_device.async_subscribe_services(auto_resubscribe=True)

async def _async_shutdown_dmr(self) -> None:
"""Handle removal."""
if (dmr_device := self._dmr_device) is not None:

@@ -5,3 +5,4 @@ KNOWN_PLAYERS = "known_players"
PLAYER_DISCOVERY_UNSUB = "player_discovery_unsub"
DISCOVERY_TASK = "discovery_task"
DEFAULT_PORT = 9000
SQUEEZEBOX_SOURCE_STRINGS = ("source:", "wavin:", "spotify:")

@@ -63,7 +63,13 @@ from .browse_media import (
library_payload,
media_source_content_filter,
)
from .const import DISCOVERY_TASK, DOMAIN, KNOWN_PLAYERS, PLAYER_DISCOVERY_UNSUB
from .const import (
DISCOVERY_TASK,
DOMAIN,
KNOWN_PLAYERS,
PLAYER_DISCOVERY_UNSUB,
SQUEEZEBOX_SOURCE_STRINGS,
)

SERVICE_CALL_METHOD = "call_method"
SERVICE_CALL_QUERY = "call_query"
@@ -475,7 +481,9 @@ class SqueezeBoxEntity(MediaPlayerEntity):
media_id = play_item.url

if media_type in MEDIA_TYPE_MUSIC:
media_id = async_process_play_media_url(self.hass, media_id)
if not media_id.startswith(SQUEEZEBOX_SOURCE_STRINGS):
# do not process special squeezebox "source" media ids
media_id = async_process_play_media_url(self.hass, media_id)

await self._player.async_load_url(media_id, cmd)
return

@@ -2,7 +2,7 @@
"domain": "stream",
"name": "Stream",
"documentation": "https://www.home-assistant.io/integrations/stream",
"requirements": ["PyTurboJPEG==1.6.6", "av==8.1.0"],
"requirements": ["PyTurboJPEG==1.6.6", "ha-av==9.1.1-3"],
"dependencies": ["http"],
"codeowners": ["@hunterjm", "@uvjustin", "@allenporter"],
"quality_scale": "internal",

@@ -202,6 +202,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry.data[CONF_API_KEY],
entry.data[CONF_LOCATION][CONF_LATITUDE],
entry.data[CONF_LOCATION][CONF_LONGITUDE],
unit_system="metric",
session=async_get_clientsession(hass),
)

@@ -31,16 +31,14 @@ from homeassistant.const import (
LENGTH_MILES,
PERCENTAGE,
PRESSURE_HPA,
PRESSURE_INHG,
SPEED_METERS_PER_SECOND,
SPEED_MILES_PER_HOUR,
TEMP_FAHRENHEIT,
TEMP_CELSIUS,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import slugify
from homeassistant.util.distance import convert as distance_convert
from homeassistant.util.pressure import convert as pressure_convert

from . import TomorrowioDataUpdateCoordinator, TomorrowioEntity
from .const import (
@@ -80,7 +78,7 @@ class TomorrowioSensorEntityDescription(SensorEntityDescription):
unit_imperial: str | None = None
unit_metric: str | None = None
multiplication_factor: Callable[[float], float] | float | None = None
metric_conversion: Callable[[float], float] | float | None = None
imperial_conversion: Callable[[float], float] | float | None = None
value_map: Any | None = None

def __post_init__(self) -> None:
@@ -105,13 +103,13 @@ SENSOR_TYPES = (
TomorrowioSensorEntityDescription(
key=TMRW_ATTR_FEELS_LIKE,
name="Feels Like",
native_unit_of_measurement=TEMP_FAHRENHEIT,
native_unit_of_measurement=TEMP_CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
),
TomorrowioSensorEntityDescription(
key=TMRW_ATTR_DEW_POINT,
name="Dew Point",
native_unit_of_measurement=TEMP_FAHRENHEIT,
native_unit_of_measurement=TEMP_CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
),
# Data comes in as inHg
@@ -119,9 +117,6 @@ SENSOR_TYPES = (
key=TMRW_ATTR_PRESSURE_SURFACE_LEVEL,
name="Pressure (Surface Level)",
native_unit_of_measurement=PRESSURE_HPA,
multiplication_factor=lambda val: pressure_convert(
val, PRESSURE_INHG, PRESSURE_HPA
),
device_class=SensorDeviceClass.PRESSURE,
),
# Data comes in as BTUs/(hr * ft^2)
@@ -131,7 +126,7 @@ SENSOR_TYPES = (
name="Global Horizontal Irradiance",
unit_imperial=IRRADIATION_BTUS_PER_HOUR_SQUARE_FOOT,
unit_metric=IRRADIATION_WATTS_PER_SQUARE_METER,
metric_conversion=3.15459,
imperial_conversion=(1 / 3.15459),
),
# Data comes in as miles
TomorrowioSensorEntityDescription(
@@ -139,8 +134,8 @@ SENSOR_TYPES = (
name="Cloud Base",
unit_imperial=LENGTH_MILES,
unit_metric=LENGTH_KILOMETERS,
metric_conversion=lambda val: distance_convert(
val, LENGTH_MILES, LENGTH_KILOMETERS
imperial_conversion=lambda val: distance_convert(
val, LENGTH_KILOMETERS, LENGTH_MILES
),
),
# Data comes in as miles
@@ -149,8 +144,8 @@ SENSOR_TYPES = (
name="Cloud Ceiling",
unit_imperial=LENGTH_MILES,
unit_metric=LENGTH_KILOMETERS,
metric_conversion=lambda val: distance_convert(
val, LENGTH_MILES, LENGTH_KILOMETERS
imperial_conversion=lambda val: distance_convert(
val, LENGTH_KILOMETERS, LENGTH_MILES
),
),
TomorrowioSensorEntityDescription(
@@ -164,8 +159,10 @@ SENSOR_TYPES = (
name="Wind Gust",
unit_imperial=SPEED_MILES_PER_HOUR,
unit_metric=SPEED_METERS_PER_SECOND,
metric_conversion=lambda val: distance_convert(val, LENGTH_MILES, LENGTH_METERS)
/ 3600,
imperial_conversion=lambda val: distance_convert(
val, LENGTH_METERS, LENGTH_MILES
)
* 3600,
),
TomorrowioSensorEntityDescription(
key=TMRW_ATTR_PRECIPITATION_TYPE,
@@ -183,20 +180,16 @@ SENSOR_TYPES = (
multiplication_factor=convert_ppb_to_ugm3(48),
device_class=SensorDeviceClass.OZONE,
),
# Data comes in as ug/ft^3
TomorrowioSensorEntityDescription(
key=TMRW_ATTR_PARTICULATE_MATTER_25,
name="Particulate Matter < 2.5 μm",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
multiplication_factor=3.2808399**3,
device_class=SensorDeviceClass.PM25,
),
# Data comes in as ug/ft^3
TomorrowioSensorEntityDescription(
key=TMRW_ATTR_PARTICULATE_MATTER_10,
name="Particulate Matter < 10 μm",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
multiplication_factor=3.2808399**3,
device_class=SensorDeviceClass.PM10,
),
# Data comes in as ppb
@@ -360,15 +353,15 @@ class BaseTomorrowioSensorEntity(TomorrowioEntity, SensorEntity):
if desc.multiplication_factor is not None:
state = handle_conversion(state, desc.multiplication_factor)

# If an imperial unit isn't provided, we always want to convert to metric since
# that is what the UI expects
# If there is an imperial conversion needed and the instance is using imperial,
# apply the conversion logic.
if (
desc.metric_conversion
desc.imperial_conversion
and desc.unit_imperial is not None
and desc.unit_imperial != desc.unit_metric
and self.hass.config.units.is_metric
and not self.hass.config.units.is_metric
):
return handle_conversion(state, desc.metric_conversion)
return handle_conversion(state, desc.imperial_conversion)

return state

homeassistant/components/zha/diagnostics.py (new file, 80 lines)
@@ -0,0 +1,80 @@
"""Provides diagnostics for ZHA."""
from __future__ import annotations

import dataclasses
from typing import Any

import bellows
import pkg_resources
import zigpy
from zigpy.config import CONF_NWK_EXTENDED_PAN_ID
import zigpy_deconz
import zigpy_xbee
import zigpy_zigate
import zigpy_znp

from homeassistant.components.diagnostics.util import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_UNIQUE_ID
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr

from .core.const import ATTR_IEEE, DATA_ZHA, DATA_ZHA_CONFIG, DATA_ZHA_GATEWAY
from .core.device import ZHADevice
from .core.gateway import ZHAGateway
from .core.helpers import async_get_zha_device

KEYS_TO_REDACT = {
ATTR_IEEE,
CONF_UNIQUE_ID,
"network_key",
CONF_NWK_EXTENDED_PAN_ID,
"partner_ieee",
}


def shallow_asdict(obj: Any) -> dict:
"""Return a shallow copy of a dataclass as a dict."""
if hasattr(obj, "__dataclass_fields__"):
result = {}

for field in dataclasses.fields(obj):
result[field.name] = shallow_asdict(getattr(obj, field.name))

return result
if hasattr(obj, "as_dict"):
return obj.as_dict()
return obj


async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry
) -> dict:
"""Return diagnostics for a config entry."""
config: dict = hass.data[DATA_ZHA].get(DATA_ZHA_CONFIG, {})
gateway: ZHAGateway = hass.data[DATA_ZHA][DATA_ZHA_GATEWAY]
return async_redact_data(
{
"config": config,
"config_entry": config_entry.as_dict(),
"application_state": shallow_asdict(gateway.application_controller.state),
"versions": {
"bellows": bellows.__version__,
"zigpy": zigpy.__version__,
"zigpy_deconz": zigpy_deconz.__version__,
"zigpy_xbee": zigpy_xbee.__version__,
"zigpy_znp": zigpy_znp.__version__,
"zigpy_zigate": zigpy_zigate.__version__,
"zhaquirks": pkg_resources.get_distribution("zha-quirks").version,
},
},
KEYS_TO_REDACT,
)


async def async_get_device_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry, device: dr.DeviceEntry
) -> dict:
"""Return diagnostics for a device."""
zha_device: ZHADevice = await async_get_zha_device(hass, device.id)
return async_redact_data(zha_device.zha_device_info, KEYS_TO_REDACT)

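shallow_asdict recurses through dataclass fields but, unlike dataclasses.asdict, defers to an object's own as_dict() and otherwise returns the value untouched. A quick standalone check of that behavior — the NetworkState and NodeInfo classes below are invented for the example, not zigpy's types:

```python
import dataclasses
from dataclasses import dataclass, field
from typing import Any


def shallow_asdict(obj: Any) -> Any:
    """Copy of the helper above, annotated loosely for demonstration."""
    if hasattr(obj, "__dataclass_fields__"):
        return {f.name: shallow_asdict(getattr(obj, f.name)) for f in dataclasses.fields(obj)}
    if hasattr(obj, "as_dict"):
        return obj.as_dict()
    return obj


class NodeInfo:  # non-dataclass with its own serializer
    def as_dict(self) -> dict:
        return {"nwk": 0x0000}


@dataclass
class NetworkState:
    node_info: NodeInfo = field(default_factory=NodeInfo)
    channel: int = 15


assert shallow_asdict(NetworkState()) == {"node_info": {"nwk": 0}, "channel": 15}
```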
@@ -7,9 +7,9 @@
"bellows==0.29.0",
"pyserial==3.5",
"pyserial-asyncio==0.6",
"zha-quirks==0.0.71",
"zigpy-deconz==0.15.0",
"zigpy==0.44.1",
"zha-quirks==0.0.72",
"zigpy-deconz==0.14.0",
"zigpy==0.44.2",
"zigpy-xbee==0.14.0",
"zigpy-zigate==0.8.0",
"zigpy-znp==0.7.0"

@@ -7,7 +7,7 @@ from .backports.enum import StrEnum

MAJOR_VERSION: Final = 2022
MINOR_VERSION: Final = 4
PATCH_VERSION: Final = "2"
PATCH_VERSION: Final = "4"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)

@@ -347,10 +347,6 @@ auroranoaa==0.0.2

# homeassistant.components.aurora_abb_powerone
aurorapy==0.2.6

# homeassistant.components.generic
# homeassistant.components.stream
av==8.1.0

# homeassistant.components.avea
# avea==1.5.1

@@ -530,7 +526,7 @@ deluge-client==1.7.1
denonavr==0.10.10

# homeassistant.components.devolo_home_control
devolo-home-control-api==0.17.4
devolo-home-control-api==0.18.1

# homeassistant.components.devolo_home_network
devolo-plc-api==0.7.1
@@ -766,6 +762,10 @@ gstreamer-player==1.1.2
# homeassistant.components.profiler
guppy3==3.1.2

# homeassistant.components.generic
# homeassistant.components.stream
ha-av==9.1.1-3

# homeassistant.components.ffmpeg
ha-ffmpeg==3.0.2

@@ -1141,7 +1141,7 @@ openerz-api==0.1.0
openevsewifi==1.1.0

# homeassistant.components.openhome
openhomedevice==2.0.1
openhomedevice==2.0.2

# homeassistant.components.opensensemap
opensensemap-api==0.2.0
@@ -2043,7 +2043,7 @@ raspyrfm-client==1.2.8
regenmaschine==2022.01.0

# homeassistant.components.renault
renault-api==0.1.10
renault-api==0.1.11

# homeassistant.components.python_script
restrictedpython==5.2
@@ -2435,7 +2435,7 @@ xbox-webapi==2.0.11
xboxapi==2.0.1

# homeassistant.components.knx
xknx==0.20.1
xknx==0.20.2

# homeassistant.components.bluesound
# homeassistant.components.fritz
@@ -2473,7 +2473,7 @@ zengge==0.2
zeroconf==0.38.4

# homeassistant.components.zha
zha-quirks==0.0.71
zha-quirks==0.0.72

# homeassistant.components.zhong_hong
zhong_hong_hvac==1.0.9
@@ -2482,7 +2482,7 @@ zhong_hong_hvac==1.0.9
ziggo-mediabox-xl==1.1.0

# homeassistant.components.zha
zigpy-deconz==0.15.0
zigpy-deconz==0.14.0

# homeassistant.components.zha
zigpy-xbee==0.14.0
@@ -2494,7 +2494,7 @@ zigpy-zigate==0.8.0
zigpy-znp==0.7.0

# homeassistant.components.zha
zigpy==0.44.1
zigpy==0.44.2

# homeassistant.components.zoneminder
zm-py==0.5.2

@@ -277,10 +277,6 @@ auroranoaa==0.0.2

# homeassistant.components.aurora_abb_powerone
aurorapy==0.2.6

# homeassistant.components.generic
# homeassistant.components.stream
av==8.1.0

# homeassistant.components.axis
axis==44

@@ -385,7 +381,7 @@ deluge-client==1.7.1
denonavr==0.10.10

# homeassistant.components.devolo_home_control
devolo-home-control-api==0.17.4
devolo-home-control-api==0.18.1

# homeassistant.components.devolo_home_network
devolo-plc-api==0.7.1
@@ -536,6 +532,10 @@ growattServer==1.1.0
# homeassistant.components.profiler
guppy3==3.1.2

# homeassistant.components.generic
# homeassistant.components.stream
ha-av==9.1.1-3

# homeassistant.components.ffmpeg
ha-ffmpeg==3.0.2

@@ -1327,7 +1327,7 @@ radios==0.1.1
regenmaschine==2022.01.0

# homeassistant.components.renault
renault-api==0.1.10
renault-api==0.1.11

# homeassistant.components.python_script
restrictedpython==5.2
@@ -1575,7 +1575,7 @@ wolf_smartset==0.1.11
xbox-webapi==2.0.11

# homeassistant.components.knx
xknx==0.20.1
xknx==0.20.2

# homeassistant.components.bluesound
# homeassistant.components.fritz
@@ -1601,10 +1601,10 @@ youless-api==0.16
zeroconf==0.38.4

# homeassistant.components.zha
zha-quirks==0.0.71
zha-quirks==0.0.72

# homeassistant.components.zha
zigpy-deconz==0.15.0
zigpy-deconz==0.14.0

# homeassistant.components.zha
zigpy-xbee==0.14.0
@@ -1616,7 +1616,7 @@ zigpy-zigate==0.8.0
zigpy-znp==0.7.0

# homeassistant.components.zha
zigpy==0.44.1
zigpy==0.44.2

# homeassistant.components.zwave_js
zwave-js-server-python==0.35.2

@@ -1,6 +1,6 @@
[metadata]
name = homeassistant
version = 2022.4.2
version = 2022.4.4
author = The Home Assistant Authors
author_email = hello@home-assistant.io
license = Apache-2.0

@@ -1157,7 +1157,7 @@ async def test_entity_media_content_type(hass: HomeAssistant):
assert state.attributes.get("media_content_type") == "movie"


async def test_entity_control(hass: HomeAssistant):
async def test_entity_control(hass: HomeAssistant, quick_play_mock):
"""Test various device and media controls."""
entity_id = "media_player.speaker"
reg = er.async_get(hass)
@@ -1200,8 +1200,13 @@ async def test_entity_control(hass: HomeAssistant):

# Turn on
await common.async_turn_on(hass, entity_id)
chromecast.play_media.assert_called_once_with(
"https://www.home-assistant.io/images/cast/splash.png", "image/png"
quick_play_mock.assert_called_once_with(
chromecast,
"default_media_receiver",
{
"media_id": "https://www.home-assistant.io/images/cast/splash.png",
"media_type": "image/png",
},
)
chromecast.quit_app.reset_mock()

@@ -92,15 +92,6 @@ async def test_get_conditions(

async def test_if_state(hass, calls):
"""Test for turn_on and turn_off conditions."""
hass.states.async_set(
"climate.entity",
const.HVAC_MODE_COOL,
{
const.ATTR_HVAC_MODE: const.HVAC_MODE_COOL,
const.ATTR_PRESET_MODE: const.PRESET_AWAY,
},
)

assert await async_setup_component(
hass,
automation.DOMAIN,
@@ -147,6 +138,20 @@ async def test_if_state(hass, calls):
]
},
)

# Should not fire, entity doesn't exist yet
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0

hass.states.async_set(
"climate.entity",
const.HVAC_MODE_COOL,
{
const.ATTR_PRESET_MODE: const.PRESET_AWAY,
},
)

hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 1
@@ -156,7 +161,6 @@ async def test_if_state(hass, calls):
"climate.entity",
const.HVAC_MODE_AUTO,
{
const.ATTR_HVAC_MODE: const.HVAC_MODE_AUTO,
const.ATTR_PRESET_MODE: const.PRESET_AWAY,
},
)
@@ -176,7 +180,6 @@ async def test_if_state(hass, calls):
"climate.entity",
const.HVAC_MODE_AUTO,
{
const.ATTR_HVAC_MODE: const.HVAC_MODE_AUTO,
const.ATTR_PRESET_MODE: const.PRESET_HOME,
},
)

@@ -388,7 +388,7 @@ async def test_ssdp_flow_upnp_udn(

async def test_ssdp_missing_services(hass: HomeAssistant) -> None:
"""Test SSDP ignores devices that are missing required services."""
# No services defined at all
# No service list at all
discovery = dataclasses.replace(MOCK_DISCOVERY)
discovery.upnp = discovery.upnp.copy()
del discovery.upnp[ssdp.ATTR_UPNP_SERVICE_LIST]
@@ -400,6 +400,18 @@ async def test_ssdp_missing_services(hass: HomeAssistant) -> None:
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "not_dmr"

# Service list does not contain services
discovery = dataclasses.replace(MOCK_DISCOVERY)
discovery.upnp = discovery.upnp.copy()
discovery.upnp[ssdp.ATTR_UPNP_SERVICE_LIST] = {"bad_key": "bad_value"}
result = await hass.config_entries.flow.async_init(
DLNA_DOMAIN,
context={"source": config_entries.SOURCE_SSDP},
data=discovery,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "not_dmr"

# AVTransport service is missing
discovery = dataclasses.replace(MOCK_DISCOVERY)
discovery.upnp = discovery.upnp.copy()
@@ -417,6 +429,28 @@ async def test_ssdp_missing_services(hass: HomeAssistant) -> None:
assert result["reason"] == "not_dmr"


async def test_ssdp_single_service(hass: HomeAssistant) -> None:
"""Test SSDP discovery info with only one service defined.

THe etree_to_dict function turns multiple services into a list of dicts, but
a single service into only a dict.
"""
discovery = dataclasses.replace(MOCK_DISCOVERY)
discovery.upnp = discovery.upnp.copy()
service_list = discovery.upnp[ssdp.ATTR_UPNP_SERVICE_LIST].copy()
# Turn mock's list of service dicts into a single dict
service_list["service"] = service_list["service"][0]
discovery.upnp[ssdp.ATTR_UPNP_SERVICE_LIST] = service_list

result = await hass.config_entries.flow.async_init(
DLNA_DOMAIN,
context={"source": config_entries.SOURCE_SSDP},
data=discovery,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "not_dmr"


async def test_ssdp_ignore_device(hass: HomeAssistant) -> None:
"""Test SSDP discovery ignores certain devices."""
discovery = dataclasses.replace(MOCK_DISCOVERY)

@@ -325,7 +325,7 @@ async def test_ssdp_flow_upnp_udn(

async def test_ssdp_missing_services(hass: HomeAssistant) -> None:
"""Test SSDP ignores devices that are missing required services."""
# No services defined at all
# No service list at all
discovery = dataclasses.replace(MOCK_DISCOVERY)
discovery.upnp = dict(discovery.upnp)
del discovery.upnp[ssdp.ATTR_UPNP_SERVICE_LIST]
@@ -337,6 +337,18 @@ async def test_ssdp_missing_services(hass: HomeAssistant) -> None:
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "not_dms"

# Service list does not contain services
discovery = dataclasses.replace(MOCK_DISCOVERY)
discovery.upnp = dict(discovery.upnp)
discovery.upnp[ssdp.ATTR_UPNP_SERVICE_LIST] = {"bad_key": "bad_value"}
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_SSDP},
data=discovery,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "not_dms"

# ContentDirectory service is missing
discovery = dataclasses.replace(MOCK_DISCOVERY)
discovery.upnp = dict(discovery.upnp)
@@ -352,3 +364,25 @@ async def test_ssdp_missing_services(hass: HomeAssistant) -> None:
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "not_dms"


async def test_ssdp_single_service(hass: HomeAssistant) -> None:
"""Test SSDP discovery info with only one service defined.

THe etree_to_dict function turns multiple services into a list of dicts, but
a single service into only a dict.
"""
discovery = dataclasses.replace(MOCK_DISCOVERY)
discovery.upnp = dict(discovery.upnp)
service_list = dict(discovery.upnp[ssdp.ATTR_UPNP_SERVICE_LIST])
# Turn mock's list of service dicts into a single dict
service_list["service"] = service_list["service"][0]
discovery.upnp[ssdp.ATTR_UPNP_SERVICE_LIST] = service_list

result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_SSDP},
data=discovery,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "not_dms"

@@ -509,3 +509,30 @@ async def test_no_os_entity(hass):

# Verify that the entity does not exist
assert not hass.states.get("update.home_assistant_operating_system_update")


async def test_setting_up_core_update_when_addon_fails(hass, caplog):
"""Test setting up core update when single addon fails."""
with patch.dict(os.environ, MOCK_ENVIRON), patch(
"homeassistant.components.hassio.HassIO.get_addon_stats",
side_effect=HassioAPIError("add-on is not running"),
), patch(
"homeassistant.components.hassio.HassIO.get_addon_changelog",
side_effect=HassioAPIError("add-on is not running"),
), patch(
"homeassistant.components.hassio.HassIO.get_addon_info",
side_effect=HassioAPIError("add-on is not running"),
):
result = await async_setup_component(
hass,
"hassio",
{"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
)
assert result
await hass.async_block_till_done()

# Verify that the core update entity does exist
state = hass.states.get("update.home_assistant_core_update")
assert state
assert state.state == "on"
assert "Could not fetch stats for test: add-on is not running" in caplog.text

@@ -114,7 +114,7 @@ async def test_carbon_monoxide_sensor_read_state(hass, utcnow):
state = await helper.poll_and_get_state()
assert state.state == "on"

assert state.attributes["device_class"] == BinarySensorDeviceClass.GAS
assert state.attributes["device_class"] == BinarySensorDeviceClass.CO


def create_occupancy_sensor_service(accessory):

@@ -1,4 +1,5 @@
"""Tests for homekit_controller config flow."""
import asyncio
from unittest import mock
import unittest.mock
from unittest.mock import AsyncMock, patch
@@ -14,6 +15,7 @@ from homeassistant import config_entries
from homeassistant.components import zeroconf
from homeassistant.components.homekit_controller import config_flow
from homeassistant.components.homekit_controller.const import KNOWN_DEVICES
from homeassistant.data_entry_flow import RESULT_TYPE_ABORT, RESULT_TYPE_CREATE_ENTRY
from homeassistant.helpers import device_registry

from tests.common import MockConfigEntry, mock_device_registry
@@ -133,7 +135,7 @@ def get_flow_context(hass, result):


def get_device_discovery_info(
    device, upper_case_props=False, missing_csharp=False
    device, upper_case_props=False, missing_csharp=False, paired=False
) -> zeroconf.ZeroconfServiceInfo:
    """Turn an aiohomekit format zeroconf entry into a homeassistant one."""
    result = zeroconf.ZeroconfServiceInfo(
@@ -150,7 +152,7 @@ def get_device_discovery_info(
            "s#": device.description.state_num,
            "ff": "0",
            "ci": "0",
            "sf": "1",
            "sf": "0" if paired else "1",
            "sh": "",
        },
        type="_hap._tcp.local.",
@@ -250,10 +252,8 @@ async def test_abort_duplicate_flow(hass, controller):
async def test_pair_already_paired_1(hass, controller):
    """Already paired."""
    device = setup_mock_accessory(controller)
    discovery_info = get_device_discovery_info(device)

    # Flag device as already paired
    discovery_info.properties["sf"] = 0x0
    discovery_info = get_device_discovery_info(device, paired=True)

    # Device is discovered
    result = await hass.config_entries.flow.async_init(
@@ -692,6 +692,7 @@ async def test_pair_form_errors_on_finish(hass, controller, exception, expected)
        "title_placeholders": {"name": "TestDevice"},
        "unique_id": "00:00:00:00:00:00",
        "source": config_entries.SOURCE_ZEROCONF,
        "pairing": True,
    }


@@ -883,3 +884,69 @@ async def test_discovery_dismiss_existing_flow_on_paired(hass, controller):
        len(hass.config_entries.flow.async_progress_by_handler("homekit_controller"))
        == 0
    )


async def test_mdns_update_to_paired_during_pairing(hass, controller):
    """Test we do not abort pairing if mdns is updated to reflect paired during pairing."""
    device = setup_mock_accessory(controller)
    discovery_info = get_device_discovery_info(device)
    discovery_info_paired = get_device_discovery_info(device, paired=True)

    # Device is discovered
    result = await hass.config_entries.flow.async_init(
        "homekit_controller",
        context={"source": config_entries.SOURCE_ZEROCONF},
        data=discovery_info,
    )

    assert get_flow_context(hass, result) == {
        "title_placeholders": {"name": "TestDevice"},
        "unique_id": "00:00:00:00:00:00",
        "source": config_entries.SOURCE_ZEROCONF,
    }

    mdns_update_to_paired = asyncio.Event()

    original_async_start_pairing = device.async_start_pairing

    async def _async_start_pairing(*args, **kwargs):
        finish_pairing = await original_async_start_pairing(*args, **kwargs)

        async def _finish_pairing(*args, **kwargs):
            # Insert an event wait to make sure
            # we trigger the mdns update in the middle of the pairing
            await mdns_update_to_paired.wait()
            return await finish_pairing(*args, **kwargs)

        return _finish_pairing

    with patch.object(device, "async_start_pairing", _async_start_pairing):
        result = await hass.config_entries.flow.async_configure(result["flow_id"])

    assert result["type"] == "form"
    assert get_flow_context(hass, result) == {
        "title_placeholders": {"name": "TestDevice"},
        "unique_id": "00:00:00:00:00:00",
        "source": config_entries.SOURCE_ZEROCONF,
    }

    # User enters pairing code
    task = asyncio.create_task(
        hass.config_entries.flow.async_configure(
            result["flow_id"], user_input={"pairing_code": "111-22-333"}
        )
    )
    # Make sure when the device is discovered as paired via mdns
    # it does not abort pairing if it happens before pairing is finished
    result2 = await hass.config_entries.flow.async_init(
        "homekit_controller",
        context={"source": config_entries.SOURCE_ZEROCONF},
        data=discovery_info_paired,
    )
    assert result2["type"] == RESULT_TYPE_ABORT
    assert result2["reason"] == "already_paired"
    mdns_update_to_paired.set()
    result = await task
    assert result["type"] == RESULT_TYPE_CREATE_ENTRY
    assert result["title"] == "Koogeek-LS1-20833F"
    assert result["data"] == {}

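The wrapper used above, which parks the pairing callback on an asyncio.Event so a concurrent mDNS update can be injected at an exact point, is a general trick for making races deterministic in tests. A minimal, self-contained sketch; gate, finish_pairing and the log list are illustrative names only:

# Illustrative only: gate an async callback on an Event so a test can perform
# a concurrent action before letting the callback finish.
import asyncio


async def main() -> None:
    gate = asyncio.Event()
    log: list[str] = []

    async def finish_pairing() -> str:
        await gate.wait()  # block until the test releases the gate
        return "paired"

    task = asyncio.create_task(finish_pairing())
    log.append("concurrent discovery handled while pairing is in flight")
    gate.set()  # release the gated coroutine
    log.append(await task)
    assert log[-1] == "paired"


asyncio.run(main())
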
@@ -36,6 +36,11 @@ async def test_process_play_media_url(hass, mock_sign_path):
        async_process_play_media_url(hass, "https://not-hass.com/path")
        == "https://not-hass.com/path"
    )
    # Not changing a url that is not http/https
    assert (
        async_process_play_media_url(hass, "file:///tmp/test.mp3")
        == "file:///tmp/test.mp3"
    )

    # Testing signing hass URLs
    assert (

@@ -14,9 +14,11 @@ from homeassistant.components.media_player.const import (
    SERVICE_PLAY_MEDIA,
    SERVICE_SELECT_SOUND_MODE,
    SERVICE_SELECT_SOURCE,
    MediaPlayerEntityFeature,
)
from homeassistant.components.media_player.reproduce_state import async_reproduce_states
from homeassistant.const import (
    ATTR_SUPPORTED_FEATURES,
    SERVICE_MEDIA_PAUSE,
    SERVICE_MEDIA_PLAY,
    SERVICE_MEDIA_STOP,
@@ -39,31 +41,47 @@ ENTITY_2 = "media_player.test2"


@pytest.mark.parametrize(
    "service,state",
    "service,state,supported_feature",
    [
        (SERVICE_TURN_ON, STATE_ON),
        (SERVICE_TURN_OFF, STATE_OFF),
        (SERVICE_MEDIA_PLAY, STATE_PLAYING),
        (SERVICE_MEDIA_STOP, STATE_IDLE),
        (SERVICE_MEDIA_PAUSE, STATE_PAUSED),
        (SERVICE_TURN_ON, STATE_ON, MediaPlayerEntityFeature.TURN_ON),
        (SERVICE_TURN_OFF, STATE_OFF, MediaPlayerEntityFeature.TURN_OFF),
        (SERVICE_MEDIA_PLAY, STATE_PLAYING, MediaPlayerEntityFeature.PLAY),
        (SERVICE_MEDIA_STOP, STATE_IDLE, MediaPlayerEntityFeature.STOP),
        (SERVICE_MEDIA_PAUSE, STATE_PAUSED, MediaPlayerEntityFeature.PAUSE),
    ],
)
async def test_state(hass, service, state):
async def test_state(hass, service, state, supported_feature):
    """Test that we can turn a state into a service call."""
    calls_1 = async_mock_service(hass, DOMAIN, service)
    if service != SERVICE_TURN_ON:
        async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)

    # An entity that does not support the feature should not get the service call
    hass.states.async_set(ENTITY_1, "something", {ATTR_SUPPORTED_FEATURES: 0})
    await async_reproduce_states(hass, [State(ENTITY_1, state)])

    await hass.async_block_till_done()
    assert len(calls_1) == 0

    hass.states.async_set(
        ENTITY_1, "something", {ATTR_SUPPORTED_FEATURES: supported_feature}
    )
    await async_reproduce_states(hass, [State(ENTITY_1, state)])
    assert len(calls_1) == 1
    assert calls_1[0].data == {"entity_id": ENTITY_1}


async def test_turn_on_with_mode(hass):
    """Test that state with additional attributes call multiple services."""
    hass.states.async_set(
        ENTITY_1,
        "something",
        {
            ATTR_SUPPORTED_FEATURES: MediaPlayerEntityFeature.TURN_ON
            | MediaPlayerEntityFeature.SELECT_SOUND_MODE
        },
    )

    calls_1 = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
    calls_2 = async_mock_service(hass, DOMAIN, SERVICE_SELECT_SOUND_MODE)

@@ -82,6 +100,13 @@ async def test_turn_on_with_mode(hass):

async def test_multiple_same_state(hass):
    """Test that multiple states with same state gets calls."""
    for entity in ENTITY_1, ENTITY_2:
        hass.states.async_set(
            entity,
            "something",
            {ATTR_SUPPORTED_FEATURES: MediaPlayerEntityFeature.TURN_ON},
        )

    calls_1 = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)

    await async_reproduce_states(hass, [State(ENTITY_1, "on"), State(ENTITY_2, "on")])
@@ -96,6 +121,16 @@ async def test_multiple_same_state(hass):

async def test_multiple_different_state(hass):
    """Test that multiple states with different state gets calls."""
    for entity in ENTITY_1, ENTITY_2:
        hass.states.async_set(
            entity,
            "something",
            {
                ATTR_SUPPORTED_FEATURES: MediaPlayerEntityFeature.TURN_ON
                | MediaPlayerEntityFeature.TURN_OFF
            },
        )

    calls_1 = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
    calls_2 = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF)

@@ -111,6 +146,12 @@ async def test_multiple_different_state(hass):

async def test_state_with_context(hass):
    """Test that context is forwarded."""
    hass.states.async_set(
        ENTITY_1,
        "something",
        {ATTR_SUPPORTED_FEATURES: MediaPlayerEntityFeature.TURN_ON},
    )

    calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)

    context = Context()
@@ -126,6 +167,16 @@ async def test_state_with_context(hass):

async def test_attribute_no_state(hass):
    """Test that no state service call is made with none state."""
    hass.states.async_set(
        ENTITY_1,
        "something",
        {
            ATTR_SUPPORTED_FEATURES: MediaPlayerEntityFeature.TURN_ON
            | MediaPlayerEntityFeature.TURN_OFF
            | MediaPlayerEntityFeature.SELECT_SOUND_MODE
        },
    )

    calls_1 = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
    calls_2 = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF)
    calls_3 = async_mock_service(hass, DOMAIN, SERVICE_SELECT_SOUND_MODE)
@@ -145,16 +196,38 @@ async def test_attribute_no_state(hass):


@pytest.mark.parametrize(
    "service,attribute",
    "service,attribute,supported_feature",
    [
        (SERVICE_VOLUME_SET, ATTR_MEDIA_VOLUME_LEVEL),
        (SERVICE_VOLUME_MUTE, ATTR_MEDIA_VOLUME_MUTED),
        (SERVICE_SELECT_SOURCE, ATTR_INPUT_SOURCE),
        (SERVICE_SELECT_SOUND_MODE, ATTR_SOUND_MODE),
        (
            SERVICE_VOLUME_SET,
            ATTR_MEDIA_VOLUME_LEVEL,
            MediaPlayerEntityFeature.VOLUME_SET,
        ),
        (
            SERVICE_VOLUME_MUTE,
            ATTR_MEDIA_VOLUME_MUTED,
            MediaPlayerEntityFeature.VOLUME_MUTE,
        ),
        (
            SERVICE_SELECT_SOURCE,
            ATTR_INPUT_SOURCE,
            MediaPlayerEntityFeature.SELECT_SOURCE,
        ),
        (
            SERVICE_SELECT_SOUND_MODE,
            ATTR_SOUND_MODE,
            MediaPlayerEntityFeature.SELECT_SOUND_MODE,
        ),
    ],
)
async def test_attribute(hass, service, attribute):
async def test_attribute(hass, service, attribute, supported_feature):
    """Test that service call is made for each attribute."""
    hass.states.async_set(
        ENTITY_1,
        "something",
        {ATTR_SUPPORTED_FEATURES: supported_feature},
    )

    calls_1 = async_mock_service(hass, DOMAIN, service)

    value = "dummy"
@@ -168,7 +241,12 @@ async def test_attribute(hass, service, attribute):


async def test_play_media(hass):
    """Test that no state service call is made with none state."""
    """Test playing media."""
    hass.states.async_set(
        ENTITY_1,
        "something",
        {ATTR_SUPPORTED_FEATURES: MediaPlayerEntityFeature.PLAY_MEDIA},
    )
    calls_1 = async_mock_service(hass, DOMAIN, SERVICE_PLAY_MEDIA)

    value_1 = "dummy_1"

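The reworked parametrizations above encode the rule these commits test: reproduce_state should only call a media_player service when the entity's supported-features bitmask contains the matching flag. A minimal sketch of that gating with an illustrative IntFlag rather than Home Assistant's real constants:

# Illustrative only: issue a service call only if the entity advertises the
# corresponding feature flag in its supported_features bitmask.
from enum import IntFlag


class Feature(IntFlag):
    TURN_ON = 1
    TURN_OFF = 2
    PLAY = 4


SERVICE_REQUIRED_FEATURE = {
    "turn_on": Feature.TURN_ON,
    "turn_off": Feature.TURN_OFF,
    "media_play": Feature.PLAY,
}


def should_call(service: str, supported_features: int) -> bool:
    required = SERVICE_REQUIRED_FEATURE[service]
    return bool(supported_features & required)


assert should_call("turn_on", int(Feature.TURN_ON | Feature.PLAY))
assert not should_call("turn_off", 0)  # unsupported feature: skip the call
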
@@ -94,10 +94,11 @@ async def test_object_growth_logging(hass, caplog):
    assert hass.services.has_service(DOMAIN, SERVICE_START_LOG_OBJECTS)
    assert hass.services.has_service(DOMAIN, SERVICE_STOP_LOG_OBJECTS)

    await hass.services.async_call(
        DOMAIN, SERVICE_START_LOG_OBJECTS, {CONF_SCAN_INTERVAL: 10}
    )
    await hass.async_block_till_done()
    with patch("homeassistant.components.profiler.objgraph.growth"):
        await hass.services.async_call(
            DOMAIN, SERVICE_START_LOG_OBJECTS, {CONF_SCAN_INTERVAL: 10}
        )
        await hass.async_block_till_done()

    assert "Growth" in caplog.text
    caplog.clear()

@@ -1451,6 +1451,31 @@ async def test_update_missing_mac_unique_id_ssdp_location_added_from_ssdp(
    assert entry.unique_id == "be9554b9-c9fb-41f4-8920-22da015376a4"


@pytest.mark.usefixtures(
    "remote", "remotews", "remoteencws_failing", "rest_api_failing"
)
async def test_update_zeroconf_discovery_preserved_unique_id(
    hass: HomeAssistant,
) -> None:
    """Test zeroconf discovery preserves unique id."""
    entry = MockConfigEntry(
        domain=DOMAIN,
        data={**MOCK_OLD_ENTRY, CONF_MAC: "aa:bb:zz:ee:rr:oo"},
        unique_id="original",
    )
    entry.add_to_hass(hass)
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_ZEROCONF},
        data=MOCK_ZEROCONF_DATA,
    )
    await hass.async_block_till_done()
    assert result["type"] == "abort"
    assert result["reason"] == "not_supported"
    assert entry.data[CONF_MAC] == "aa:bb:zz:ee:rr:oo"
    assert entry.unique_id == "original"


@pytest.mark.usefixtures("remotews", "rest_api", "remoteencws_failing")
async def test_update_missing_mac_unique_id_added_ssdp_location_updated_from_ssdp(
    hass: HomeAssistant,

@@ -325,20 +325,20 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes

@pytest.mark.parametrize("state_class", ["total"])
@pytest.mark.parametrize(
    "units,device_class,unit,display_unit,factor,factor2",
    "units,device_class,unit,display_unit,factor",
    [
        (IMPERIAL_SYSTEM, "energy", "kWh", "kWh", 1, 1),
        (IMPERIAL_SYSTEM, "energy", "Wh", "kWh", 1 / 1000, 1),
        (IMPERIAL_SYSTEM, "monetary", "EUR", "EUR", 1, 1),
        (IMPERIAL_SYSTEM, "monetary", "SEK", "SEK", 1, 1),
        (IMPERIAL_SYSTEM, "gas", "m³", "ft³", 35.314666711, 35.314666711),
        (IMPERIAL_SYSTEM, "gas", "ft³", "ft³", 1, 35.314666711),
        (METRIC_SYSTEM, "energy", "kWh", "kWh", 1, 1),
        (METRIC_SYSTEM, "energy", "Wh", "kWh", 1 / 1000, 1),
        (METRIC_SYSTEM, "monetary", "EUR", "EUR", 1, 1),
        (METRIC_SYSTEM, "monetary", "SEK", "SEK", 1, 1),
        (METRIC_SYSTEM, "gas", "m³", "m³", 1, 1),
        (METRIC_SYSTEM, "gas", "ft³", "m³", 0.0283168466, 1),
        (IMPERIAL_SYSTEM, "energy", "kWh", "kWh", 1),
        (IMPERIAL_SYSTEM, "energy", "Wh", "kWh", 1 / 1000),
        (IMPERIAL_SYSTEM, "monetary", "EUR", "EUR", 1),
        (IMPERIAL_SYSTEM, "monetary", "SEK", "SEK", 1),
        (IMPERIAL_SYSTEM, "gas", "m³", "ft³", 35.314666711),
        (IMPERIAL_SYSTEM, "gas", "ft³", "ft³", 1),
        (METRIC_SYSTEM, "energy", "kWh", "kWh", 1),
        (METRIC_SYSTEM, "energy", "Wh", "kWh", 1 / 1000),
        (METRIC_SYSTEM, "monetary", "EUR", "EUR", 1),
        (METRIC_SYSTEM, "monetary", "SEK", "SEK", 1),
        (METRIC_SYSTEM, "gas", "m³", "m³", 1),
        (METRIC_SYSTEM, "gas", "ft³", "m³", 0.0283168466),
    ],
)
async def test_compile_hourly_sum_statistics_amount(
@@ -351,7 +351,6 @@ async def test_compile_hourly_sum_statistics_amount(
    unit,
    display_unit,
    factor,
    factor2,
):
    """Test compiling hourly statistics."""
    period0 = dt_util.utcnow()
@@ -480,8 +479,8 @@ async def test_compile_hourly_sum_statistics_amount(
    assert response["success"]
    await async_wait_recording_done_without_instance(hass)

    expected_stats["sensor.test1"][1]["sum"] = approx(factor * 40.0 + factor2 * 100)
    expected_stats["sensor.test1"][2]["sum"] = approx(factor * 70.0 + factor2 * 100)
    expected_stats["sensor.test1"][1]["sum"] = approx(factor * 40.0 + 100)
    expected_stats["sensor.test1"][2]["sum"] = approx(factor * 70.0 + 100)
    stats = statistics_during_period(hass, period0, period="5minute")
    assert stats == expected_stats

@@ -499,8 +498,8 @@ async def test_compile_hourly_sum_statistics_amount(
    assert response["success"]
    await async_wait_recording_done_without_instance(hass)

    expected_stats["sensor.test1"][1]["sum"] = approx(factor * 40.0 + factor2 * 100)
    expected_stats["sensor.test1"][2]["sum"] = approx(factor * 70.0 - factor2 * 300)
    expected_stats["sensor.test1"][1]["sum"] = approx(factor * 40.0 + 100)
    expected_stats["sensor.test1"][2]["sum"] = approx(factor * 70.0 - 300)
    stats = statistics_during_period(hass, period0, period="5minute")
    assert stats == expected_stats

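For reference, the factor column above is just the multiplier from the sensor's native unit to the display unit (for example, 1 m³ ≈ 35.314666711 ft³ under the imperial system), and dropping factor2 suggests the adjustments of 100 and 300 are now applied in display units directly. A small arithmetic illustration, not Home Assistant's conversion code:

# Illustrative only: applying a unit factor to a recorded sum.
CUBIC_FEET_PER_CUBIC_METER = 35.314666711

sum_in_m3 = 40.0
sum_in_ft3 = sum_in_m3 * CUBIC_FEET_PER_CUBIC_METER
print(round(sum_in_ft3, 2))  # 1412.59 ft³ shown for a 40 m³ total
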
@@ -2464,6 +2463,16 @@ def test_compile_statistics_hourly_daily_monthly_summary(
        },
    ]

    # Adjust the inserted statistics
    sum_adjustment = -10
    sum_adjustement_start = zero + timedelta(minutes=65)
    for i in range(13, 24):
        expected_sums["sensor.test4"][i] += sum_adjustment
    recorder.async_adjust_statistics(
        "sensor.test4", sum_adjustement_start, sum_adjustment
    )
    wait_recording_done(hass)

    stats = statistics_during_period(hass, zero, period="5minute")
    expected_stats = {
        "sensor.test1": [],

@@ -146,8 +146,8 @@ async def test_v4_sensor(hass: HomeAssistant) -> None:
    check_sensor_state(hass, CO, "0.0")
    check_sensor_state(hass, NO2, "20.08")
    check_sensor_state(hass, SO2, "4.32")
    check_sensor_state(hass, PM25, "5.3")
    check_sensor_state(hass, PM10, "20.13")
    check_sensor_state(hass, PM25, "0.15")
    check_sensor_state(hass, PM10, "0.57")
    check_sensor_state(hass, MEP_AQI, "23")
    check_sensor_state(hass, MEP_HEALTH_CONCERN, "good")
    check_sensor_state(hass, MEP_PRIMARY_POLLUTANT, "pm10")
@@ -158,14 +158,14 @@ async def test_v4_sensor(hass: HomeAssistant) -> None:
    check_sensor_state(hass, GRASS_POLLEN, "none")
    check_sensor_state(hass, WEED_POLLEN, "none")
    check_sensor_state(hass, TREE_POLLEN, "none")
    check_sensor_state(hass, FEELS_LIKE, "38.5")
    check_sensor_state(hass, DEW_POINT, "22.68")
    check_sensor_state(hass, PRESSURE_SURFACE_LEVEL, "997.97")
    check_sensor_state(hass, GHI, "0.0")
    check_sensor_state(hass, CLOUD_BASE, "1.19")
    check_sensor_state(hass, FEELS_LIKE, "101.3")
    check_sensor_state(hass, DEW_POINT, "72.82")
    check_sensor_state(hass, PRESSURE_SURFACE_LEVEL, "29.47")
    check_sensor_state(hass, GHI, "0")
    check_sensor_state(hass, CLOUD_BASE, "0.74")
    check_sensor_state(hass, CLOUD_COVER, "100")
    check_sensor_state(hass, CLOUD_CEILING, "1.19")
    check_sensor_state(hass, WIND_GUST, "5.65")
    check_sensor_state(hass, CLOUD_CEILING, "0.74")
    check_sensor_state(hass, WIND_GUST, "12.64")
    check_sensor_state(hass, PRECIPITATION_TYPE, "rain")


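The updated expectations above appear to be the imperial-unit renderings of the previous metric values (°C to °F, hPa to inHg, m/s to mph, km to mi). A quick arithmetic check using plain conversions rather than the integration's own utilities:

# Illustrative only: conversions matching the updated expected values.
feels_like_c = 38.5
dew_point_c = 22.68
pressure_hpa = 997.97
wind_gust_ms = 5.65

print(round(feels_like_c * 9 / 5 + 32, 1))   # 101.3 °F
print(round(dew_point_c * 9 / 5 + 32, 2))    # 72.82 °F
print(round(pressure_hpa * 0.02953, 2))      # 29.47 inHg
print(round(wind_gust_ms * 2.236936, 2))     # 12.64 mph
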
@@ -11,6 +11,7 @@ from zigpy.const import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE
import zigpy.device
import zigpy.group
import zigpy.profiles
from zigpy.state import State
import zigpy.types
import zigpy.zdo.types as zdo_t

@@ -54,6 +55,7 @@ def zigpy_app_controller():
    app.ieee.return_value = zigpy.types.EUI64.convert("00:15:8d:00:02:32:4f:32")
    type(app).nwk = PropertyMock(return_value=zigpy.types.NWK(0x0000))
    type(app).devices = PropertyMock(return_value={})
    type(app).state = PropertyMock(return_value=State())
    return app


tests/components/zha/test_diagnostics.py (new file, 86 lines)
@@ -0,0 +1,86 @@
"""Tests for the diagnostics data provided by the ESPHome integration."""
|
||||
|
||||
|
||||
import pytest
import zigpy.profiles.zha as zha
import zigpy.zcl.clusters.security as security

from homeassistant.components.diagnostics.const import REDACTED
from homeassistant.components.zha.core.device import ZHADevice
from homeassistant.components.zha.diagnostics import KEYS_TO_REDACT
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import async_get

from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE

from tests.components.diagnostics import (
    get_diagnostics_for_config_entry,
    get_diagnostics_for_device,
)

CONFIG_ENTRY_DIAGNOSTICS_KEYS = [
    "config",
    "config_entry",
    "application_state",
    "versions",
]


@pytest.fixture
def zigpy_device(zigpy_device_mock):
    """Device tracker zigpy device."""
    endpoints = {
        1: {
            SIG_EP_INPUT: [security.IasAce.cluster_id],
            SIG_EP_OUTPUT: [],
            SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL,
            SIG_EP_PROFILE: zha.PROFILE_ID,
        }
    }
    return zigpy_device_mock(
        endpoints, node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00"
    )


async def test_diagnostics_for_config_entry(
    hass: HomeAssistant,
    hass_client,
    config_entry,
    zha_device_joined,
    zigpy_device,
):
    """Test diagnostics for config entry."""
    await zha_device_joined(zigpy_device)
    diagnostics_data = await get_diagnostics_for_config_entry(
        hass, hass_client, config_entry
    )
    assert diagnostics_data
    for key in CONFIG_ENTRY_DIAGNOSTICS_KEYS:
        assert key in diagnostics_data
        assert diagnostics_data[key] is not None


async def test_diagnostics_for_device(
    hass: HomeAssistant,
    hass_client,
    config_entry,
    zha_device_joined,
    zigpy_device,
):
    """Test diagnostics for device."""

    zha_device: ZHADevice = await zha_device_joined(zigpy_device)
    dev_reg = async_get(hass)
    device = dev_reg.async_get_device({("zha", str(zha_device.ieee))})
    assert device
    diagnostics_data = await get_diagnostics_for_device(
        hass, hass_client, config_entry, device
    )
    assert diagnostics_data
    device_info: dict = zha_device.zha_device_info
    for key, value in device_info.items():
        assert key in diagnostics_data
        if key not in KEYS_TO_REDACT:
            assert diagnostics_data[key] == value
        else:
            assert diagnostics_data[key] == REDACTED
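To make the redaction being asserted concrete: KEYS_TO_REDACT is imported from the integration above, but the redact() helper and the sample payload in this sketch are illustrative only:

# Illustrative only: replace sensitive keys in a diagnostics payload with a
# fixed REDACTED marker and leave everything else untouched.
REDACTED = "**REDACTED**"
KEYS_TO_REDACT = {"ieee", "nwk"}


def redact(data: dict) -> dict:
    return {
        key: (REDACTED if key in KEYS_TO_REDACT else value)
        for key, value in data.items()
    }


device_info = {"ieee": "00:15:8d:00:02:32:4f:32", "model": "lumi.plug", "nwk": 0x1234}
print(redact(device_info))
# {'ieee': '**REDACTED**', 'model': 'lumi.plug', 'nwk': '**REDACTED**'}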