forked from home-assistant/core
Compare commits
2 Commits
core_trigg
...
media-sele
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2e2853058c | ||
|
|
48fd22c3d2 |
@@ -89,7 +89,6 @@ from .helpers import (
|
||||
restore_state,
|
||||
template,
|
||||
translation,
|
||||
trigger,
|
||||
)
|
||||
from .helpers.dispatcher import async_dispatcher_send_internal
|
||||
from .helpers.storage import get_internal_store_manager
|
||||
@@ -453,7 +452,6 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
|
||||
create_eager_task(restore_state.async_load(hass)),
|
||||
create_eager_task(hass.config_entries.async_initialize()),
|
||||
create_eager_task(async_get_system_info(hass)),
|
||||
create_eager_task(trigger.async_setup(hass)),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -105,6 +105,11 @@ DEFAULT_MAX_HUMIDITY = 99
|
||||
|
||||
CONVERTIBLE_ATTRIBUTE = [ATTR_TEMPERATURE, ATTR_TARGET_TEMP_LOW, ATTR_TARGET_TEMP_HIGH]
|
||||
|
||||
# Can be removed in 2025.1 after deprecation period of the new feature flags
|
||||
CHECK_TURN_ON_OFF_FEATURE_FLAG = (
|
||||
ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF
|
||||
)
|
||||
|
||||
SET_TEMPERATURE_SCHEMA = vol.All(
|
||||
cv.has_at_least_one_key(
|
||||
ATTR_TEMPERATURE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW
|
||||
|
||||
@@ -300,6 +300,10 @@ class CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
def supported_features(self) -> CoverEntityFeature:
|
||||
"""Flag supported features."""
|
||||
if (features := self._attr_supported_features) is not None:
|
||||
if type(features) is int:
|
||||
new_features = CoverEntityFeature(features)
|
||||
self._report_deprecated_supported_features_values(new_features)
|
||||
return new_features
|
||||
return features
|
||||
|
||||
supported_features = (
|
||||
|
||||
@@ -87,7 +87,6 @@ class DevoloScannerEntity( # pylint: disable=hass-enforce-class-module
|
||||
):
|
||||
"""Representation of a devolo device tracker."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_translation_key = "device_tracker"
|
||||
|
||||
def __init__(
|
||||
@@ -100,7 +99,6 @@ class DevoloScannerEntity( # pylint: disable=hass-enforce-class-module
|
||||
super().__init__(coordinator)
|
||||
self._device = device
|
||||
self._attr_mac_address = mac
|
||||
self._attr_name = mac
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, str]:
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["discord"],
|
||||
"requirements": ["nextcord==3.1.0"]
|
||||
"requirements": ["nextcord==2.6.0"]
|
||||
}
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/google",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["googleapiclient"],
|
||||
"requirements": ["gcal-sync==7.1.0", "oauth2client==4.1.3", "ical==10.0.4"]
|
||||
"requirements": ["gcal-sync==7.1.0", "oauth2client==4.1.3", "ical==10.0.0"]
|
||||
}
|
||||
|
||||
@@ -133,8 +133,8 @@ class HERERoutingDataUpdateCoordinator(DataUpdateCoordinator[HERETravelTimeData]
|
||||
def _parse_routing_response(self, response: dict[str, Any]) -> HERETravelTimeData:
|
||||
"""Parse the routing response dict to a HERETravelTimeData."""
|
||||
distance: float = 0.0
|
||||
duration: int = 0
|
||||
duration_in_traffic: int = 0
|
||||
duration: float = 0.0
|
||||
duration_in_traffic: float = 0.0
|
||||
|
||||
for section in response["routes"][0]["sections"]:
|
||||
distance += DistanceConverter.convert(
|
||||
@@ -167,8 +167,8 @@ class HERERoutingDataUpdateCoordinator(DataUpdateCoordinator[HERETravelTimeData]
|
||||
destination_name = names[0]["value"]
|
||||
return HERETravelTimeData(
|
||||
attribution=None,
|
||||
duration=duration,
|
||||
duration_in_traffic=duration_in_traffic,
|
||||
duration=round(duration / 60),
|
||||
duration_in_traffic=round(duration_in_traffic / 60),
|
||||
distance=distance,
|
||||
origin=f"{mapped_origin_lat},{mapped_origin_lon}",
|
||||
destination=f"{mapped_destination_lat},{mapped_destination_lon}",
|
||||
@@ -271,13 +271,13 @@ class HERETransitDataUpdateCoordinator(
|
||||
UnitOfLength.METERS,
|
||||
UnitOfLength.KILOMETERS,
|
||||
)
|
||||
duration: int = sum(
|
||||
duration: float = sum(
|
||||
section["travelSummary"]["duration"] for section in sections
|
||||
)
|
||||
return HERETravelTimeData(
|
||||
attribution=attribution,
|
||||
duration=duration,
|
||||
duration_in_traffic=duration,
|
||||
duration=round(duration / 60),
|
||||
duration_in_traffic=round(duration / 60),
|
||||
distance=distance,
|
||||
origin=f"{mapped_origin_lat},{mapped_origin_lon}",
|
||||
destination=f"{mapped_destination_lat},{mapped_destination_lon}",
|
||||
|
||||
@@ -56,8 +56,7 @@ def sensor_descriptions(travel_mode: str) -> tuple[SensorEntityDescription, ...]
|
||||
key=ATTR_DURATION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
suggested_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
translation_key="duration_in_traffic",
|
||||
@@ -65,8 +64,7 @@ def sensor_descriptions(travel_mode: str) -> tuple[SensorEntityDescription, ...]
|
||||
key=ATTR_DURATION_IN_TRAFFIC,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
suggested_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
translation_key="distance",
|
||||
|
||||
@@ -21,7 +21,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/home_connect",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aiohomeconnect"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aiohomeconnect==0.18.0"],
|
||||
"requirements": ["aiohomeconnect==0.17.1"],
|
||||
"zeroconf": ["_homeconnect._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -1,71 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup: done
|
||||
appropriate-polling:
|
||||
status: done
|
||||
comment: |
|
||||
Full polling is performed at the configuration entry setup and
|
||||
device polling is performed when a CONNECTED or a PAIRED event is received.
|
||||
If many CONNECTED or PAIRED events are received for a device within a short time span,
|
||||
the integration will stop polling for that device and will create a repair issue.
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices: done
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default:
|
||||
status: done
|
||||
comment: |
|
||||
Event entities are disabled by default to prevent user confusion regarding
|
||||
which events are supported by its appliance.
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration doesn't have settings in its configuration flow.
|
||||
repair-issues: done
|
||||
stale-devices: done
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: done
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aioautomower"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aioautomower==2025.6.0"]
|
||||
"requirements": ["aioautomower==2025.5.1"]
|
||||
}
|
||||
|
||||
@@ -37,5 +37,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pylamarzocco"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pylamarzocco==2.0.9"]
|
||||
"requirements": ["pylamarzocco==2.0.8"]
|
||||
}
|
||||
|
||||
@@ -58,10 +58,6 @@ ENTITIES: tuple[LaMarzoccoNumberEntityDescription, ...] = (
|
||||
CoffeeBoiler, machine.dashboard.config[WidgetType.CM_COFFEE_BOILER]
|
||||
).target_temperature
|
||||
),
|
||||
available_fn=(
|
||||
lambda coordinator: WidgetType.CM_COFFEE_BOILER
|
||||
in coordinator.device.dashboard.config
|
||||
),
|
||||
),
|
||||
LaMarzoccoNumberEntityDescription(
|
||||
key="smart_standby_time",
|
||||
@@ -225,7 +221,7 @@ class LaMarzoccoNumberEntity(LaMarzoccoEntity, NumberEntity):
|
||||
entity_description: LaMarzoccoNumberEntityDescription
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | int:
|
||||
def native_value(self) -> float:
|
||||
"""Return the current value."""
|
||||
return self.entity_description.native_value_fn(self.coordinator.device)
|
||||
|
||||
|
||||
@@ -57,10 +57,6 @@ ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = (
|
||||
).ready_start_time
|
||||
),
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
available_fn=(
|
||||
lambda coordinator: WidgetType.CM_COFFEE_BOILER
|
||||
in coordinator.device.dashboard.config
|
||||
),
|
||||
),
|
||||
LaMarzoccoSensorEntityDescription(
|
||||
key="steam_boiler_ready_time",
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/local_calendar",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["ical"],
|
||||
"requirements": ["ical==10.0.4"]
|
||||
"requirements": ["ical==10.0.0"]
|
||||
}
|
||||
|
||||
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/local_todo",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["ical==10.0.4"]
|
||||
"requirements": ["ical==10.0.0"]
|
||||
}
|
||||
|
||||
@@ -814,6 +814,19 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
"""Flag media player features that are supported."""
|
||||
return self._attr_supported_features
|
||||
|
||||
@property
|
||||
def supported_features_compat(self) -> MediaPlayerEntityFeature:
|
||||
"""Return the supported features as MediaPlayerEntityFeature.
|
||||
|
||||
Remove this compatibility shim in 2025.1 or later.
|
||||
"""
|
||||
features = self.supported_features
|
||||
if type(features) is int:
|
||||
new_features = MediaPlayerEntityFeature(features)
|
||||
self._report_deprecated_supported_features_values(new_features)
|
||||
return new_features
|
||||
return features
|
||||
|
||||
def turn_on(self) -> None:
|
||||
"""Turn the media player on."""
|
||||
raise NotImplementedError
|
||||
@@ -953,85 +966,87 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
@property
|
||||
def support_play(self) -> bool:
|
||||
"""Boolean if play is supported."""
|
||||
return MediaPlayerEntityFeature.PLAY in self.supported_features
|
||||
return MediaPlayerEntityFeature.PLAY in self.supported_features_compat
|
||||
|
||||
@final
|
||||
@property
|
||||
def support_pause(self) -> bool:
|
||||
"""Boolean if pause is supported."""
|
||||
return MediaPlayerEntityFeature.PAUSE in self.supported_features
|
||||
return MediaPlayerEntityFeature.PAUSE in self.supported_features_compat
|
||||
|
||||
@final
|
||||
@property
|
||||
def support_stop(self) -> bool:
|
||||
"""Boolean if stop is supported."""
|
||||
return MediaPlayerEntityFeature.STOP in self.supported_features
|
||||
return MediaPlayerEntityFeature.STOP in self.supported_features_compat
|
||||
|
||||
@final
|
||||
@property
|
||||
def support_seek(self) -> bool:
|
||||
"""Boolean if seek is supported."""
|
||||
return MediaPlayerEntityFeature.SEEK in self.supported_features
|
||||
return MediaPlayerEntityFeature.SEEK in self.supported_features_compat
|
||||
|
||||
@final
|
||||
@property
|
||||
def support_volume_set(self) -> bool:
|
||||
"""Boolean if setting volume is supported."""
|
||||
return MediaPlayerEntityFeature.VOLUME_SET in self.supported_features
|
||||
return MediaPlayerEntityFeature.VOLUME_SET in self.supported_features_compat
|
||||
|
||||
@final
|
||||
@property
|
||||
def support_volume_mute(self) -> bool:
|
||||
"""Boolean if muting volume is supported."""
|
||||
return MediaPlayerEntityFeature.VOLUME_MUTE in self.supported_features
|
||||
return MediaPlayerEntityFeature.VOLUME_MUTE in self.supported_features_compat
|
||||
|
||||
@final
|
||||
@property
|
||||
def support_previous_track(self) -> bool:
|
||||
"""Boolean if previous track command supported."""
|
||||
return MediaPlayerEntityFeature.PREVIOUS_TRACK in self.supported_features
|
||||
return MediaPlayerEntityFeature.PREVIOUS_TRACK in self.supported_features_compat
|
||||
|
||||
@final
|
||||
@property
|
||||
def support_next_track(self) -> bool:
|
||||
"""Boolean if next track command supported."""
|
||||
return MediaPlayerEntityFeature.NEXT_TRACK in self.supported_features
|
||||
return MediaPlayerEntityFeature.NEXT_TRACK in self.supported_features_compat
|
||||
|
||||
@final
|
||||
@property
|
||||
def support_play_media(self) -> bool:
|
||||
"""Boolean if play media command supported."""
|
||||
return MediaPlayerEntityFeature.PLAY_MEDIA in self.supported_features
|
||||
return MediaPlayerEntityFeature.PLAY_MEDIA in self.supported_features_compat
|
||||
|
||||
@final
|
||||
@property
|
||||
def support_select_source(self) -> bool:
|
||||
"""Boolean if select source command supported."""
|
||||
return MediaPlayerEntityFeature.SELECT_SOURCE in self.supported_features
|
||||
return MediaPlayerEntityFeature.SELECT_SOURCE in self.supported_features_compat
|
||||
|
||||
@final
|
||||
@property
|
||||
def support_select_sound_mode(self) -> bool:
|
||||
"""Boolean if select sound mode command supported."""
|
||||
return MediaPlayerEntityFeature.SELECT_SOUND_MODE in self.supported_features
|
||||
return (
|
||||
MediaPlayerEntityFeature.SELECT_SOUND_MODE in self.supported_features_compat
|
||||
)
|
||||
|
||||
@final
|
||||
@property
|
||||
def support_clear_playlist(self) -> bool:
|
||||
"""Boolean if clear playlist command supported."""
|
||||
return MediaPlayerEntityFeature.CLEAR_PLAYLIST in self.supported_features
|
||||
return MediaPlayerEntityFeature.CLEAR_PLAYLIST in self.supported_features_compat
|
||||
|
||||
@final
|
||||
@property
|
||||
def support_shuffle_set(self) -> bool:
|
||||
"""Boolean if shuffle is supported."""
|
||||
return MediaPlayerEntityFeature.SHUFFLE_SET in self.supported_features
|
||||
return MediaPlayerEntityFeature.SHUFFLE_SET in self.supported_features_compat
|
||||
|
||||
@final
|
||||
@property
|
||||
def support_grouping(self) -> bool:
|
||||
"""Boolean if player grouping is supported."""
|
||||
return MediaPlayerEntityFeature.GROUPING in self.supported_features
|
||||
return MediaPlayerEntityFeature.GROUPING in self.supported_features_compat
|
||||
|
||||
async def async_toggle(self) -> None:
|
||||
"""Toggle the power on the media player."""
|
||||
@@ -1059,7 +1074,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
if (
|
||||
self.volume_level is not None
|
||||
and self.volume_level < 1
|
||||
and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features
|
||||
and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features_compat
|
||||
):
|
||||
await self.async_set_volume_level(
|
||||
min(1, self.volume_level + self.volume_step)
|
||||
@@ -1077,7 +1092,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
if (
|
||||
self.volume_level is not None
|
||||
and self.volume_level > 0
|
||||
and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features
|
||||
and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features_compat
|
||||
):
|
||||
await self.async_set_volume_level(
|
||||
max(0, self.volume_level - self.volume_step)
|
||||
@@ -1120,7 +1135,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
def capability_attributes(self) -> dict[str, Any]:
|
||||
"""Return capability attributes."""
|
||||
data: dict[str, Any] = {}
|
||||
supported_features = self.supported_features
|
||||
supported_features = self.supported_features_compat
|
||||
|
||||
if (
|
||||
source_list := self.source_list
|
||||
@@ -1349,7 +1364,7 @@ async def websocket_browse_media(
|
||||
connection.send_error(msg["id"], "entity_not_found", "Entity not found")
|
||||
return
|
||||
|
||||
if MediaPlayerEntityFeature.BROWSE_MEDIA not in player.supported_features:
|
||||
if MediaPlayerEntityFeature.BROWSE_MEDIA not in player.supported_features_compat:
|
||||
connection.send_message(
|
||||
websocket_api.error_message(
|
||||
msg["id"], ERR_NOT_SUPPORTED, "Player does not support browsing media"
|
||||
@@ -1432,7 +1447,7 @@ async def websocket_search_media(
|
||||
connection.send_error(msg["id"], "entity_not_found", "Entity not found")
|
||||
return
|
||||
|
||||
if MediaPlayerEntityFeature.SEARCH_MEDIA not in player.supported_features:
|
||||
if MediaPlayerEntityFeature.SEARCH_MEDIA not in player.supported_features_compat:
|
||||
connection.send_message(
|
||||
websocket_api.error_message(
|
||||
msg["id"], ERR_NOT_SUPPORTED, "Player does not support searching media"
|
||||
|
||||
@@ -5,7 +5,6 @@ from __future__ import annotations
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import dns.asyncresolver
|
||||
import dns.rdata
|
||||
import dns.rdataclass
|
||||
import dns.rdatatype
|
||||
@@ -23,23 +22,20 @@ PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def prevent_dnspython_blocking_operations() -> None:
|
||||
"""Prevent dnspython blocking operations by pre-loading required data."""
|
||||
|
||||
# Blocking import: https://github.com/rthalley/dnspython/issues/1083
|
||||
def load_dnspython_rdata_classes() -> None:
|
||||
"""Load dnspython rdata classes used by mcstatus."""
|
||||
for rdtype in dns.rdatatype.RdataType:
|
||||
if not dns.rdatatype.is_metatype(rdtype) or rdtype == dns.rdatatype.OPT:
|
||||
dns.rdata.get_rdata_class(dns.rdataclass.IN, rdtype) # type: ignore[no-untyped-call]
|
||||
|
||||
# Blocking open: https://github.com/rthalley/dnspython/issues/1200
|
||||
dns.asyncresolver.get_default_resolver()
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: MinecraftServerConfigEntry
|
||||
) -> bool:
|
||||
"""Set up Minecraft Server from a config entry."""
|
||||
await hass.async_add_executor_job(prevent_dnspython_blocking_operations)
|
||||
|
||||
# Workaround to avoid blocking imports from dnspython (https://github.com/rthalley/dnspython/issues/1083)
|
||||
await hass.async_add_executor_job(load_dnspython_rdata_classes)
|
||||
|
||||
# Create coordinator instance and store it.
|
||||
coordinator = MinecraftServerCoordinator(hass, entry)
|
||||
|
||||
@@ -9,10 +9,5 @@
|
||||
"reload": {
|
||||
"service": "mdi:reload"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"mqtt": {
|
||||
"trigger": "mdi:swap-horizontal"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -988,23 +988,6 @@
|
||||
"description": "Reloads MQTT entities from the YAML-configuration."
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"mqtt": {
|
||||
"name": "MQTT",
|
||||
"description": "When a specific message is received on a given MQTT topic.",
|
||||
"description_configured": "When an MQTT message has been received",
|
||||
"fields": {
|
||||
"payload": {
|
||||
"name": "Payload",
|
||||
"description": "The payload to trigger on."
|
||||
},
|
||||
"topic": {
|
||||
"name": "Topic",
|
||||
"description": "MQTT topic to listen to."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"addon_start_failed": {
|
||||
"message": "Failed to correctly start {addon} add-on."
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
# Describes the format for MQTT triggers
|
||||
|
||||
mqtt:
|
||||
fields:
|
||||
payload:
|
||||
example: "on"
|
||||
required: false
|
||||
selector:
|
||||
text:
|
||||
topic:
|
||||
example: "living_room/switch/ac"
|
||||
required: true
|
||||
selector:
|
||||
text:
|
||||
@@ -6,15 +6,15 @@
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "The API key for your NextDNS account"
|
||||
"api_key": "API Key for your NextDNS account"
|
||||
}
|
||||
},
|
||||
"profiles": {
|
||||
"data": {
|
||||
"profile_name": "Profile"
|
||||
"profile": "Profile"
|
||||
},
|
||||
"data_description": {
|
||||
"profile_name": "The NextDNS configuration profile you want to integrate"
|
||||
"profile": "NextDNS configuration profile you want to integrate"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
|
||||
@@ -66,7 +66,7 @@ class OneDriveUpdateCoordinator(DataUpdateCoordinator[Drive]):
|
||||
translation_domain=DOMAIN, translation_key="authentication_failed"
|
||||
) from err
|
||||
except OneDriveException as err:
|
||||
_LOGGER.debug("Failed to fetch drive data: %s", err, exc_info=True)
|
||||
_LOGGER.debug("Failed to fetch drive data: %s")
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN, translation_key="update_failed"
|
||||
) from err
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["ical"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["ical==10.0.4"]
|
||||
"requirements": ["ical==10.0.0"]
|
||||
}
|
||||
|
||||
@@ -7,6 +7,6 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aiorussound"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aiorussound==4.6.0"],
|
||||
"requirements": ["aiorussound==4.5.2"],
|
||||
"zeroconf": ["_rio._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -22,13 +22,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: FederwiegeConfigEntry) -
|
||||
|
||||
federwiege = Federwiege(hass.loop, connection)
|
||||
federwiege.register()
|
||||
federwiege.connect()
|
||||
|
||||
entry.runtime_data = federwiege
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
federwiege.connect()
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pysmarlaapi", "pysignalr"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pysmarlaapi==0.9.0"]
|
||||
"requirements": ["pysmarlaapi==0.8.2"]
|
||||
}
|
||||
|
||||
@@ -53,10 +53,9 @@ class SmarlaNumber(SmarlaBaseEntity, NumberEntity):
|
||||
_property: Property[int]
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
def native_value(self) -> float:
|
||||
"""Return the entity value to represent the entity state."""
|
||||
v = self._property.get()
|
||||
return float(v) if v is not None else None
|
||||
return self._property.get()
|
||||
|
||||
def set_native_value(self, value: float) -> None:
|
||||
"""Update to the smarla device."""
|
||||
|
||||
@@ -52,7 +52,7 @@ class SmarlaSwitch(SmarlaBaseEntity, SwitchEntity):
|
||||
_property: Property[bool]
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
def is_on(self) -> bool:
|
||||
"""Return the entity value to represent the entity state."""
|
||||
return self._property.get()
|
||||
|
||||
|
||||
@@ -30,5 +30,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pysmartthings"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pysmartthings==3.2.5"]
|
||||
"requirements": ["pysmartthings==3.2.4"]
|
||||
}
|
||||
|
||||
@@ -312,7 +312,7 @@ class StateVacuumEntity(
|
||||
@property
|
||||
def capability_attributes(self) -> dict[str, Any] | None:
|
||||
"""Return capability attributes."""
|
||||
if VacuumEntityFeature.FAN_SPEED in self.supported_features:
|
||||
if VacuumEntityFeature.FAN_SPEED in self.supported_features_compat:
|
||||
return {ATTR_FAN_SPEED_LIST: self.fan_speed_list}
|
||||
return None
|
||||
|
||||
@@ -330,7 +330,7 @@ class StateVacuumEntity(
|
||||
def state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes of the vacuum cleaner."""
|
||||
data: dict[str, Any] = {}
|
||||
supported_features = self.supported_features
|
||||
supported_features = self.supported_features_compat
|
||||
|
||||
if VacuumEntityFeature.BATTERY in supported_features:
|
||||
data[ATTR_BATTERY_LEVEL] = self.battery_level
|
||||
@@ -369,6 +369,19 @@ class StateVacuumEntity(
|
||||
"""Flag vacuum cleaner features that are supported."""
|
||||
return self._attr_supported_features
|
||||
|
||||
@property
|
||||
def supported_features_compat(self) -> VacuumEntityFeature:
|
||||
"""Return the supported features as VacuumEntityFeature.
|
||||
|
||||
Remove this compatibility shim in 2025.1 or later.
|
||||
"""
|
||||
features = self.supported_features
|
||||
if type(features) is int:
|
||||
new_features = VacuumEntityFeature(features)
|
||||
self._report_deprecated_supported_features_values(new_features)
|
||||
return new_features
|
||||
return features
|
||||
|
||||
def stop(self, **kwargs: Any) -> None:
|
||||
"""Stop the vacuum cleaner."""
|
||||
raise NotImplementedError
|
||||
|
||||
@@ -52,13 +52,7 @@ from homeassistant.helpers.json import (
|
||||
json_bytes,
|
||||
json_fragment,
|
||||
)
|
||||
from homeassistant.helpers.service import (
|
||||
async_get_all_descriptions as async_get_all_service_descriptions,
|
||||
)
|
||||
from homeassistant.helpers.trigger import (
|
||||
async_get_all_descriptions as async_get_all_trigger_descriptions,
|
||||
async_subscribe_platform_events as async_subscribe_trigger_platform_events,
|
||||
)
|
||||
from homeassistant.helpers.service import async_get_all_descriptions
|
||||
from homeassistant.loader import (
|
||||
IntegrationNotFound,
|
||||
async_get_integration,
|
||||
@@ -74,10 +68,9 @@ from homeassistant.util.json import format_unserializable_data
|
||||
|
||||
from . import const, decorators, messages
|
||||
from .connection import ActiveConnection
|
||||
from .messages import construct_event_message, construct_result_message
|
||||
from .messages import construct_result_message
|
||||
|
||||
ALL_SERVICE_DESCRIPTIONS_JSON_CACHE = "websocket_api_all_service_descriptions_json"
|
||||
ALL_TRIGGER_DESCRIPTIONS_JSON_CACHE = "websocket_api_all_trigger_descriptions_json"
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -95,7 +88,6 @@ def async_register_commands(
|
||||
async_reg(hass, handle_get_config)
|
||||
async_reg(hass, handle_get_services)
|
||||
async_reg(hass, handle_get_states)
|
||||
async_reg(hass, handle_subscribe_trigger_platforms)
|
||||
async_reg(hass, handle_manifest_get)
|
||||
async_reg(hass, handle_integration_setup_info)
|
||||
async_reg(hass, handle_manifest_list)
|
||||
@@ -501,9 +493,9 @@ def _send_handle_entities_init_response(
|
||||
)
|
||||
|
||||
|
||||
async def _async_get_all_service_descriptions_json(hass: HomeAssistant) -> bytes:
|
||||
async def _async_get_all_descriptions_json(hass: HomeAssistant) -> bytes:
|
||||
"""Return JSON of descriptions (i.e. user documentation) for all service calls."""
|
||||
descriptions = await async_get_all_service_descriptions(hass)
|
||||
descriptions = await async_get_all_descriptions(hass)
|
||||
if ALL_SERVICE_DESCRIPTIONS_JSON_CACHE in hass.data:
|
||||
cached_descriptions, cached_json_payload = hass.data[
|
||||
ALL_SERVICE_DESCRIPTIONS_JSON_CACHE
|
||||
@@ -522,57 +514,10 @@ async def handle_get_services(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle get services command."""
|
||||
payload = await _async_get_all_service_descriptions_json(hass)
|
||||
payload = await _async_get_all_descriptions_json(hass)
|
||||
connection.send_message(construct_result_message(msg["id"], payload))
|
||||
|
||||
|
||||
async def _async_get_all_trigger_descriptions_json(hass: HomeAssistant) -> bytes:
|
||||
"""Return JSON of descriptions (i.e. user documentation) for all triggers."""
|
||||
descriptions = await async_get_all_trigger_descriptions(hass)
|
||||
if ALL_TRIGGER_DESCRIPTIONS_JSON_CACHE in hass.data:
|
||||
cached_descriptions, cached_json_payload = hass.data[
|
||||
ALL_TRIGGER_DESCRIPTIONS_JSON_CACHE
|
||||
]
|
||||
# If the descriptions are the same, return the cached JSON payload
|
||||
if cached_descriptions is descriptions:
|
||||
return cast(bytes, cached_json_payload)
|
||||
json_payload = json_bytes(
|
||||
{
|
||||
trigger: description
|
||||
for trigger, description in descriptions.items()
|
||||
if description is not None
|
||||
}
|
||||
)
|
||||
hass.data[ALL_TRIGGER_DESCRIPTIONS_JSON_CACHE] = (descriptions, json_payload)
|
||||
return json_payload
|
||||
|
||||
|
||||
@decorators.websocket_command({vol.Required("type"): "trigger_platforms/subscribe"})
|
||||
@decorators.async_response
|
||||
async def handle_subscribe_trigger_platforms(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle subscribe triggers command."""
|
||||
|
||||
async def on_new_triggers(new_triggers: set[str]) -> None:
|
||||
"""Forward new triggers to websocket."""
|
||||
descriptions = await async_get_all_trigger_descriptions(hass)
|
||||
new_trigger_descriptions = {}
|
||||
for trigger in new_triggers:
|
||||
if (description := descriptions[trigger]) is not None:
|
||||
new_trigger_descriptions[trigger] = description
|
||||
if not new_trigger_descriptions:
|
||||
return
|
||||
connection.send_event(msg["id"], new_trigger_descriptions)
|
||||
|
||||
connection.subscriptions[msg["id"]] = async_subscribe_trigger_platform_events(
|
||||
hass, on_new_triggers
|
||||
)
|
||||
connection.send_result(msg["id"])
|
||||
triggers_json = await _async_get_all_trigger_descriptions_json(hass)
|
||||
connection.send_message(construct_event_message(msg["id"], triggers_json))
|
||||
|
||||
|
||||
@callback
|
||||
@decorators.websocket_command({vol.Required("type"): "get_config"})
|
||||
def handle_get_config(
|
||||
|
||||
@@ -109,19 +109,6 @@ def event_message(iden: int, event: Any) -> dict[str, Any]:
|
||||
return {"id": iden, "type": "event", "event": event}
|
||||
|
||||
|
||||
def construct_event_message(iden: int, event: bytes) -> bytes:
|
||||
"""Construct an event message JSON."""
|
||||
return b"".join(
|
||||
(
|
||||
b'{"id":',
|
||||
str(iden).encode(),
|
||||
b',"type":"event","event":',
|
||||
event,
|
||||
b"}",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def cached_event_message(message_id_as_bytes: bytes, event: Event) -> bytes:
|
||||
"""Return an event message.
|
||||
|
||||
|
||||
@@ -6,7 +6,6 @@ import asyncio
|
||||
from collections.abc import AsyncGenerator
|
||||
import io
|
||||
import logging
|
||||
import time
|
||||
from typing import Any, Final
|
||||
import wave
|
||||
|
||||
@@ -37,7 +36,6 @@ from homeassistant.components.assist_satellite import (
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util.ulid import ulid_now
|
||||
|
||||
from .const import DOMAIN, SAMPLE_CHANNELS, SAMPLE_WIDTH
|
||||
from .data import WyomingService
|
||||
@@ -55,7 +53,6 @@ _PING_SEND_DELAY: Final = 2
|
||||
_PIPELINE_FINISH_TIMEOUT: Final = 1
|
||||
_TTS_SAMPLE_RATE: Final = 22050
|
||||
_ANNOUNCE_CHUNK_BYTES: Final = 2048 # 1024 samples
|
||||
_TTS_TIMEOUT_EXTRA: Final = 1.0
|
||||
|
||||
# Wyoming stage -> Assist stage
|
||||
_STAGES: dict[PipelineStage, assist_pipeline.PipelineStage] = {
|
||||
@@ -128,10 +125,6 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
|
||||
self._ffmpeg_manager: ffmpeg.FFmpegManager | None = None
|
||||
self._played_event_received: asyncio.Event | None = None
|
||||
|
||||
# Randomly set on each pipeline loop run.
|
||||
# Used to ensure TTS timeout is acted on correctly.
|
||||
self._run_loop_id: str | None = None
|
||||
|
||||
@property
|
||||
def pipeline_entity_id(self) -> str | None:
|
||||
"""Return the entity ID of the pipeline to use for the next conversation."""
|
||||
@@ -518,7 +511,6 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
|
||||
wake_word_phrase: str | None = None
|
||||
run_pipeline: RunPipeline | None = None
|
||||
send_ping = True
|
||||
self._run_loop_id = ulid_now()
|
||||
|
||||
# Read events and check for pipeline end in parallel
|
||||
pipeline_ended_task = self.config_entry.async_create_background_task(
|
||||
@@ -706,53 +698,39 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
|
||||
f"Cannot stream audio format to satellite: {tts_result.extension}"
|
||||
)
|
||||
|
||||
# Track the total duration of TTS audio for response timeout
|
||||
total_seconds = 0.0
|
||||
start_time = time.monotonic()
|
||||
data = b"".join([chunk async for chunk in tts_result.async_stream_result()])
|
||||
|
||||
try:
|
||||
data = b"".join([chunk async for chunk in tts_result.async_stream_result()])
|
||||
with io.BytesIO(data) as wav_io, wave.open(wav_io, "rb") as wav_file:
|
||||
sample_rate = wav_file.getframerate()
|
||||
sample_width = wav_file.getsampwidth()
|
||||
sample_channels = wav_file.getnchannels()
|
||||
_LOGGER.debug("Streaming %s TTS sample(s)", wav_file.getnframes())
|
||||
|
||||
with io.BytesIO(data) as wav_io, wave.open(wav_io, "rb") as wav_file:
|
||||
sample_rate = wav_file.getframerate()
|
||||
sample_width = wav_file.getsampwidth()
|
||||
sample_channels = wav_file.getnchannels()
|
||||
_LOGGER.debug("Streaming %s TTS sample(s)", wav_file.getnframes())
|
||||
|
||||
timestamp = 0
|
||||
await self._client.write_event(
|
||||
AudioStart(
|
||||
rate=sample_rate,
|
||||
width=sample_width,
|
||||
channels=sample_channels,
|
||||
timestamp=timestamp,
|
||||
).event()
|
||||
)
|
||||
|
||||
# Stream audio chunks
|
||||
while audio_bytes := wav_file.readframes(_SAMPLES_PER_CHUNK):
|
||||
chunk = AudioChunk(
|
||||
rate=sample_rate,
|
||||
width=sample_width,
|
||||
channels=sample_channels,
|
||||
audio=audio_bytes,
|
||||
timestamp=timestamp,
|
||||
)
|
||||
await self._client.write_event(chunk.event())
|
||||
timestamp += chunk.seconds
|
||||
total_seconds += chunk.seconds
|
||||
|
||||
await self._client.write_event(AudioStop(timestamp=timestamp).event())
|
||||
_LOGGER.debug("TTS streaming complete")
|
||||
finally:
|
||||
send_duration = time.monotonic() - start_time
|
||||
timeout_seconds = max(0, total_seconds - send_duration + _TTS_TIMEOUT_EXTRA)
|
||||
self.config_entry.async_create_background_task(
|
||||
self.hass,
|
||||
self._tts_timeout(timeout_seconds, self._run_loop_id),
|
||||
name="wyoming TTS timeout",
|
||||
timestamp = 0
|
||||
await self._client.write_event(
|
||||
AudioStart(
|
||||
rate=sample_rate,
|
||||
width=sample_width,
|
||||
channels=sample_channels,
|
||||
timestamp=timestamp,
|
||||
).event()
|
||||
)
|
||||
|
||||
# Stream audio chunks
|
||||
while audio_bytes := wav_file.readframes(_SAMPLES_PER_CHUNK):
|
||||
chunk = AudioChunk(
|
||||
rate=sample_rate,
|
||||
width=sample_width,
|
||||
channels=sample_channels,
|
||||
audio=audio_bytes,
|
||||
timestamp=timestamp,
|
||||
)
|
||||
await self._client.write_event(chunk.event())
|
||||
timestamp += chunk.seconds
|
||||
|
||||
await self._client.write_event(AudioStop(timestamp=timestamp).event())
|
||||
_LOGGER.debug("TTS streaming complete")
|
||||
|
||||
async def _stt_stream(self) -> AsyncGenerator[bytes]:
|
||||
"""Yield audio chunks from a queue."""
|
||||
is_first_chunk = True
|
||||
@@ -766,18 +744,6 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
|
||||
|
||||
yield chunk
|
||||
|
||||
async def _tts_timeout(
|
||||
self, timeout_seconds: float, run_loop_id: str | None
|
||||
) -> None:
|
||||
"""Force state change to IDLE in case TTS played event isn't received."""
|
||||
await asyncio.sleep(timeout_seconds + _TTS_TIMEOUT_EXTRA)
|
||||
|
||||
if run_loop_id != self._run_loop_id:
|
||||
# On a different pipeline run now
|
||||
return
|
||||
|
||||
self.tts_response_finished()
|
||||
|
||||
@callback
|
||||
def _handle_timer(
|
||||
self, event_type: intent.TimerEventType, timer: intent.TimerInfo
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"domain": "xiaomi_miio",
|
||||
"name": "Xiaomi Home",
|
||||
"name": "Xiaomi Miio",
|
||||
"codeowners": ["@rytilahti", "@syssi", "@starkillerOG"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/xiaomi_miio",
|
||||
|
||||
@@ -5,37 +5,37 @@
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"incomplete_info": "Incomplete information to set up device, no host or token supplied.",
|
||||
"not_xiaomi_miio": "Device is not (yet) supported by Xiaomi Home integration.",
|
||||
"not_xiaomi_miio": "Device is not (yet) supported by Xiaomi Miio.",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"wrong_token": "Checksum error, wrong token",
|
||||
"unknown_device": "The device model is not known, not able to set up the device using config flow.",
|
||||
"cloud_no_devices": "No devices found in this Xiaomi Home account.",
|
||||
"cloud_credentials_incomplete": "Credentials incomplete, please fill in username, password and server region",
|
||||
"cloud_login_error": "Could not log in to Xiaomi Home, check the credentials."
|
||||
"cloud_no_devices": "No devices found in this Xiaomi Miio cloud account.",
|
||||
"cloud_credentials_incomplete": "Cloud credentials incomplete, please fill in username, password and country",
|
||||
"cloud_login_error": "Could not log in to Xiaomi Miio Cloud, check the credentials."
|
||||
},
|
||||
"flow_title": "{name}",
|
||||
"step": {
|
||||
"reauth_confirm": {
|
||||
"description": "The Xiaomi Home integration needs to re-authenticate your account in order to update the tokens or add missing credentials.",
|
||||
"description": "The Xiaomi Miio integration needs to re-authenticate your account in order to update the tokens or add missing cloud credentials.",
|
||||
"title": "[%key:common::config_flow::title::reauth%]"
|
||||
},
|
||||
"cloud": {
|
||||
"data": {
|
||||
"cloud_username": "[%key:common::config_flow::data::username%]",
|
||||
"cloud_password": "[%key:common::config_flow::data::password%]",
|
||||
"cloud_country": "Server region",
|
||||
"cloud_username": "Cloud username",
|
||||
"cloud_password": "Cloud password",
|
||||
"cloud_country": "Cloud server country",
|
||||
"manual": "Configure manually (not recommended)"
|
||||
},
|
||||
"description": "Log in to Xiaomi Home, see https://www.openhab.org/addons/bindings/miio/#country-servers for the server region to use."
|
||||
"description": "Log in to the Xiaomi Miio cloud, see https://www.openhab.org/addons/bindings/miio/#country-servers for the cloud server to use."
|
||||
},
|
||||
"select": {
|
||||
"data": {
|
||||
"select_device": "[%key:common::config_flow::data::device%]"
|
||||
"select_device": "Miio device"
|
||||
},
|
||||
"description": "Select the Xiaomi Home device to set up."
|
||||
"description": "Select the Xiaomi Miio device to set up."
|
||||
},
|
||||
"manual": {
|
||||
"data": {
|
||||
@@ -58,7 +58,7 @@
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"cloud_subdevices": "Use Xiaomi Home service to get connected subdevices"
|
||||
"cloud_subdevices": "Use cloud to get connected subdevices"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -331,7 +331,7 @@
|
||||
"fields": {
|
||||
"entity_id": {
|
||||
"name": "Entity ID",
|
||||
"description": "Name of the Xiaomi Home entity."
|
||||
"description": "Name of the Xiaomi Miio entity."
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
"zha",
|
||||
"universal_silabs_flasher"
|
||||
],
|
||||
"requirements": ["zha==0.0.60"],
|
||||
"requirements": ["zha==0.0.59"],
|
||||
"usb": [
|
||||
{
|
||||
"vid": "10C4",
|
||||
|
||||
@@ -896,7 +896,6 @@ DISCOVERY_SCHEMAS = [
|
||||
writeable=False,
|
||||
),
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
# generic text sensors
|
||||
ZWaveDiscoverySchema(
|
||||
@@ -933,7 +932,6 @@ DISCOVERY_SCHEMAS = [
|
||||
),
|
||||
data_template=NumericSensorDataTemplate(),
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
# Meter sensors for Meter CC
|
||||
ZWaveDiscoverySchema(
|
||||
@@ -959,7 +957,6 @@ DISCOVERY_SCHEMAS = [
|
||||
writeable=True,
|
||||
),
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
# button for Indicator CC
|
||||
ZWaveDiscoverySchema(
|
||||
@@ -983,7 +980,6 @@ DISCOVERY_SCHEMAS = [
|
||||
writeable=True,
|
||||
),
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
# binary switch
|
||||
# barrier operator signaling states
|
||||
@@ -1188,7 +1184,6 @@ DISCOVERY_SCHEMAS = [
|
||||
any_available_states={(0, "idle")},
|
||||
),
|
||||
allow_multi=True,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
# event
|
||||
# stateful = False
|
||||
|
||||
@@ -7475,7 +7475,7 @@
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling",
|
||||
"name": "Xiaomi Home"
|
||||
"name": "Xiaomi Miio"
|
||||
},
|
||||
"xiaomi_tv": {
|
||||
"integration_type": "hub",
|
||||
|
||||
@@ -1020,11 +1020,15 @@ class MediaSelector(Selector[MediaSelectorConfig]):
|
||||
|
||||
selector_type = "media"
|
||||
|
||||
CONFIG_SCHEMA = BASE_SELECTOR_CONFIG_SCHEMA
|
||||
CONFIG_SCHEMA = BASE_SELECTOR_CONFIG_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional("accept"): [str],
|
||||
}
|
||||
)
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
# Although marked as optional in frontend, this field is required
|
||||
vol.Required("entity_id"): cv.entity_id_or_uuid,
|
||||
# If accept is set, the entity_id field will not be present
|
||||
vol.Optional("entity_id"): cv.entity_id_or_uuid,
|
||||
# Although marked as optional in frontend, this field is required
|
||||
vol.Required("media_content_id"): str,
|
||||
# Although marked as optional in frontend, this field is required
|
||||
|
||||
@@ -682,12 +682,9 @@ def _load_services_file(hass: HomeAssistant, integration: Integration) -> JSON_T
|
||||
|
||||
def _load_services_files(
|
||||
hass: HomeAssistant, integrations: Iterable[Integration]
|
||||
) -> dict[str, JSON_TYPE]:
|
||||
) -> list[JSON_TYPE]:
|
||||
"""Load service files for multiple integrations."""
|
||||
return {
|
||||
integration.domain: _load_services_file(hass, integration)
|
||||
for integration in integrations
|
||||
}
|
||||
return [_load_services_file(hass, integration) for integration in integrations]
|
||||
|
||||
|
||||
@callback
|
||||
@@ -747,9 +744,10 @@ async def async_get_all_descriptions(
|
||||
_LOGGER.error("Failed to load integration: %s", domain, exc_info=int_or_exc)
|
||||
|
||||
if integrations:
|
||||
loaded = await hass.async_add_executor_job(
|
||||
contents = await hass.async_add_executor_job(
|
||||
_load_services_files, hass, integrations
|
||||
)
|
||||
loaded = dict(zip(domains_with_missing_services, contents, strict=False))
|
||||
|
||||
# Load translations for all service domains
|
||||
translations = await translation.async_get_translations(
|
||||
|
||||
@@ -5,11 +5,11 @@ from __future__ import annotations
|
||||
import abc
|
||||
import asyncio
|
||||
from collections import defaultdict
|
||||
from collections.abc import Callable, Coroutine, Iterable
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass, field
|
||||
import functools
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, Protocol, TypedDict, cast
|
||||
from typing import Any, Protocol, TypedDict, cast
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -29,24 +29,13 @@ from homeassistant.core import (
|
||||
is_callback,
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError, TemplateError
|
||||
from homeassistant.loader import (
|
||||
Integration,
|
||||
IntegrationNotFound,
|
||||
async_get_integration,
|
||||
async_get_integrations,
|
||||
)
|
||||
from homeassistant.loader import IntegrationNotFound, async_get_integration
|
||||
from homeassistant.util.async_ import create_eager_task
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
from homeassistant.util.yaml import load_yaml_dict
|
||||
from homeassistant.util.yaml.loader import JSON_TYPE
|
||||
|
||||
from . import config_validation as cv
|
||||
from .integration_platform import async_process_integration_platforms
|
||||
from .template import Template
|
||||
from .typing import ConfigType, TemplateVarsType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_PLATFORM_ALIASES = {
|
||||
"device": "device_automation",
|
||||
"event": "homeassistant",
|
||||
@@ -60,99 +49,6 @@ DATA_PLUGGABLE_ACTIONS: HassKey[defaultdict[tuple, PluggableActionsEntry]] = Has
|
||||
"pluggable_actions"
|
||||
)
|
||||
|
||||
TRIGGER_DESCRIPTION_CACHE: HassKey[dict[str, dict[str, Any] | None]] = HassKey(
|
||||
"trigger_description_cache"
|
||||
)
|
||||
TRIGGER_PLATFORM_SUBSCRIPTIONS: HassKey[
|
||||
list[Callable[[set[str]], Coroutine[Any, Any, None]]]
|
||||
] = HassKey("trigger_platform_subscriptions")
|
||||
TRIGGERS: HassKey[dict[str, str]] = HassKey("triggers")
|
||||
|
||||
|
||||
# Basic schemas to sanity check the trigger descriptions,
|
||||
# full validation is done by hassfest.services
|
||||
_FIELD_SCHEMA = vol.Schema(
|
||||
{},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
_TRIGGER_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional("fields"): vol.Schema({str: _FIELD_SCHEMA}),
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
def starts_with_dot(key: str) -> str:
|
||||
"""Check if key starts with dot."""
|
||||
if not key.startswith("."):
|
||||
raise vol.Invalid("Key does not start with .")
|
||||
return key
|
||||
|
||||
|
||||
_TRIGGERS_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Remove(vol.All(str, starts_with_dot)): object,
|
||||
cv.slug: vol.Any(None, _TRIGGER_SCHEMA),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant) -> None:
|
||||
"""Set up the trigger helper."""
|
||||
hass.data[TRIGGER_DESCRIPTION_CACHE] = {}
|
||||
hass.data[TRIGGER_PLATFORM_SUBSCRIPTIONS] = []
|
||||
hass.data[TRIGGERS] = {}
|
||||
await async_process_integration_platforms(
|
||||
hass, "trigger", _register_trigger_platform, wait_for_platforms=True
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_subscribe_platform_events(
|
||||
hass: HomeAssistant,
|
||||
on_event: Callable[[set[str]], Coroutine[Any, Any, None]],
|
||||
) -> Callable[[], None]:
|
||||
"""Subscribe to trigger platform events."""
|
||||
trigger_platform_event_subscriptions = hass.data[TRIGGER_PLATFORM_SUBSCRIPTIONS]
|
||||
|
||||
def remove_subscription() -> None:
|
||||
trigger_platform_event_subscriptions.remove(on_event)
|
||||
|
||||
trigger_platform_event_subscriptions.append(on_event)
|
||||
return remove_subscription
|
||||
|
||||
|
||||
async def _register_trigger_platform(
|
||||
hass: HomeAssistant, integration_domain: str, platform: TriggerProtocol
|
||||
) -> None:
|
||||
"""Register a trigger platform."""
|
||||
|
||||
new_triggers: set[str] = set()
|
||||
|
||||
if hasattr(platform, "async_get_triggers"):
|
||||
for trigger_key in await platform.async_get_triggers(hass):
|
||||
hass.data[TRIGGERS][trigger_key] = integration_domain
|
||||
new_triggers.add(trigger_key)
|
||||
elif hasattr(platform, "async_validate_trigger_config") or hasattr(
|
||||
platform, "TRIGGER_SCHEMA"
|
||||
):
|
||||
hass.data[TRIGGERS][integration_domain] = integration_domain
|
||||
new_triggers.add(integration_domain)
|
||||
else:
|
||||
_LOGGER.debug(
|
||||
"Integration %s does not provide trigger support, skipping",
|
||||
integration_domain,
|
||||
)
|
||||
return
|
||||
|
||||
tasks: list[asyncio.Task[None]] = [
|
||||
create_eager_task(listener(new_triggers))
|
||||
for listener in hass.data[TRIGGER_PLATFORM_SUBSCRIPTIONS]
|
||||
]
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
|
||||
class Trigger(abc.ABC):
|
||||
"""Trigger class."""
|
||||
@@ -513,107 +409,3 @@ async def async_initialize_triggers(
|
||||
remove()
|
||||
|
||||
return remove_triggers
|
||||
|
||||
|
||||
def _load_triggers_file(hass: HomeAssistant, integration: Integration) -> JSON_TYPE:
|
||||
"""Load triggers file for an integration."""
|
||||
try:
|
||||
return cast(
|
||||
JSON_TYPE,
|
||||
_TRIGGERS_SCHEMA(
|
||||
load_yaml_dict(str(integration.file_path / "triggers.yaml"))
|
||||
),
|
||||
)
|
||||
except FileNotFoundError:
|
||||
_LOGGER.warning(
|
||||
"Unable to find triggers.yaml for the %s integration", integration.domain
|
||||
)
|
||||
return {}
|
||||
except (HomeAssistantError, vol.Invalid) as ex:
|
||||
_LOGGER.warning(
|
||||
"Unable to parse triggers.yaml for the %s integration: %s",
|
||||
integration.domain,
|
||||
ex,
|
||||
)
|
||||
return {}
|
||||
|
||||
|
||||
def _load_triggers_files(
|
||||
hass: HomeAssistant, integrations: Iterable[Integration]
|
||||
) -> dict[str, JSON_TYPE]:
|
||||
"""Load trigger files for multiple integrations."""
|
||||
return {
|
||||
integration.domain: _load_triggers_file(hass, integration)
|
||||
for integration in integrations
|
||||
}
|
||||
|
||||
|
||||
async def async_get_all_descriptions(
|
||||
hass: HomeAssistant,
|
||||
) -> dict[str, dict[str, Any] | None]:
|
||||
"""Return descriptions (i.e. user documentation) for all triggers."""
|
||||
descriptions_cache = hass.data[TRIGGER_DESCRIPTION_CACHE]
|
||||
|
||||
triggers = hass.data[TRIGGERS]
|
||||
# See if there are new triggers not seen before.
|
||||
# Any trigger that we saw before already has an entry in description_cache.
|
||||
all_triggers = set(triggers)
|
||||
previous_all_triggers = set(descriptions_cache)
|
||||
# If the triggers are the same, we can return the cache
|
||||
if previous_all_triggers == all_triggers:
|
||||
return descriptions_cache
|
||||
|
||||
# Files we loaded for missing descriptions
|
||||
new_triggers_descriptions: dict[str, JSON_TYPE] = {}
|
||||
# We try to avoid making a copy in the event the cache is good,
|
||||
# but now we must make a copy in case new triggers get added
|
||||
# while we are loading the missing ones so we do not
|
||||
# add the new ones to the cache without their descriptions
|
||||
triggers = triggers.copy()
|
||||
|
||||
if missing_triggers := all_triggers.difference(descriptions_cache):
|
||||
domains_with_missing_triggers = {
|
||||
triggers[missing_trigger] for missing_trigger in missing_triggers
|
||||
}
|
||||
ints_or_excs = await async_get_integrations(hass, domains_with_missing_triggers)
|
||||
integrations: list[Integration] = []
|
||||
for domain, int_or_exc in ints_or_excs.items():
|
||||
if type(int_or_exc) is Integration and int_or_exc.has_triggers:
|
||||
integrations.append(int_or_exc)
|
||||
continue
|
||||
if TYPE_CHECKING:
|
||||
assert isinstance(int_or_exc, Exception)
|
||||
_LOGGER.debug(
|
||||
"Failed to load triggers.yaml for integration: %s",
|
||||
domain,
|
||||
exc_info=int_or_exc,
|
||||
)
|
||||
|
||||
if integrations:
|
||||
new_triggers_descriptions = await hass.async_add_executor_job(
|
||||
_load_triggers_files, hass, integrations
|
||||
)
|
||||
|
||||
# Make a copy of the old cache and add missing descriptions to it
|
||||
new_descriptions_cache = descriptions_cache.copy()
|
||||
for missing_trigger in missing_triggers:
|
||||
domain = triggers[missing_trigger]
|
||||
|
||||
if (
|
||||
yaml_description := new_triggers_descriptions.get(domain, {}).get( # type: ignore[union-attr]
|
||||
missing_trigger
|
||||
)
|
||||
) is None:
|
||||
_LOGGER.debug(
|
||||
"No trigger descriptions found for trigger %s, skipping",
|
||||
missing_trigger,
|
||||
)
|
||||
new_descriptions_cache[missing_trigger] = None
|
||||
continue
|
||||
|
||||
description = {"fields": yaml_description.get("fields", {})}
|
||||
|
||||
new_descriptions_cache[missing_trigger] = description
|
||||
|
||||
hass.data[TRIGGER_DESCRIPTION_CACHE] = new_descriptions_cache
|
||||
return new_descriptions_cache
|
||||
|
||||
@@ -857,20 +857,15 @@ class Integration:
|
||||
# True.
|
||||
return self.manifest.get("import_executor", True)
|
||||
|
||||
@cached_property
|
||||
def has_services(self) -> bool:
|
||||
"""Return if the integration has services."""
|
||||
return "services.yaml" in self._top_level_files
|
||||
|
||||
@cached_property
|
||||
def has_translations(self) -> bool:
|
||||
"""Return if the integration has translations."""
|
||||
return "translations" in self._top_level_files
|
||||
|
||||
@cached_property
|
||||
def has_triggers(self) -> bool:
|
||||
"""Return if the integration has triggers."""
|
||||
return "triggers.yaml" in self._top_level_files
|
||||
def has_services(self) -> bool:
|
||||
"""Return if the integration has services."""
|
||||
return "services.yaml" in self._top_level_files
|
||||
|
||||
@property
|
||||
def mqtt(self) -> list[str] | None:
|
||||
|
||||
18
requirements_all.txt
generated
18
requirements_all.txt
generated
@@ -201,7 +201,7 @@ aioaseko==1.0.0
|
||||
aioasuswrt==1.4.0
|
||||
|
||||
# homeassistant.components.husqvarna_automower
|
||||
aioautomower==2025.6.0
|
||||
aioautomower==2025.5.1
|
||||
|
||||
# homeassistant.components.azure_devops
|
||||
aioazuredevops==2.2.1
|
||||
@@ -265,7 +265,7 @@ aioharmony==0.5.2
|
||||
aiohasupervisor==0.3.1
|
||||
|
||||
# homeassistant.components.home_connect
|
||||
aiohomeconnect==0.18.0
|
||||
aiohomeconnect==0.17.1
|
||||
|
||||
# homeassistant.components.homekit_controller
|
||||
aiohomekit==3.2.15
|
||||
@@ -369,7 +369,7 @@ aioridwell==2024.01.0
|
||||
aioruckus==0.42
|
||||
|
||||
# homeassistant.components.russound_rio
|
||||
aiorussound==4.6.0
|
||||
aiorussound==4.5.2
|
||||
|
||||
# homeassistant.components.ruuvi_gateway
|
||||
aioruuvigateway==0.1.0
|
||||
@@ -1203,7 +1203,7 @@ ibmiotf==0.3.4
|
||||
# homeassistant.components.local_calendar
|
||||
# homeassistant.components.local_todo
|
||||
# homeassistant.components.remote_calendar
|
||||
ical==10.0.4
|
||||
ical==10.0.0
|
||||
|
||||
# homeassistant.components.caldav
|
||||
icalendar==6.1.0
|
||||
@@ -1505,7 +1505,7 @@ nexia==2.10.0
|
||||
nextcloudmonitor==1.5.1
|
||||
|
||||
# homeassistant.components.discord
|
||||
nextcord==3.1.0
|
||||
nextcord==2.6.0
|
||||
|
||||
# homeassistant.components.nextdns
|
||||
nextdns==4.0.0
|
||||
@@ -2096,7 +2096,7 @@ pykwb==0.0.8
|
||||
pylacrosse==0.4
|
||||
|
||||
# homeassistant.components.lamarzocco
|
||||
pylamarzocco==2.0.9
|
||||
pylamarzocco==2.0.8
|
||||
|
||||
# homeassistant.components.lastfm
|
||||
pylast==5.1.0
|
||||
@@ -2338,10 +2338,10 @@ pysma==0.7.5
|
||||
pysmappee==0.2.29
|
||||
|
||||
# homeassistant.components.smarla
|
||||
pysmarlaapi==0.9.0
|
||||
pysmarlaapi==0.8.2
|
||||
|
||||
# homeassistant.components.smartthings
|
||||
pysmartthings==3.2.5
|
||||
pysmartthings==3.2.4
|
||||
|
||||
# homeassistant.components.smarty
|
||||
pysmarty2==0.10.2
|
||||
@@ -3180,7 +3180,7 @@ zeroconf==0.147.0
|
||||
zeversolar==0.3.2
|
||||
|
||||
# homeassistant.components.zha
|
||||
zha==0.0.60
|
||||
zha==0.0.59
|
||||
|
||||
# homeassistant.components.zhong_hong
|
||||
zhong-hong-hvac==1.0.13
|
||||
|
||||
18
requirements_test_all.txt
generated
18
requirements_test_all.txt
generated
@@ -189,7 +189,7 @@ aioaseko==1.0.0
|
||||
aioasuswrt==1.4.0
|
||||
|
||||
# homeassistant.components.husqvarna_automower
|
||||
aioautomower==2025.6.0
|
||||
aioautomower==2025.5.1
|
||||
|
||||
# homeassistant.components.azure_devops
|
||||
aioazuredevops==2.2.1
|
||||
@@ -250,7 +250,7 @@ aioharmony==0.5.2
|
||||
aiohasupervisor==0.3.1
|
||||
|
||||
# homeassistant.components.home_connect
|
||||
aiohomeconnect==0.18.0
|
||||
aiohomeconnect==0.17.1
|
||||
|
||||
# homeassistant.components.homekit_controller
|
||||
aiohomekit==3.2.15
|
||||
@@ -351,7 +351,7 @@ aioridwell==2024.01.0
|
||||
aioruckus==0.42
|
||||
|
||||
# homeassistant.components.russound_rio
|
||||
aiorussound==4.6.0
|
||||
aiorussound==4.5.2
|
||||
|
||||
# homeassistant.components.ruuvi_gateway
|
||||
aioruuvigateway==0.1.0
|
||||
@@ -1040,7 +1040,7 @@ ibeacon-ble==1.2.0
|
||||
# homeassistant.components.local_calendar
|
||||
# homeassistant.components.local_todo
|
||||
# homeassistant.components.remote_calendar
|
||||
ical==10.0.4
|
||||
ical==10.0.0
|
||||
|
||||
# homeassistant.components.caldav
|
||||
icalendar==6.1.0
|
||||
@@ -1285,7 +1285,7 @@ nexia==2.10.0
|
||||
nextcloudmonitor==1.5.1
|
||||
|
||||
# homeassistant.components.discord
|
||||
nextcord==3.1.0
|
||||
nextcord==2.6.0
|
||||
|
||||
# homeassistant.components.nextdns
|
||||
nextdns==4.0.0
|
||||
@@ -1738,7 +1738,7 @@ pykrakenapi==0.1.8
|
||||
pykulersky==0.5.8
|
||||
|
||||
# homeassistant.components.lamarzocco
|
||||
pylamarzocco==2.0.9
|
||||
pylamarzocco==2.0.8
|
||||
|
||||
# homeassistant.components.lastfm
|
||||
pylast==5.1.0
|
||||
@@ -1938,10 +1938,10 @@ pysma==0.7.5
|
||||
pysmappee==0.2.29
|
||||
|
||||
# homeassistant.components.smarla
|
||||
pysmarlaapi==0.9.0
|
||||
pysmarlaapi==0.8.2
|
||||
|
||||
# homeassistant.components.smartthings
|
||||
pysmartthings==3.2.5
|
||||
pysmartthings==3.2.4
|
||||
|
||||
# homeassistant.components.smarty
|
||||
pysmarty2==0.10.2
|
||||
@@ -2621,7 +2621,7 @@ zeroconf==0.147.0
|
||||
zeversolar==0.3.2
|
||||
|
||||
# homeassistant.components.zha
|
||||
zha==0.0.60
|
||||
zha==0.0.59
|
||||
|
||||
# homeassistant.components.zwave_js
|
||||
zwave-js-server-python==0.63.0
|
||||
|
||||
@@ -28,7 +28,6 @@ from . import (
|
||||
services,
|
||||
ssdp,
|
||||
translations,
|
||||
triggers,
|
||||
usb,
|
||||
zeroconf,
|
||||
)
|
||||
@@ -50,7 +49,6 @@ INTEGRATION_PLUGINS = [
|
||||
services,
|
||||
ssdp,
|
||||
translations,
|
||||
triggers,
|
||||
usb,
|
||||
zeroconf,
|
||||
config_flow, # This needs to run last, after translations are processed
|
||||
|
||||
@@ -120,16 +120,6 @@ CUSTOM_INTEGRATION_SERVICE_ICONS_SCHEMA = cv.schema_with_slug_keys(
|
||||
)
|
||||
|
||||
|
||||
TRIGGER_ICONS_SCHEMA = cv.schema_with_slug_keys(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Optional("trigger"): icon_value_validator,
|
||||
}
|
||||
),
|
||||
slug_validator=translation_key_validator,
|
||||
)
|
||||
|
||||
|
||||
def icon_schema(
|
||||
core_integration: bool, integration_type: str, no_entity_platform: bool
|
||||
) -> vol.Schema:
|
||||
@@ -174,7 +164,6 @@ def icon_schema(
|
||||
vol.Optional("services"): CORE_SERVICE_ICONS_SCHEMA
|
||||
if core_integration
|
||||
else CUSTOM_INTEGRATION_SERVICE_ICONS_SCHEMA,
|
||||
vol.Optional("triggers"): TRIGGER_ICONS_SCHEMA,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@@ -480,6 +480,7 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [
|
||||
"hko",
|
||||
"hlk_sw16",
|
||||
"holiday",
|
||||
"home_connect",
|
||||
"homekit",
|
||||
"homekit_controller",
|
||||
"homematic",
|
||||
@@ -1527,6 +1528,7 @@ INTEGRATIONS_WITHOUT_SCALE = [
|
||||
"hko",
|
||||
"hlk_sw16",
|
||||
"holiday",
|
||||
"home_connect",
|
||||
"homekit",
|
||||
"homekit_controller",
|
||||
"homematic",
|
||||
|
||||
@@ -415,22 +415,6 @@ def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema:
|
||||
},
|
||||
slug_validator=translation_key_validator,
|
||||
),
|
||||
vol.Optional("triggers"): cv.schema_with_slug_keys(
|
||||
{
|
||||
vol.Required("name"): translation_value_validator,
|
||||
vol.Required("description"): translation_value_validator,
|
||||
vol.Required("description_configured"): translation_value_validator,
|
||||
vol.Optional("fields"): cv.schema_with_slug_keys(
|
||||
{
|
||||
vol.Required("name"): str,
|
||||
vol.Required("description"): translation_value_validator,
|
||||
vol.Optional("example"): translation_value_validator,
|
||||
},
|
||||
slug_validator=translation_key_validator,
|
||||
),
|
||||
},
|
||||
slug_validator=translation_key_validator,
|
||||
),
|
||||
vol.Optional("conversation"): {
|
||||
vol.Required("agent"): {
|
||||
vol.Required("done"): translation_value_validator,
|
||||
|
||||
@@ -1,238 +0,0 @@
|
||||
"""Validate triggers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import json
|
||||
import pathlib
|
||||
import re
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from homeassistant.const import CONF_SELECTOR
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, selector, trigger
|
||||
from homeassistant.util.yaml import load_yaml_dict
|
||||
|
||||
from .model import Config, Integration
|
||||
|
||||
|
||||
def exists(value: Any) -> Any:
|
||||
"""Check if value exists."""
|
||||
if value is None:
|
||||
raise vol.Invalid("Value cannot be None")
|
||||
return value
|
||||
|
||||
|
||||
FIELD_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional("example"): exists,
|
||||
vol.Optional("default"): exists,
|
||||
vol.Optional("required"): bool,
|
||||
vol.Optional(CONF_SELECTOR): selector.validate_selector,
|
||||
}
|
||||
)
|
||||
|
||||
TRIGGER_SCHEMA = vol.Any(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Optional("fields"): vol.Schema({str: FIELD_SCHEMA}),
|
||||
}
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
TRIGGERS_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Remove(vol.All(str, trigger.starts_with_dot)): object,
|
||||
cv.slug: TRIGGER_SCHEMA,
|
||||
}
|
||||
)
|
||||
|
||||
NON_MIGRATED_INTEGRATIONS = {
|
||||
"calendar",
|
||||
"conversation",
|
||||
"device_automation",
|
||||
"geo_location",
|
||||
"homeassistant",
|
||||
"knx",
|
||||
"lg_netcast",
|
||||
"litejet",
|
||||
"persistent_notification",
|
||||
"samsungtv",
|
||||
"sun",
|
||||
"tag",
|
||||
"template",
|
||||
"webhook",
|
||||
"webostv",
|
||||
"zone",
|
||||
"zwave_js",
|
||||
}
|
||||
|
||||
|
||||
def grep_dir(path: pathlib.Path, glob_pattern: str, search_pattern: str) -> bool:
|
||||
"""Recursively go through a dir and it's children and find the regex."""
|
||||
pattern = re.compile(search_pattern)
|
||||
|
||||
for fil in path.glob(glob_pattern):
|
||||
if not fil.is_file():
|
||||
continue
|
||||
|
||||
if pattern.search(fil.read_text()):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def validate_triggers(config: Config, integration: Integration) -> None:  # noqa: C901
    """Validate the integration's triggers.yaml against schema, icons and strings.

    Errors are reported via integration.add_error; nothing is returned.
    """
    try:
        data = load_yaml_dict(str(integration.path / "triggers.yaml"))
    except FileNotFoundError:
        # No triggers.yaml — find if the integration registers triggers in code
        has_triggers = grep_dir(
            integration.path,
            "**/trigger.py",
            r"async_attach_trigger|async_get_triggers",
        )

        # Integrations in NON_MIGRATED_INTEGRATIONS are exempt from this check
        if has_triggers and integration.domain not in NON_MIGRATED_INTEGRATIONS:
            integration.add_error(
                "triggers", "Registers triggers but has no triggers.yaml"
            )
        return
    except HomeAssistantError:
        # File exists but could not be parsed as YAML
        integration.add_error("triggers", "Invalid triggers.yaml")
        return

    try:
        triggers = TRIGGERS_SCHEMA(data)
    except vol.Invalid as err:
        integration.add_error(
            "triggers", f"Invalid triggers.yaml: {humanize_error(data, err)}"
        )
        return

    # Load icons.json if present; parse errors leave icons empty on purpose
    # (the missing-icon errors below will then fire for core integrations).
    icons_file = integration.path / "icons.json"
    icons = {}
    if icons_file.is_file():
        with contextlib.suppress(ValueError):
            icons = json.loads(icons_file.read_text())
    trigger_icons = icons.get("triggers", {})

    # Try loading translation strings
    if integration.core:
        strings_file = integration.path / "strings.json"
    else:
        # For custom integrations, use the en.json file
        strings_file = integration.path / "translations/en.json"

    strings = {}
    if strings_file.is_file():
        with contextlib.suppress(ValueError):
            strings = json.loads(strings_file.read_text())

    error_msg_suffix = "in the translations file"
    if not integration.core:
        error_msg_suffix = f"and is not {error_msg_suffix}"

    # For each trigger in the integration:
    # 1. Check if the trigger name/description is set; if not,
    #    check if it's in the strings file, else add an error.
    # 2. Check if the trigger has an icon set in icons.json;
    #    raise an error if not.
    for trigger_name, trigger_schema in triggers.items():
        if integration.core and trigger_name not in trigger_icons:
            # This is enforced for Core integrations only
            integration.add_error(
                "triggers",
                f"Trigger {trigger_name} has no icon in icons.json.",
            )
        if trigger_schema is None:
            # Trigger entry with no metadata — nothing further to check
            continue
        if "name" not in trigger_schema and integration.core:
            try:
                strings["triggers"][trigger_name]["name"]
            except KeyError:
                integration.add_error(
                    "triggers",
                    f"Trigger {trigger_name} has no name {error_msg_suffix}",
                )

        if "description" not in trigger_schema and integration.core:
            try:
                strings["triggers"][trigger_name]["description"]
            except KeyError:
                integration.add_error(
                    "triggers",
                    f"Trigger {trigger_name} has no description {error_msg_suffix}",
                )

        # The same check is done for the name/description in each of the
        # fields of the trigger schema (sections are skipped here and
        # handled in the loop below).
        for field_name, field_schema in trigger_schema.get("fields", {}).items():
            if "fields" in field_schema:
                # This is a section
                continue
            if "name" not in field_schema and integration.core:
                try:
                    strings["triggers"][trigger_name]["fields"][field_name]["name"]
                except KeyError:
                    integration.add_error(
                        "triggers",
                        (
                            f"Trigger {trigger_name} has a field {field_name} with no "
                            f"name {error_msg_suffix}"
                        ),
                    )

            if "description" not in field_schema and integration.core:
                try:
                    strings["triggers"][trigger_name]["fields"][field_name][
                        "description"
                    ]
                except KeyError:
                    integration.add_error(
                        "triggers",
                        (
                            f"Trigger {trigger_name} has a field {field_name} with no "
                            f"description {error_msg_suffix}"
                        ),
                    )

            if "selector" in field_schema:
                # If the field uses a select selector with a translation_key,
                # that key must exist under "selector" in the strings file.
                with contextlib.suppress(KeyError):
                    translation_key = field_schema["selector"]["select"][
                        "translation_key"
                    ]
                    try:
                        strings["selector"][translation_key]
                    except KeyError:
                        integration.add_error(
                            "triggers",
                            f"Trigger {trigger_name} has a field {field_name} with a selector with a translation key {translation_key} that is not in the translations file",
                        )

        # The same check is done for the name in each of the sections of the
        # trigger schema (entries that themselves contain "fields").
        for section_name, section_schema in trigger_schema.get("fields", {}).items():
            if "fields" not in section_schema:
                # This is not a section
                continue
            if "name" not in section_schema and integration.core:
                try:
                    strings["triggers"][trigger_name]["sections"][section_name]["name"]
                except KeyError:
                    integration.add_error(
                        "triggers",
                        f"Trigger {trigger_name} has a section {section_name} with no name {error_msg_suffix}",
                    )
|
||||
|
||||
|
||||
def validate(integrations: dict[str, Integration], config: Config) -> None:
    """Validate triggers.yaml for all integrations.

    Fixes a copy-pasted docstring that incorrectly said this function
    handled dependencies; it runs the triggers.yaml validation for every
    integration.
    """
    # check triggers.yaml is valid
    for integration in integrations.values():
        validate_triggers(config, integration)
|
||||
@@ -87,7 +87,6 @@ from homeassistant.helpers import (
|
||||
restore_state as rs,
|
||||
storage,
|
||||
translation,
|
||||
trigger,
|
||||
)
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_connect,
|
||||
@@ -296,7 +295,6 @@ async def async_test_home_assistant(
|
||||
# Load the registries
|
||||
entity.async_setup(hass)
|
||||
loader.async_setup(hass)
|
||||
await trigger.async_setup(hass)
|
||||
|
||||
# setup translation cache instead of calling translation.async_setup(hass)
|
||||
hass.data[translation.TRANSLATION_FLATTEN_CACHE] = translation._TranslationCache(
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
from enum import Enum
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components import cover
|
||||
from homeassistant.components.cover import CoverState
|
||||
from homeassistant.const import ATTR_ENTITY_ID, CONF_PLATFORM, SERVICE_TOGGLE
|
||||
@@ -11,7 +13,11 @@ from homeassistant.setup import async_setup_component
|
||||
|
||||
from .common import MockCover
|
||||
|
||||
from tests.common import help_test_all, setup_test_component_platform
|
||||
from tests.common import (
|
||||
MockEntityPlatform,
|
||||
help_test_all,
|
||||
setup_test_component_platform,
|
||||
)
|
||||
|
||||
|
||||
async def test_services(
|
||||
@@ -153,3 +159,24 @@ def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, s
|
||||
def test_all() -> None:
|
||||
"""Test module.__all__ is correctly set."""
|
||||
help_test_all(cover)
|
||||
|
||||
|
||||
def test_deprecated_supported_features_ints(
|
||||
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
|
||||
) -> None:
|
||||
"""Test deprecated supported features ints."""
|
||||
|
||||
class MockCoverEntity(cover.CoverEntity):
|
||||
_attr_supported_features = 1
|
||||
|
||||
entity = MockCoverEntity()
|
||||
entity.hass = hass
|
||||
entity.platform = MockEntityPlatform(hass)
|
||||
assert entity.supported_features is cover.CoverEntityFeature(1)
|
||||
assert "MockCoverEntity" in caplog.text
|
||||
assert "is using deprecated supported features values" in caplog.text
|
||||
assert "Instead it should use" in caplog.text
|
||||
assert "CoverEntityFeature.OPEN" in caplog.text
|
||||
caplog.clear()
|
||||
assert entity.supported_features is cover.CoverEntityFeature(1)
|
||||
assert "is using deprecated supported features values" not in caplog.text
|
||||
|
||||
@@ -3,13 +3,12 @@
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'band': '5 GHz',
|
||||
'friendly_name': 'AA:BB:CC:DD:EE:FF',
|
||||
'mac': 'AA:BB:CC:DD:EE:FF',
|
||||
'source_type': <SourceType.ROUTER: 'router'>,
|
||||
'wifi': 'Main',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'device_tracker.aa_bb_cc_dd_ee_ff',
|
||||
'entity_id': 'device_tracker.devolo_home_network_1234567890_aa_bb_cc_dd_ee_ff',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
|
||||
@@ -17,12 +17,13 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from . import configure_integration
|
||||
from .const import CONNECTED_STATIONS, NO_CONNECTED_STATIONS
|
||||
from .const import CONNECTED_STATIONS, DISCOVERY_INFO, NO_CONNECTED_STATIONS
|
||||
from .mock import MockDevice
|
||||
|
||||
from tests.common import async_fire_time_changed
|
||||
|
||||
STATION = CONNECTED_STATIONS[0]
|
||||
SERIAL = DISCOVERY_INFO.properties["SN"]
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
@@ -34,7 +35,9 @@ async def test_device_tracker(
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test device tracker states."""
|
||||
state_key = f"{PLATFORM}.{STATION.mac_address.lower().replace(':', '_')}"
|
||||
state_key = (
|
||||
f"{PLATFORM}.{DOMAIN}_{SERIAL}_{STATION.mac_address.lower().replace(':', '_')}"
|
||||
)
|
||||
entry = configure_integration(hass)
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
@@ -74,12 +77,14 @@ async def test_restoring_clients(
|
||||
entity_registry: er.EntityRegistry,
|
||||
) -> None:
|
||||
"""Test restoring existing device_tracker entities."""
|
||||
state_key = f"{PLATFORM}.{STATION.mac_address.lower().replace(':', '_')}"
|
||||
state_key = (
|
||||
f"{PLATFORM}.{DOMAIN}_{SERIAL}_{STATION.mac_address.lower().replace(':', '_')}"
|
||||
)
|
||||
entry = configure_integration(hass)
|
||||
entity_registry.async_get_or_create(
|
||||
PLATFORM,
|
||||
DOMAIN,
|
||||
f"{STATION.mac_address}",
|
||||
f"{SERIAL}_{STATION.mac_address}",
|
||||
config_entry=entry,
|
||||
)
|
||||
|
||||
|
||||
@@ -150,10 +150,10 @@ async def test_sensor(
|
||||
duration = hass.states.get("sensor.test_duration")
|
||||
assert duration.attributes.get("unit_of_measurement") == UnitOfTime.MINUTES
|
||||
assert duration.attributes.get(ATTR_ICON) == icon
|
||||
assert duration.state == "26.1833333333333"
|
||||
assert duration.state == "26"
|
||||
|
||||
assert float(hass.states.get("sensor.test_distance").state) == pytest.approx(13.682)
|
||||
assert hass.states.get("sensor.test_duration_in_traffic").state == "29.6"
|
||||
assert hass.states.get("sensor.test_duration_in_traffic").state == "30"
|
||||
assert hass.states.get("sensor.test_origin").state == "22nd St NW"
|
||||
assert (
|
||||
hass.states.get("sensor.test_origin").attributes.get(ATTR_LATITUDE)
|
||||
@@ -501,13 +501,13 @@ async def test_restore_state(hass: HomeAssistant) -> None:
|
||||
"1234",
|
||||
attributes={
|
||||
ATTR_LAST_RESET: last_reset,
|
||||
ATTR_UNIT_OF_MEASUREMENT: UnitOfTime.SECONDS,
|
||||
ATTR_UNIT_OF_MEASUREMENT: UnitOfTime.MINUTES,
|
||||
ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT,
|
||||
},
|
||||
),
|
||||
{
|
||||
"native_value": 1234,
|
||||
"native_unit_of_measurement": UnitOfTime.SECONDS,
|
||||
"native_unit_of_measurement": UnitOfTime.MINUTES,
|
||||
"icon": "mdi:car",
|
||||
"last_reset": last_reset,
|
||||
},
|
||||
@@ -518,13 +518,13 @@ async def test_restore_state(hass: HomeAssistant) -> None:
|
||||
"5678",
|
||||
attributes={
|
||||
ATTR_LAST_RESET: last_reset,
|
||||
ATTR_UNIT_OF_MEASUREMENT: UnitOfTime.SECONDS,
|
||||
ATTR_UNIT_OF_MEASUREMENT: UnitOfTime.MINUTES,
|
||||
ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT,
|
||||
},
|
||||
),
|
||||
{
|
||||
"native_value": 5678,
|
||||
"native_unit_of_measurement": UnitOfTime.SECONDS,
|
||||
"native_unit_of_measurement": UnitOfTime.MINUTES,
|
||||
"icon": "mdi:car",
|
||||
"last_reset": last_reset,
|
||||
},
|
||||
@@ -596,12 +596,12 @@ async def test_restore_state(hass: HomeAssistant) -> None:
|
||||
|
||||
# restore from cache
|
||||
state = hass.states.get("sensor.test_duration")
|
||||
assert state.state == "20.5666666666667"
|
||||
assert state.state == "1234"
|
||||
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTime.MINUTES
|
||||
assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT
|
||||
|
||||
state = hass.states.get("sensor.test_duration_in_traffic")
|
||||
assert state.state == "94.6333333333333"
|
||||
assert state.state == "5678"
|
||||
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTime.MINUTES
|
||||
assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT
|
||||
|
||||
@@ -799,12 +799,10 @@ async def test_multiple_sections(
|
||||
await hass.async_block_till_done()
|
||||
|
||||
duration = hass.states.get("sensor.test_duration")
|
||||
assert duration.state == "18.4833333333333"
|
||||
assert duration.state == "18"
|
||||
|
||||
assert float(hass.states.get("sensor.test_distance").state) == pytest.approx(3.583)
|
||||
assert (
|
||||
hass.states.get("sensor.test_duration_in_traffic").state == "18.4833333333333"
|
||||
)
|
||||
assert hass.states.get("sensor.test_duration_in_traffic").state == "18"
|
||||
assert hass.states.get("sensor.test_origin").state == "Chemin de Halage"
|
||||
assert (
|
||||
hass.states.get("sensor.test_origin").attributes.get(ATTR_LATITUDE)
|
||||
|
||||
@@ -243,14 +243,7 @@
|
||||
"1/29/1": [3, 29, 47, 144, 145, 156],
|
||||
"1/29/2": [],
|
||||
"1/29/3": [],
|
||||
"1/29/4": [
|
||||
{
|
||||
"0": null,
|
||||
"1": 15,
|
||||
"2": 2,
|
||||
"3": "Solar"
|
||||
}
|
||||
],
|
||||
"1/29/4": [],
|
||||
"1/29/65532": 0,
|
||||
"1/29/65533": 2,
|
||||
"1/29/65528": [],
|
||||
|
||||
@@ -152,9 +152,7 @@ def test_support_properties(hass: HomeAssistant, property_suffix: str) -> None:
|
||||
entity4 = MediaPlayerEntity()
|
||||
entity4.hass = hass
|
||||
entity4.platform = MockEntityPlatform(hass)
|
||||
entity4._attr_supported_features = media_player.MediaPlayerEntityFeature(
|
||||
all_features - feature
|
||||
)
|
||||
entity4._attr_supported_features = all_features - feature
|
||||
|
||||
assert getattr(entity1, f"support_{property_suffix}") is False
|
||||
assert getattr(entity2, f"support_{property_suffix}") is True
|
||||
@@ -654,3 +652,27 @@ async def test_get_async_get_browse_image_quoting(
|
||||
url = player.get_browse_image_url("album", media_content_id)
|
||||
await client.get(url)
|
||||
mock_browse_image.assert_called_with("album", media_content_id, None)
|
||||
|
||||
|
||||
def test_deprecated_supported_features_ints(
|
||||
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
|
||||
) -> None:
|
||||
"""Test deprecated supported features ints."""
|
||||
|
||||
class MockMediaPlayerEntity(MediaPlayerEntity):
|
||||
@property
|
||||
def supported_features(self) -> int:
|
||||
"""Return supported features."""
|
||||
return 1
|
||||
|
||||
entity = MockMediaPlayerEntity()
|
||||
entity.hass = hass
|
||||
entity.platform = MockEntityPlatform(hass)
|
||||
assert entity.supported_features_compat is MediaPlayerEntityFeature(1)
|
||||
assert "MockMediaPlayerEntity" in caplog.text
|
||||
assert "is using deprecated supported features values" in caplog.text
|
||||
assert "Instead it should use" in caplog.text
|
||||
assert "MediaPlayerEntityFeature.PAUSE" in caplog.text
|
||||
caplog.clear()
|
||||
assert entity.supported_features_compat is MediaPlayerEntityFeature(1)
|
||||
assert "is using deprecated supported features values" not in caplog.text
|
||||
|
||||
@@ -62,100 +62,6 @@ def mock_setup_entry() -> Generator[AsyncMock]:
|
||||
yield mock_setup_entry
|
||||
|
||||
|
||||
def _init_host_mock(host_mock: MagicMock) -> None:
|
||||
host_mock.get_host_data = AsyncMock(return_value=None)
|
||||
host_mock.get_states = AsyncMock(return_value=None)
|
||||
host_mock.check_new_firmware = AsyncMock(return_value=False)
|
||||
host_mock.unsubscribe = AsyncMock(return_value=True)
|
||||
host_mock.logout = AsyncMock(return_value=True)
|
||||
host_mock.reboot = AsyncMock()
|
||||
host_mock.set_ptz_command = AsyncMock()
|
||||
host_mock.is_nvr = True
|
||||
host_mock.is_hub = False
|
||||
host_mock.mac_address = TEST_MAC
|
||||
host_mock.uid = TEST_UID
|
||||
host_mock.onvif_enabled = True
|
||||
host_mock.rtmp_enabled = True
|
||||
host_mock.rtsp_enabled = True
|
||||
host_mock.nvr_name = TEST_NVR_NAME
|
||||
host_mock.port = TEST_PORT
|
||||
host_mock.use_https = TEST_USE_HTTPS
|
||||
host_mock.is_admin = True
|
||||
host_mock.user_level = "admin"
|
||||
host_mock.protocol = "rtsp"
|
||||
host_mock.channels = [0]
|
||||
host_mock.stream_channels = [0]
|
||||
host_mock.new_devices = False
|
||||
host_mock.sw_version_update_required = False
|
||||
host_mock.hardware_version = "IPC_00000"
|
||||
host_mock.sw_version = "v1.0.0.0.0.0000"
|
||||
host_mock.sw_upload_progress.return_value = 100
|
||||
host_mock.manufacturer = "Reolink"
|
||||
host_mock.model = TEST_HOST_MODEL
|
||||
host_mock.supported.return_value = True
|
||||
host_mock.item_number.return_value = TEST_ITEM_NUMBER
|
||||
host_mock.camera_model.return_value = TEST_CAM_MODEL
|
||||
host_mock.camera_name.return_value = TEST_NVR_NAME
|
||||
host_mock.camera_hardware_version.return_value = "IPC_00001"
|
||||
host_mock.camera_sw_version.return_value = "v1.1.0.0.0.0000"
|
||||
host_mock.camera_sw_version_update_required.return_value = False
|
||||
host_mock.camera_uid.return_value = TEST_UID_CAM
|
||||
host_mock.camera_online.return_value = True
|
||||
host_mock.channel_for_uid.return_value = 0
|
||||
host_mock.get_encoding.return_value = "h264"
|
||||
host_mock.firmware_update_available.return_value = False
|
||||
host_mock.session_active = True
|
||||
host_mock.timeout = 60
|
||||
host_mock.renewtimer.return_value = 600
|
||||
host_mock.wifi_connection = False
|
||||
host_mock.wifi_signal = None
|
||||
host_mock.whiteled_mode_list.return_value = []
|
||||
host_mock.zoom_range.return_value = {
|
||||
"zoom": {"pos": {"min": 0, "max": 100}},
|
||||
"focus": {"pos": {"min": 0, "max": 100}},
|
||||
}
|
||||
host_mock.capabilities = {"Host": ["RTSP"], "0": ["motion_detection"]}
|
||||
host_mock.checked_api_versions = {"GetEvents": 1}
|
||||
host_mock.abilities = {"abilityChn": [{"aiTrack": {"permit": 0, "ver": 0}}]}
|
||||
host_mock.get_raw_host_data.return_value = (
|
||||
"{'host':'TEST_RESPONSE','channel':'TEST_RESPONSE'}"
|
||||
)
|
||||
|
||||
# enums
|
||||
host_mock.whiteled_mode.return_value = 1
|
||||
host_mock.whiteled_mode_list.return_value = ["off", "auto"]
|
||||
host_mock.doorbell_led.return_value = "Off"
|
||||
host_mock.doorbell_led_list.return_value = ["stayoff", "auto"]
|
||||
host_mock.auto_track_method.return_value = 3
|
||||
host_mock.daynight_state.return_value = "Black&White"
|
||||
host_mock.hub_alarm_tone_id.return_value = 1
|
||||
host_mock.hub_visitor_tone_id.return_value = 1
|
||||
host_mock.recording_packing_time_list = ["30 Minutes", "60 Minutes"]
|
||||
host_mock.recording_packing_time = "60 Minutes"
|
||||
|
||||
# Baichuan
|
||||
host_mock.baichuan_only = False
|
||||
# Disable tcp push by default for tests
|
||||
host_mock.baichuan.port = TEST_BC_PORT
|
||||
host_mock.baichuan.events_active = False
|
||||
host_mock.baichuan.unsubscribe_events = AsyncMock()
|
||||
host_mock.baichuan.check_subscribe_events = AsyncMock()
|
||||
host_mock.baichuan.mac_address.return_value = TEST_MAC_CAM
|
||||
host_mock.baichuan.privacy_mode.return_value = False
|
||||
host_mock.baichuan.day_night_state.return_value = "day"
|
||||
host_mock.baichuan.subscribe_events.side_effect = ReolinkError("Test error")
|
||||
host_mock.baichuan.active_scene = "off"
|
||||
host_mock.baichuan.scene_names = ["off", "home"]
|
||||
host_mock.baichuan.abilities = {
|
||||
0: {"chnID": 0, "aitype": 34615},
|
||||
"Host": {"pushAlarm": 7},
|
||||
}
|
||||
host_mock.baichuan.smart_location_list.return_value = [0]
|
||||
host_mock.baichuan.smart_ai_type_list.return_value = ["people"]
|
||||
host_mock.baichuan.smart_ai_index.return_value = 1
|
||||
host_mock.baichuan.smart_ai_name.return_value = "zone1"
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def reolink_connect_class() -> Generator[MagicMock]:
|
||||
"""Mock reolink connection and return both the host_mock and host_mock_class."""
|
||||
@@ -165,8 +71,97 @@ def reolink_connect_class() -> Generator[MagicMock]:
|
||||
) as host_mock_class,
|
||||
):
|
||||
host_mock = host_mock_class.return_value
|
||||
host_mock.get_host_data.return_value = None
|
||||
host_mock.get_states.return_value = None
|
||||
host_mock.supported.return_value = True
|
||||
host_mock.check_new_firmware.return_value = False
|
||||
host_mock.unsubscribe.return_value = True
|
||||
host_mock.logout.return_value = True
|
||||
host_mock.is_nvr = True
|
||||
host_mock.is_hub = False
|
||||
host_mock.mac_address = TEST_MAC
|
||||
host_mock.uid = TEST_UID
|
||||
host_mock.onvif_enabled = True
|
||||
host_mock.rtmp_enabled = True
|
||||
host_mock.rtsp_enabled = True
|
||||
host_mock.nvr_name = TEST_NVR_NAME
|
||||
host_mock.port = TEST_PORT
|
||||
host_mock.use_https = TEST_USE_HTTPS
|
||||
host_mock.is_admin = True
|
||||
host_mock.user_level = "admin"
|
||||
host_mock.protocol = "rtsp"
|
||||
host_mock.channels = [0]
|
||||
host_mock.stream_channels = [0]
|
||||
host_mock.new_devices = False
|
||||
host_mock.sw_version_update_required = False
|
||||
host_mock.hardware_version = "IPC_00000"
|
||||
host_mock.sw_version = "v1.0.0.0.0.0000"
|
||||
host_mock.sw_upload_progress.return_value = 100
|
||||
host_mock.manufacturer = "Reolink"
|
||||
host_mock.model = TEST_HOST_MODEL
|
||||
host_mock.item_number.return_value = TEST_ITEM_NUMBER
|
||||
host_mock.camera_model.return_value = TEST_CAM_MODEL
|
||||
host_mock.camera_name.return_value = TEST_NVR_NAME
|
||||
host_mock.camera_hardware_version.return_value = "IPC_00001"
|
||||
host_mock.camera_sw_version.return_value = "v1.1.0.0.0.0000"
|
||||
host_mock.camera_sw_version_update_required.return_value = False
|
||||
host_mock.camera_uid.return_value = TEST_UID_CAM
|
||||
host_mock.camera_online.return_value = True
|
||||
host_mock.channel_for_uid.return_value = 0
|
||||
host_mock.get_encoding.return_value = "h264"
|
||||
host_mock.firmware_update_available.return_value = False
|
||||
host_mock.session_active = True
|
||||
host_mock.timeout = 60
|
||||
host_mock.renewtimer.return_value = 600
|
||||
host_mock.wifi_connection = False
|
||||
host_mock.wifi_signal = None
|
||||
host_mock.whiteled_mode_list.return_value = []
|
||||
host_mock.zoom_range.return_value = {
|
||||
"zoom": {"pos": {"min": 0, "max": 100}},
|
||||
"focus": {"pos": {"min": 0, "max": 100}},
|
||||
}
|
||||
host_mock.capabilities = {"Host": ["RTSP"], "0": ["motion_detection"]}
|
||||
host_mock.checked_api_versions = {"GetEvents": 1}
|
||||
host_mock.abilities = {"abilityChn": [{"aiTrack": {"permit": 0, "ver": 0}}]}
|
||||
host_mock.get_raw_host_data.return_value = (
|
||||
"{'host':'TEST_RESPONSE','channel':'TEST_RESPONSE'}"
|
||||
)
|
||||
|
||||
reolink_connect.chime_list = []
|
||||
|
||||
# enums
|
||||
host_mock.whiteled_mode.return_value = 1
|
||||
host_mock.whiteled_mode_list.return_value = ["off", "auto"]
|
||||
host_mock.doorbell_led.return_value = "Off"
|
||||
host_mock.doorbell_led_list.return_value = ["stayoff", "auto"]
|
||||
host_mock.auto_track_method.return_value = 3
|
||||
host_mock.daynight_state.return_value = "Black&White"
|
||||
host_mock.hub_alarm_tone_id.return_value = 1
|
||||
host_mock.hub_visitor_tone_id.return_value = 1
|
||||
host_mock.recording_packing_time_list = ["30 Minutes", "60 Minutes"]
|
||||
host_mock.recording_packing_time = "60 Minutes"
|
||||
|
||||
# Baichuan
|
||||
host_mock.baichuan = create_autospec(Baichuan)
|
||||
_init_host_mock(host_mock)
|
||||
host_mock.baichuan_only = False
|
||||
# Disable tcp push by default for tests
|
||||
host_mock.baichuan.port = TEST_BC_PORT
|
||||
host_mock.baichuan.events_active = False
|
||||
host_mock.baichuan.mac_address.return_value = TEST_MAC_CAM
|
||||
host_mock.baichuan.privacy_mode.return_value = False
|
||||
host_mock.baichuan.day_night_state.return_value = "day"
|
||||
host_mock.baichuan.subscribe_events.side_effect = ReolinkError("Test error")
|
||||
host_mock.baichuan.active_scene = "off"
|
||||
host_mock.baichuan.scene_names = ["off", "home"]
|
||||
host_mock.baichuan.abilities = {
|
||||
0: {"chnID": 0, "aitype": 34615},
|
||||
"Host": {"pushAlarm": 7},
|
||||
}
|
||||
host_mock.baichuan.smart_location_list.return_value = [0]
|
||||
host_mock.baichuan.smart_ai_type_list.return_value = ["people"]
|
||||
host_mock.baichuan.smart_ai_index.return_value = 1
|
||||
host_mock.baichuan.smart_ai_name.return_value = "zone1"
|
||||
|
||||
yield host_mock_class
|
||||
|
||||
|
||||
@@ -178,18 +173,6 @@ def reolink_connect(
|
||||
return reolink_connect_class.return_value
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def reolink_host() -> Generator[MagicMock]:
|
||||
"""Mock reolink Host class."""
|
||||
with patch(
|
||||
"homeassistant.components.reolink.host.Host", autospec=False
|
||||
) as host_mock_class:
|
||||
host_mock = host_mock_class.return_value
|
||||
host_mock.baichuan = MagicMock()
|
||||
_init_host_mock(host_mock)
|
||||
yield host_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def reolink_platforms() -> Generator[None]:
|
||||
"""Mock reolink entry setup."""
|
||||
|
||||
@@ -21,7 +21,7 @@ from tests.common import MockConfigEntry
|
||||
async def test_button(
|
||||
hass: HomeAssistant,
|
||||
config_entry: MockConfigEntry,
|
||||
reolink_host: MagicMock,
|
||||
reolink_connect: MagicMock,
|
||||
) -> None:
|
||||
"""Test button entity with ptz up."""
|
||||
with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BUTTON]):
|
||||
@@ -37,9 +37,9 @@ async def test_button(
|
||||
{ATTR_ENTITY_ID: entity_id},
|
||||
blocking=True,
|
||||
)
|
||||
reolink_host.set_ptz_command.assert_called_once()
|
||||
reolink_connect.set_ptz_command.assert_called_once()
|
||||
|
||||
reolink_host.set_ptz_command.side_effect = ReolinkError("Test error")
|
||||
reolink_connect.set_ptz_command.side_effect = ReolinkError("Test error")
|
||||
with pytest.raises(HomeAssistantError):
|
||||
await hass.services.async_call(
|
||||
BUTTON_DOMAIN,
|
||||
@@ -48,11 +48,13 @@ async def test_button(
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
reolink_connect.set_ptz_command.reset_mock(side_effect=True)
|
||||
|
||||
|
||||
async def test_ptz_move_service(
|
||||
hass: HomeAssistant,
|
||||
config_entry: MockConfigEntry,
|
||||
reolink_host: MagicMock,
|
||||
reolink_connect: MagicMock,
|
||||
) -> None:
|
||||
"""Test ptz_move entity service using PTZ button entity."""
|
||||
with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BUTTON]):
|
||||
@@ -68,9 +70,9 @@ async def test_ptz_move_service(
|
||||
{ATTR_ENTITY_ID: entity_id, ATTR_SPEED: 5},
|
||||
blocking=True,
|
||||
)
|
||||
reolink_host.set_ptz_command.assert_called_with(0, command="Up", speed=5)
|
||||
reolink_connect.set_ptz_command.assert_called_with(0, command="Up", speed=5)
|
||||
|
||||
reolink_host.set_ptz_command.side_effect = ReolinkError("Test error")
|
||||
reolink_connect.set_ptz_command.side_effect = ReolinkError("Test error")
|
||||
with pytest.raises(HomeAssistantError):
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
@@ -79,12 +81,14 @@ async def test_ptz_move_service(
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
reolink_connect.set_ptz_command.reset_mock(side_effect=True)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_host_button(
|
||||
hass: HomeAssistant,
|
||||
config_entry: MockConfigEntry,
|
||||
reolink_host: MagicMock,
|
||||
reolink_connect: MagicMock,
|
||||
) -> None:
|
||||
"""Test host button entity with reboot."""
|
||||
with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BUTTON]):
|
||||
@@ -100,9 +104,9 @@ async def test_host_button(
|
||||
{ATTR_ENTITY_ID: entity_id},
|
||||
blocking=True,
|
||||
)
|
||||
reolink_host.reboot.assert_called_once()
|
||||
reolink_connect.reboot.assert_called_once()
|
||||
|
||||
reolink_host.reboot.side_effect = ReolinkError("Test error")
|
||||
reolink_connect.reboot.side_effect = ReolinkError("Test error")
|
||||
with pytest.raises(HomeAssistantError):
|
||||
await hass.services.async_call(
|
||||
BUTTON_DOMAIN,
|
||||
@@ -110,3 +114,5 @@ async def test_host_button(
|
||||
{ATTR_ENTITY_ID: entity_id},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
reolink_connect.reboot.reset_mock(side_effect=True)
|
||||
|
||||
@@ -118,7 +118,6 @@ async def test_webhook_callback(
|
||||
reolink_connect.motion_detected.return_value = True
|
||||
reolink_connect.ONVIF_event_callback.return_value = None
|
||||
await client.post(f"/api/webhook/{webhook_id}")
|
||||
await hass.async_block_till_done()
|
||||
signal_all.assert_called_once()
|
||||
assert hass.states.get(entity_id).state == STATE_ON
|
||||
|
||||
@@ -130,7 +129,6 @@ async def test_webhook_callback(
|
||||
signal_all.reset_mock()
|
||||
reolink_connect.get_motion_state_all_ch.return_value = False
|
||||
await client.post(f"/api/webhook/{webhook_id}")
|
||||
await hass.async_block_till_done()
|
||||
signal_all.assert_not_called()
|
||||
|
||||
assert hass.states.get(entity_id).state == STATE_ON
|
||||
@@ -139,7 +137,6 @@ async def test_webhook_callback(
|
||||
reolink_connect.motion_detected.return_value = False
|
||||
reolink_connect.ONVIF_event_callback.return_value = [0]
|
||||
await client.post(f"/api/webhook/{webhook_id}", data="test_data")
|
||||
await hass.async_block_till_done()
|
||||
signal_ch.assert_called_once()
|
||||
assert hass.states.get(entity_id).state == STATE_OFF
|
||||
|
||||
@@ -147,7 +144,6 @@ async def test_webhook_callback(
|
||||
signal_ch.reset_mock()
|
||||
reolink_connect.ONVIF_event_callback.side_effect = Exception("Test error")
|
||||
await client.post(f"/api/webhook/{webhook_id}", data="test_data")
|
||||
await hass.async_block_till_done()
|
||||
signal_ch.assert_not_called()
|
||||
|
||||
# test failure to read date from webhook post
|
||||
|
||||
@@ -53,6 +53,6 @@
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '1.0',
|
||||
'state': '1',
|
||||
})
|
||||
# ---
|
||||
|
||||
@@ -93,11 +93,11 @@ async def test_number_state_update(
|
||||
|
||||
entity_id = entity_info["entity_id"]
|
||||
|
||||
assert hass.states.get(entity_id).state == "1.0"
|
||||
assert hass.states.get(entity_id).state == "1"
|
||||
|
||||
mock_number_property.get.return_value = 100
|
||||
|
||||
await update_property_listeners(mock_number_property)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get(entity_id).state == "100.0"
|
||||
assert hass.states.get(entity_id).state == "100"
|
||||
|
||||
@@ -31,6 +31,7 @@ from .common import async_start
|
||||
from tests.common import (
|
||||
MockConfigEntry,
|
||||
MockEntity,
|
||||
MockEntityPlatform,
|
||||
MockModule,
|
||||
help_test_all,
|
||||
import_and_test_deprecated_constant_enum,
|
||||
@@ -262,6 +263,44 @@ async def test_send_command(hass: HomeAssistant, config_flow_fixture: None) -> N
|
||||
assert "test" in strings
|
||||
|
||||
|
||||
async def test_supported_features_compat(hass: HomeAssistant) -> None:
|
||||
"""Test StateVacuumEntity using deprecated feature constants features."""
|
||||
|
||||
features = (
|
||||
VacuumEntityFeature.BATTERY
|
||||
| VacuumEntityFeature.FAN_SPEED
|
||||
| VacuumEntityFeature.START
|
||||
| VacuumEntityFeature.STOP
|
||||
| VacuumEntityFeature.PAUSE
|
||||
)
|
||||
|
||||
class _LegacyConstantsStateVacuum(StateVacuumEntity):
|
||||
_attr_supported_features = int(features)
|
||||
_attr_fan_speed_list = ["silent", "normal", "pet hair"]
|
||||
|
||||
entity = _LegacyConstantsStateVacuum()
|
||||
entity.hass = hass
|
||||
entity.platform = MockEntityPlatform(hass)
|
||||
assert isinstance(entity.supported_features, int)
|
||||
assert entity.supported_features == int(features)
|
||||
assert entity.supported_features_compat is (
|
||||
VacuumEntityFeature.BATTERY
|
||||
| VacuumEntityFeature.FAN_SPEED
|
||||
| VacuumEntityFeature.START
|
||||
| VacuumEntityFeature.STOP
|
||||
| VacuumEntityFeature.PAUSE
|
||||
)
|
||||
assert entity.state_attributes == {
|
||||
"battery_level": None,
|
||||
"battery_icon": "mdi:battery-unknown",
|
||||
"fan_speed": None,
|
||||
}
|
||||
assert entity.capability_attributes == {
|
||||
"fan_speed_list": ["silent", "normal", "pet hair"]
|
||||
}
|
||||
assert entity._deprecated_supported_features_reported
|
||||
|
||||
|
||||
async def test_vacuum_not_log_deprecated_state_warning(
|
||||
hass: HomeAssistant,
|
||||
mock_vacuum_entity: MockVacuum,
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
import asyncio
|
||||
from copy import deepcopy
|
||||
import io
|
||||
import logging
|
||||
from typing import Any
|
||||
from unittest.mock import ANY, AsyncMock, Mock, patch
|
||||
@@ -18,9 +17,6 @@ from homeassistant.components.websocket_api.auth import (
|
||||
TYPE_AUTH_OK,
|
||||
TYPE_AUTH_REQUIRED,
|
||||
)
|
||||
from homeassistant.components.websocket_api.commands import (
|
||||
ALL_TRIGGER_DESCRIPTIONS_JSON_CACHE,
|
||||
)
|
||||
from homeassistant.components.websocket_api.const import FEATURE_COALESCE_MESSAGES, URL
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import SIGNAL_BOOTSTRAP_INTEGRATIONS
|
||||
@@ -29,10 +25,9 @@ from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.event import async_track_state_change_event
|
||||
from homeassistant.loader import Integration, async_get_integration
|
||||
from homeassistant.loader import async_get_integration
|
||||
from homeassistant.setup import async_set_domains_to_be_loaded, async_setup_component
|
||||
from homeassistant.util.json import json_loads
|
||||
from homeassistant.util.yaml.loader import parse_yaml
|
||||
|
||||
from tests.common import (
|
||||
MockConfigEntry,
|
||||
@@ -682,91 +677,6 @@ async def test_get_services(
|
||||
assert msg["result"].keys() == hass.services.async_services().keys()
|
||||
|
||||
|
||||
@patch("annotatedyaml.loader.load_yaml")
|
||||
@patch.object(Integration, "has_triggers", return_value=True)
|
||||
async def test_subscribe_triggers(
|
||||
mock_has_triggers: Mock,
|
||||
mock_load_yaml: Mock,
|
||||
hass: HomeAssistant,
|
||||
websocket_client: MockHAClientWebSocket,
|
||||
) -> None:
|
||||
"""Test get_triggers command."""
|
||||
sun_service_descriptions = """
|
||||
sun: {}
|
||||
"""
|
||||
tag_service_descriptions = """
|
||||
tag: {}
|
||||
"""
|
||||
|
||||
def _load_yaml(fname, secrets=None):
|
||||
if fname.endswith("sun/triggers.yaml"):
|
||||
service_descriptions = sun_service_descriptions
|
||||
elif fname.endswith("tag/triggers.yaml"):
|
||||
service_descriptions = tag_service_descriptions
|
||||
else:
|
||||
raise FileNotFoundError
|
||||
with io.StringIO(service_descriptions) as file:
|
||||
return parse_yaml(file)
|
||||
|
||||
mock_load_yaml.side_effect = _load_yaml
|
||||
|
||||
assert await async_setup_component(hass, "sun", {})
|
||||
assert await async_setup_component(hass, "system_health", {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert ALL_TRIGGER_DESCRIPTIONS_JSON_CACHE not in hass.data
|
||||
|
||||
await websocket_client.send_json_auto_id({"type": "trigger_platforms/subscribe"})
|
||||
|
||||
# Test start subscription with initial event
|
||||
msg = await websocket_client.receive_json()
|
||||
assert msg == {"id": 1, "result": None, "success": True, "type": "result"}
|
||||
msg = await websocket_client.receive_json()
|
||||
assert msg == {"event": {"sun": {"fields": {}}}, "id": 1, "type": "event"}
|
||||
|
||||
old_cache = hass.data[ALL_TRIGGER_DESCRIPTIONS_JSON_CACHE]
|
||||
|
||||
# Test we receive an event when a new platform is loaded, if it has descriptions
|
||||
assert await async_setup_component(hass, "calendar", {})
|
||||
assert await async_setup_component(hass, "tag", {})
|
||||
await hass.async_block_till_done()
|
||||
msg = await websocket_client.receive_json()
|
||||
assert msg == {
|
||||
"event": {"tag": {"fields": {}}},
|
||||
"id": 1,
|
||||
"type": "event",
|
||||
}
|
||||
|
||||
# Initiate a second subscription to check the cache is updated because of the new
|
||||
# trigger
|
||||
await websocket_client.send_json_auto_id({"type": "trigger_platforms/subscribe"})
|
||||
msg = await websocket_client.receive_json()
|
||||
assert msg == {"id": 2, "result": None, "success": True, "type": "result"}
|
||||
msg = await websocket_client.receive_json()
|
||||
assert msg == {
|
||||
"event": {"sun": {"fields": {}}, "tag": {"fields": {}}},
|
||||
"id": 2,
|
||||
"type": "event",
|
||||
}
|
||||
|
||||
assert hass.data[ALL_TRIGGER_DESCRIPTIONS_JSON_CACHE] is not old_cache
|
||||
|
||||
# Initiate a third subscription to check the cache is not updated because no new
|
||||
# trigger was added
|
||||
old_cache = hass.data[ALL_TRIGGER_DESCRIPTIONS_JSON_CACHE]
|
||||
await websocket_client.send_json_auto_id({"type": "trigger_platforms/subscribe"})
|
||||
msg = await websocket_client.receive_json()
|
||||
assert msg == {"id": 3, "result": None, "success": True, "type": "result"}
|
||||
msg = await websocket_client.receive_json()
|
||||
assert msg == {
|
||||
"event": {"sun": {"fields": {}}, "tag": {"fields": {}}},
|
||||
"id": 3,
|
||||
"type": "event",
|
||||
}
|
||||
|
||||
assert hass.data[ALL_TRIGGER_DESCRIPTIONS_JSON_CACHE] is old_cache
|
||||
|
||||
|
||||
async def test_get_config(
|
||||
hass: HomeAssistant, websocket_client: MockHAClientWebSocket
|
||||
) -> None:
|
||||
|
||||
@@ -1365,110 +1365,3 @@ async def test_announce(
|
||||
# Stop the satellite
|
||||
await hass.config_entries.async_unload(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
|
||||
async def test_tts_timeout(
|
||||
hass: HomeAssistant, entity_registry: er.EntityRegistry
|
||||
) -> None:
|
||||
"""Test entity state goes back to IDLE on a timeout."""
|
||||
events = [
|
||||
Info(satellite=SATELLITE_INFO.satellite).event(),
|
||||
RunPipeline(start_stage=PipelineStage.TTS, end_stage=PipelineStage.TTS).event(),
|
||||
]
|
||||
|
||||
pipeline_kwargs: dict[str, Any] = {}
|
||||
pipeline_event_callback: Callable[[assist_pipeline.PipelineEvent], None] | None = (
|
||||
None
|
||||
)
|
||||
run_pipeline_called = asyncio.Event()
|
||||
|
||||
async def async_pipeline_from_audio_stream(
|
||||
hass: HomeAssistant,
|
||||
context,
|
||||
event_callback,
|
||||
stt_metadata,
|
||||
stt_stream,
|
||||
**kwargs,
|
||||
) -> None:
|
||||
nonlocal pipeline_kwargs, pipeline_event_callback
|
||||
pipeline_kwargs = kwargs
|
||||
pipeline_event_callback = event_callback
|
||||
|
||||
run_pipeline_called.set()
|
||||
|
||||
response_finished = asyncio.Event()
|
||||
|
||||
def tts_response_finished(self):
|
||||
response_finished.set()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.wyoming.data.load_wyoming_info",
|
||||
return_value=SATELLITE_INFO,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.wyoming.assist_satellite.AsyncTcpClient",
|
||||
SatelliteAsyncTcpClient(events),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream",
|
||||
async_pipeline_from_audio_stream,
|
||||
),
|
||||
patch("homeassistant.components.wyoming.assist_satellite._PING_SEND_DELAY", 0),
|
||||
patch(
|
||||
"homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.tts_response_finished",
|
||||
tts_response_finished,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.wyoming.assist_satellite._TTS_TIMEOUT_EXTRA",
|
||||
0,
|
||||
),
|
||||
):
|
||||
entry = await setup_config_entry(hass)
|
||||
device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device
|
||||
assert device is not None
|
||||
|
||||
satellite_entry = next(
|
||||
(
|
||||
maybe_entry
|
||||
for maybe_entry in er.async_entries_for_device(
|
||||
entity_registry, device.device_id
|
||||
)
|
||||
if maybe_entry.domain == assist_satellite.DOMAIN
|
||||
),
|
||||
None,
|
||||
)
|
||||
assert satellite_entry is not None
|
||||
|
||||
async with asyncio.timeout(1):
|
||||
await run_pipeline_called.wait()
|
||||
|
||||
# Reset so we can check the pipeline is automatically restarted below
|
||||
run_pipeline_called.clear()
|
||||
|
||||
assert pipeline_event_callback is not None
|
||||
assert pipeline_kwargs.get("device_id") == device.device_id
|
||||
|
||||
pipeline_event_callback(
|
||||
assist_pipeline.PipelineEvent(
|
||||
assist_pipeline.PipelineEventType.TTS_START,
|
||||
{
|
||||
"tts_input": "test text to speak",
|
||||
"voice": "test voice",
|
||||
},
|
||||
)
|
||||
)
|
||||
mock_tts_result_stream = MockResultStream(hass, "wav", get_test_wav())
|
||||
pipeline_event_callback(
|
||||
assist_pipeline.PipelineEvent(
|
||||
assist_pipeline.PipelineEventType.TTS_END,
|
||||
{"tts_output": {"token": mock_tts_result_stream.token}},
|
||||
)
|
||||
)
|
||||
async with asyncio.timeout(1):
|
||||
# tts_response_finished should be called on timeout
|
||||
await response_finished.wait()
|
||||
|
||||
# Stop the satellite
|
||||
await hass.config_entries.async_unload(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
@@ -97,8 +97,8 @@
|
||||
'value_id': '52-113-0-Home Security-Cover status',
|
||||
}),
|
||||
dict({
|
||||
'disabled': True,
|
||||
'disabled_by': 'integration',
|
||||
'disabled': False,
|
||||
'disabled_by': None,
|
||||
'domain': 'button',
|
||||
'entity_category': 'config',
|
||||
'entity_id': 'button.multisensor_6_idle_home_security_cover_status',
|
||||
@@ -120,8 +120,8 @@
|
||||
'value_id': '52-113-0-Home Security-Cover status',
|
||||
}),
|
||||
dict({
|
||||
'disabled': True,
|
||||
'disabled_by': 'integration',
|
||||
'disabled': False,
|
||||
'disabled_by': None,
|
||||
'domain': 'button',
|
||||
'entity_category': 'config',
|
||||
'entity_id': 'button.multisensor_6_idle_home_security_motion_sensor_status',
|
||||
|
||||
@@ -1,21 +1,13 @@
|
||||
"""Test the Z-Wave JS button entities."""
|
||||
|
||||
from datetime import timedelta
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from zwave_js_server.model.node import Node
|
||||
|
||||
from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS
|
||||
from homeassistant.components.zwave_js.const import DOMAIN, SERVICE_REFRESH_VALUE
|
||||
from homeassistant.components.zwave_js.helpers import get_valueless_base_unique_id
|
||||
from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY
|
||||
from homeassistant.const import ATTR_ENTITY_ID, EntityCategory, Platform
|
||||
from homeassistant.const import ATTR_ENTITY_ID, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -79,32 +71,11 @@ async def test_ping_entity(
|
||||
|
||||
|
||||
async def test_notification_idle_button(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
client: MagicMock,
|
||||
multisensor_6: Node,
|
||||
integration: MockConfigEntry,
|
||||
hass: HomeAssistant, client, multisensor_6, integration
|
||||
) -> None:
|
||||
"""Test Notification idle button."""
|
||||
node = multisensor_6
|
||||
entity_id = "button.multisensor_6_idle_home_security_cover_status"
|
||||
entity_entry = entity_registry.async_get(entity_id)
|
||||
assert entity_entry
|
||||
assert entity_entry.entity_category is EntityCategory.CONFIG
|
||||
assert entity_entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION
|
||||
assert hass.states.get(entity_id) is None # disabled by default
|
||||
|
||||
entity_registry.async_update_entity(
|
||||
entity_id,
|
||||
disabled_by=None,
|
||||
)
|
||||
async_fire_time_changed(
|
||||
hass,
|
||||
dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1),
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
state = hass.states.get("button.multisensor_6_idle_home_security_cover_status")
|
||||
assert state
|
||||
assert state.state == "unknown"
|
||||
assert (
|
||||
@@ -117,13 +88,13 @@ async def test_notification_idle_button(
|
||||
BUTTON_DOMAIN,
|
||||
SERVICE_PRESS,
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
ATTR_ENTITY_ID: "button.multisensor_6_idle_home_security_cover_status",
|
||||
},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
assert client.async_send_command_no_wait.call_count == 1
|
||||
args = client.async_send_command_no_wait.call_args[0][0]
|
||||
assert len(client.async_send_command_no_wait.call_args_list) == 1
|
||||
args = client.async_send_command_no_wait.call_args_list[0][0][0]
|
||||
assert args["command"] == "node.manually_idle_notification_value"
|
||||
assert args["nodeId"] == node.node_id
|
||||
assert args["valueId"] == {
|
||||
|
||||
@@ -1,12 +1,10 @@
|
||||
"""Test entity discovery for device-specific schemas for the Z-Wave JS integration."""
|
||||
|
||||
from datetime import timedelta
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from zwave_js_server.event import Event
|
||||
from zwave_js_server.model.node import Node
|
||||
|
||||
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
|
||||
from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS
|
||||
from homeassistant.components.light import ATTR_SUPPORTED_COLOR_MODES, ColorMode
|
||||
from homeassistant.components.number import (
|
||||
@@ -14,6 +12,7 @@ from homeassistant.components.number import (
|
||||
DOMAIN as NUMBER_DOMAIN,
|
||||
SERVICE_SET_VALUE,
|
||||
)
|
||||
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
|
||||
from homeassistant.components.switch import (
|
||||
DOMAIN as SWITCH_DOMAIN,
|
||||
SERVICE_TURN_OFF,
|
||||
@@ -27,13 +26,12 @@ from homeassistant.components.zwave_js.discovery import (
|
||||
from homeassistant.components.zwave_js.discovery_data_template import (
|
||||
DynamicCurrentTempClimateDataTemplate,
|
||||
)
|
||||
from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY
|
||||
from homeassistant.components.zwave_js.helpers import get_device_id
|
||||
from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_UNKNOWN, EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
async def test_aeon_smart_switch_6_state(
|
||||
@@ -224,24 +222,17 @@ async def test_merten_507801_disabled_enitites(
|
||||
async def test_zooz_zen72(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
client: MagicMock,
|
||||
switch_zooz_zen72: Node,
|
||||
integration: MockConfigEntry,
|
||||
client,
|
||||
switch_zooz_zen72,
|
||||
integration,
|
||||
) -> None:
|
||||
"""Test that Zooz ZEN72 Indicators are discovered as number entities."""
|
||||
assert len(hass.states.async_entity_ids(NUMBER_DOMAIN)) == 1
|
||||
assert len(hass.states.async_entity_ids(BUTTON_DOMAIN)) == 2 # includes ping
|
||||
entity_id = "number.z_wave_plus_700_series_dimmer_switch_indicator_value"
|
||||
entity_entry = entity_registry.async_get(entity_id)
|
||||
assert entity_entry
|
||||
assert entity_entry.entity_category == EntityCategory.CONFIG
|
||||
assert entity_entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION
|
||||
assert hass.states.get(entity_id) is None # disabled by default
|
||||
entity_registry.async_update_entity(entity_id, disabled_by=None)
|
||||
async_fire_time_changed(
|
||||
hass,
|
||||
dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1),
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
client.async_send_command.reset_mock()
|
||||
entry = entity_registry.async_get(entity_id)
|
||||
assert entry
|
||||
assert entry.entity_category == EntityCategory.CONFIG
|
||||
state = hass.states.get(entity_id)
|
||||
assert state
|
||||
assert state.state == STATE_UNKNOWN
|
||||
@@ -255,7 +246,7 @@ async def test_zooz_zen72(
|
||||
},
|
||||
blocking=True,
|
||||
)
|
||||
assert client.async_send_command.call_count == 1
|
||||
assert len(client.async_send_command.call_args_list) == 1
|
||||
args = client.async_send_command.call_args[0][0]
|
||||
assert args["command"] == "node.set_value"
|
||||
assert args["nodeId"] == switch_zooz_zen72.node_id
|
||||
@@ -269,18 +260,16 @@ async def test_zooz_zen72(
|
||||
client.async_send_command.reset_mock()
|
||||
|
||||
entity_id = "button.z_wave_plus_700_series_dimmer_switch_identify"
|
||||
entity_entry = entity_registry.async_get(entity_id)
|
||||
assert entity_entry
|
||||
assert entity_entry.entity_category == EntityCategory.CONFIG
|
||||
assert entity_entry.disabled_by is None
|
||||
assert hass.states.get(entity_id) is not None
|
||||
entry = entity_registry.async_get(entity_id)
|
||||
assert entry
|
||||
assert entry.entity_category == EntityCategory.CONFIG
|
||||
await hass.services.async_call(
|
||||
BUTTON_DOMAIN,
|
||||
SERVICE_PRESS,
|
||||
{ATTR_ENTITY_ID: entity_id},
|
||||
blocking=True,
|
||||
)
|
||||
assert client.async_send_command.call_count == 1
|
||||
assert len(client.async_send_command.call_args_list) == 1
|
||||
args = client.async_send_command.call_args[0][0]
|
||||
assert args["command"] == "node.set_value"
|
||||
assert args["nodeId"] == switch_zooz_zen72.node_id
|
||||
@@ -296,55 +285,53 @@ async def test_indicator_test(
|
||||
hass: HomeAssistant,
|
||||
device_registry: dr.DeviceRegistry,
|
||||
entity_registry: er.EntityRegistry,
|
||||
client: MagicMock,
|
||||
indicator_test: Node,
|
||||
integration: MockConfigEntry,
|
||||
client,
|
||||
indicator_test,
|
||||
integration,
|
||||
) -> None:
|
||||
"""Test that Indicators are discovered properly.
|
||||
|
||||
This test covers indicators that we don't already have device fixtures for.
|
||||
"""
|
||||
binary_sensor_entity_id = "binary_sensor.this_is_a_fake_device_binary_sensor"
|
||||
sensor_entity_id = "sensor.this_is_a_fake_device_sensor"
|
||||
switch_entity_id = "switch.this_is_a_fake_device_switch"
|
||||
|
||||
for entity_id in (
|
||||
binary_sensor_entity_id,
|
||||
sensor_entity_id,
|
||||
):
|
||||
entity_entry = entity_registry.async_get(entity_id)
|
||||
assert entity_entry
|
||||
assert entity_entry.entity_category == EntityCategory.DIAGNOSTIC
|
||||
assert entity_entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION
|
||||
assert hass.states.get(entity_id) is None # disabled by default
|
||||
entity_registry.async_update_entity(entity_id, disabled_by=None)
|
||||
|
||||
entity_id = switch_entity_id
|
||||
entity_entry = entity_registry.async_get(entity_id)
|
||||
assert entity_entry
|
||||
assert entity_entry.entity_category == EntityCategory.CONFIG
|
||||
assert entity_entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION
|
||||
assert hass.states.get(entity_id) is None # disabled by default
|
||||
entity_registry.async_update_entity(entity_id, disabled_by=None)
|
||||
|
||||
async_fire_time_changed(
|
||||
hass,
|
||||
dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1),
|
||||
device = device_registry.async_get_device(
|
||||
identifiers={get_device_id(client.driver, indicator_test)}
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
client.async_send_command.reset_mock()
|
||||
assert device
|
||||
entities = er.async_entries_for_device(entity_registry, device.id)
|
||||
|
||||
entity_id = binary_sensor_entity_id
|
||||
def len_domain(domain):
|
||||
return len([entity for entity in entities if entity.domain == domain])
|
||||
|
||||
assert len_domain(NUMBER_DOMAIN) == 0
|
||||
assert len_domain(BUTTON_DOMAIN) == 1 # only ping
|
||||
assert len_domain(BINARY_SENSOR_DOMAIN) == 1
|
||||
assert len_domain(SENSOR_DOMAIN) == 3 # include node status + last seen
|
||||
assert len_domain(SWITCH_DOMAIN) == 1
|
||||
|
||||
entity_id = "binary_sensor.this_is_a_fake_device_binary_sensor"
|
||||
entry = entity_registry.async_get(entity_id)
|
||||
assert entry
|
||||
assert entry.entity_category == EntityCategory.DIAGNOSTIC
|
||||
state = hass.states.get(entity_id)
|
||||
assert state
|
||||
assert state.state == STATE_OFF
|
||||
|
||||
entity_id = sensor_entity_id
|
||||
client.async_send_command.reset_mock()
|
||||
|
||||
entity_id = "sensor.this_is_a_fake_device_sensor"
|
||||
entry = entity_registry.async_get(entity_id)
|
||||
assert entry
|
||||
assert entry.entity_category == EntityCategory.DIAGNOSTIC
|
||||
state = hass.states.get(entity_id)
|
||||
assert state
|
||||
assert state.state == "0.0"
|
||||
|
||||
entity_id = switch_entity_id
|
||||
client.async_send_command.reset_mock()
|
||||
|
||||
entity_id = "switch.this_is_a_fake_device_switch"
|
||||
entry = entity_registry.async_get(entity_id)
|
||||
assert entry
|
||||
assert entry.entity_category == EntityCategory.CONFIG
|
||||
state = hass.states.get(entity_id)
|
||||
assert state
|
||||
assert state.state == STATE_OFF
|
||||
@@ -355,7 +342,7 @@ async def test_indicator_test(
|
||||
{ATTR_ENTITY_ID: entity_id},
|
||||
blocking=True,
|
||||
)
|
||||
assert client.async_send_command.call_count == 1
|
||||
assert len(client.async_send_command.call_args_list) == 1
|
||||
args = client.async_send_command.call_args[0][0]
|
||||
assert args["command"] == "node.set_value"
|
||||
assert args["nodeId"] == indicator_test.node_id
|
||||
@@ -375,7 +362,7 @@ async def test_indicator_test(
|
||||
{ATTR_ENTITY_ID: entity_id},
|
||||
blocking=True,
|
||||
)
|
||||
assert client.async_send_command.call_count == 1
|
||||
assert len(client.async_send_command.call_args_list) == 1
|
||||
args = client.async_send_command.call_args[0][0]
|
||||
assert args["command"] == "node.set_value"
|
||||
assert args["nodeId"] == indicator_test.node_id
|
||||
|
||||
@@ -1812,8 +1812,7 @@ async def test_disabled_node_status_entity_on_node_replaced(
|
||||
assert state.state == STATE_UNAVAILABLE
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_remove_entity_on_value_removed(
|
||||
async def test_disabled_entity_on_value_removed(
|
||||
hass: HomeAssistant,
|
||||
zp3111: Node,
|
||||
client: MagicMock,
|
||||
@@ -1824,6 +1823,15 @@ async def test_remove_entity_on_value_removed(
|
||||
"button.4_in_1_sensor_idle_home_security_cover_status"
|
||||
)
|
||||
|
||||
# must reload the integration when enabling an entity
|
||||
await hass.config_entries.async_unload(integration.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
assert integration.state is ConfigEntryState.NOT_LOADED
|
||||
integration.add_to_hass(hass)
|
||||
await hass.config_entries.async_setup(integration.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
assert integration.state is ConfigEntryState.LOADED
|
||||
|
||||
state = hass.states.get(idle_cover_status_button_entity)
|
||||
assert state
|
||||
assert state.state != STATE_UNAVAILABLE
|
||||
|
||||
@@ -1125,7 +1125,7 @@ async def test_selector_serializer(
|
||||
"media_content_type": {"type": "string"},
|
||||
"metadata": {"type": "object", "additionalProperties": True},
|
||||
},
|
||||
"required": ["entity_id", "media_content_id", "media_content_type"],
|
||||
"required": ["media_content_id", "media_content_type"],
|
||||
}
|
||||
assert selector_serializer(selector.NumberSelector({"mode": "box"})) == {
|
||||
"type": "number"
|
||||
|
||||
@@ -817,6 +817,23 @@ def test_theme_selector_schema(schema, valid_selections, invalid_selections) ->
|
||||
),
|
||||
(None, "abc", {}),
|
||||
),
|
||||
(
|
||||
{
|
||||
"accept": ["image/*"],
|
||||
},
|
||||
(
|
||||
{
|
||||
"media_content_id": "abc",
|
||||
"media_content_type": "def",
|
||||
},
|
||||
{
|
||||
"media_content_id": "abc",
|
||||
"media_content_type": "def",
|
||||
"metadata": {},
|
||||
},
|
||||
),
|
||||
(None, "abc", {}),
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_media_selector_schema(schema, valid_selections, invalid_selections) -> None:
|
||||
|
||||
@@ -16,7 +16,6 @@ from homeassistant import exceptions
|
||||
from homeassistant.auth.permissions import PolicyPermissions
|
||||
import homeassistant.components # noqa: F401
|
||||
from homeassistant.components.group import DOMAIN as DOMAIN_GROUP, Group
|
||||
from homeassistant.components.input_button import DOMAIN as DOMAIN_INPUT_BUTTON
|
||||
from homeassistant.components.logger import DOMAIN as DOMAIN_LOGGER
|
||||
from homeassistant.components.shell_command import DOMAIN as DOMAIN_SHELL_COMMAND
|
||||
from homeassistant.components.system_health import DOMAIN as DOMAIN_SYSTEM_HEALTH
|
||||
@@ -43,12 +42,7 @@ from homeassistant.helpers import (
|
||||
entity_registry as er,
|
||||
service,
|
||||
)
|
||||
from homeassistant.helpers.translation import async_get_translations
|
||||
from homeassistant.loader import (
|
||||
Integration,
|
||||
async_get_integration,
|
||||
async_get_integrations,
|
||||
)
|
||||
from homeassistant.loader import async_get_integration
|
||||
from homeassistant.setup import async_setup_component
|
||||
from homeassistant.util.yaml.loader import parse_yaml
|
||||
|
||||
@@ -1098,66 +1092,38 @@ async def test_async_get_all_descriptions_failing_integration(
|
||||
"""Test async_get_all_descriptions when async_get_integrations returns an exception."""
|
||||
group_config = {DOMAIN_GROUP: {}}
|
||||
await async_setup_component(hass, DOMAIN_GROUP, group_config)
|
||||
descriptions = await service.async_get_all_descriptions(hass)
|
||||
|
||||
assert len(descriptions) == 1
|
||||
|
||||
assert "description" in descriptions["group"]["reload"]
|
||||
assert "fields" in descriptions["group"]["reload"]
|
||||
|
||||
logger_config = {DOMAIN_LOGGER: {}}
|
||||
await async_setup_component(hass, DOMAIN_LOGGER, logger_config)
|
||||
|
||||
input_button_config = {DOMAIN_INPUT_BUTTON: {}}
|
||||
await async_setup_component(hass, DOMAIN_INPUT_BUTTON, input_button_config)
|
||||
|
||||
async def wrap_get_integrations(
|
||||
hass: HomeAssistant, domains: Iterable[str]
|
||||
) -> dict[str, Integration | Exception]:
|
||||
integrations = await async_get_integrations(hass, domains)
|
||||
integrations[DOMAIN_LOGGER] = ImportError("Failed to load services.yaml")
|
||||
return integrations
|
||||
|
||||
async def wrap_get_translations(
|
||||
hass: HomeAssistant,
|
||||
language: str,
|
||||
category: str,
|
||||
integrations: Iterable[str] | None = None,
|
||||
config_flow: bool | None = None,
|
||||
) -> dict[str, str]:
|
||||
translations = await async_get_translations(
|
||||
hass, language, category, integrations, config_flow
|
||||
)
|
||||
return {
|
||||
key: value
|
||||
for key, value in translations.items()
|
||||
if not key.startswith("component.logger.services.")
|
||||
}
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.helpers.service.async_get_integrations",
|
||||
wraps=wrap_get_integrations,
|
||||
return_value={"logger": ImportError},
|
||||
),
|
||||
patch(
|
||||
"homeassistant.helpers.service.translation.async_get_translations",
|
||||
wrap_get_translations,
|
||||
return_value={},
|
||||
),
|
||||
):
|
||||
descriptions = await service.async_get_all_descriptions(hass)
|
||||
|
||||
assert len(descriptions) == 3
|
||||
assert len(descriptions) == 2
|
||||
assert "Failed to load integration: logger" in caplog.text
|
||||
|
||||
# Services are empty defaults if the load fails but should
|
||||
# not raise
|
||||
assert descriptions[DOMAIN_GROUP]["remove"]["description"]
|
||||
assert descriptions[DOMAIN_GROUP]["remove"]["fields"]
|
||||
|
||||
assert descriptions[DOMAIN_LOGGER]["set_level"] == {
|
||||
"description": "",
|
||||
"fields": {},
|
||||
"name": "",
|
||||
}
|
||||
|
||||
assert descriptions[DOMAIN_INPUT_BUTTON]["press"]["description"]
|
||||
assert descriptions[DOMAIN_INPUT_BUTTON]["press"]["fields"] == {}
|
||||
assert "target" in descriptions[DOMAIN_INPUT_BUTTON]["press"]
|
||||
|
||||
hass.services.async_register(DOMAIN_LOGGER, "new_service", lambda x: None, None)
|
||||
service.async_set_service_schema(
|
||||
hass, DOMAIN_LOGGER, "new_service", {"description": "new service"}
|
||||
|
||||
@@ -1,17 +1,11 @@
|
||||
"""The tests for the trigger helper."""
|
||||
|
||||
import io
|
||||
from unittest.mock import ANY, AsyncMock, MagicMock, call, patch
|
||||
|
||||
import pytest
|
||||
from pytest_unordered import unordered
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sun import DOMAIN as DOMAIN_SUN
|
||||
from homeassistant.components.system_health import DOMAIN as DOMAIN_SYSTEM_HEALTH
|
||||
from homeassistant.core import Context, HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import trigger
|
||||
from homeassistant.helpers.trigger import (
|
||||
DATA_PLUGGABLE_ACTIONS,
|
||||
PluggableAction,
|
||||
@@ -19,11 +13,7 @@ from homeassistant.helpers.trigger import (
|
||||
async_initialize_triggers,
|
||||
async_validate_trigger_config,
|
||||
)
|
||||
from homeassistant.loader import Integration, async_get_integration
|
||||
from homeassistant.setup import async_setup_component
|
||||
from homeassistant.util.yaml.loader import parse_yaml
|
||||
|
||||
from tests.common import MockModule, MockPlatform, mock_integration, mock_platform
|
||||
|
||||
|
||||
async def test_bad_trigger_platform(hass: HomeAssistant) -> None:
|
||||
@@ -438,174 +428,3 @@ async def test_pluggable_action(
|
||||
remove_attach_2()
|
||||
assert not hass.data[DATA_PLUGGABLE_ACTIONS]
|
||||
assert not plug_2
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    "sun_service_descriptions",
    [
        """
        sun:
          fields:
            event:
              example: sunrise
              selector:
                select:
                  options:
                    - sunrise
                    - sunset
            offset:
              selector:
                time: null
        """,
        """
        .anchor: &anchor
          - sunrise
          - sunset
        sun:
          fields:
            event:
              example: sunrise
              selector:
                select:
                  options: *anchor
            offset:
              selector:
                time: null
        """,
    ],
)
async def test_async_get_all_descriptions(
    hass: HomeAssistant, sun_service_descriptions: str
) -> None:
    """Test async_get_all_descriptions."""
    assert await async_setup_component(hass, DOMAIN_SUN, {})
    assert await async_setup_component(hass, DOMAIN_SYSTEM_HEALTH, {})
    await hass.async_block_till_done()

    def _parse_fixture_yaml(fname, secrets=None):
        # Serve the parametrized YAML instead of reading triggers.yaml
        # from disk.
        with io.StringIO(sun_service_descriptions) as file:
            return parse_yaml(file)

    with (
        patch(
            "homeassistant.helpers.trigger._load_triggers_files",
            side_effect=trigger._load_triggers_files,
        ) as proxy_load_triggers_files,
        patch(
            "annotatedyaml.loader.load_yaml",
            side_effect=_parse_fixture_yaml,
        ),
        patch.object(Integration, "has_triggers", return_value=True),
    ):
        descriptions = await trigger.async_get_all_descriptions(hass)

    # triggers.yaml must only be loaded for integrations that actually
    # provide triggers; system_health has none.
    assert proxy_load_triggers_files.mock_calls[0][1][1] == unordered(
        [await async_get_integration(hass, DOMAIN_SUN)]
    )

    # system_health has no trigger descriptions, so only sun appears.
    assert descriptions == {
        DOMAIN_SUN: {
            "fields": {
                "event": {
                    "example": "sunrise",
                    "selector": {"select": {"options": ["sunrise", "sunset"]}},
                },
                "offset": {"selector": {"time": None}},
            }
        }
    }

    # A second call must come from the cache and return the identical object.
    assert await trigger.async_get_all_descriptions(hass) is descriptions
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    ("yaml_error", "expected_message"),
    [
        (
            FileNotFoundError("Blah"),
            "Unable to find triggers.yaml for the sun integration",
        ),
        (
            HomeAssistantError("Test error"),
            "Unable to parse triggers.yaml for the sun integration: Test error",
        ),
    ],
)
async def test_async_get_all_descriptions_with_yaml_error(
    hass: HomeAssistant,
    caplog: pytest.LogCaptureFixture,
    yaml_error: Exception,
    expected_message: str,
) -> None:
    """Test async_get_all_descriptions."""
    assert await async_setup_component(hass, DOMAIN_SUN, {})
    await hass.async_block_till_done()

    def _raise_yaml_error(fname, secrets=None):
        # Simulate a failure while loading the triggers.yaml file.
        raise yaml_error

    with (
        patch(
            "homeassistant.helpers.trigger.load_yaml_dict",
            side_effect=_raise_yaml_error,
        ),
        patch.object(Integration, "has_triggers", return_value=True),
    ):
        descriptions = await trigger.async_get_all_descriptions(hass)

    # A failed load yields a None description rather than raising.
    assert descriptions == {DOMAIN_SUN: None}

    # The failure must be surfaced in the log.
    assert expected_message in caplog.text
|
||||
|
||||
|
||||
async def test_async_get_all_descriptions_with_bad_description(
    hass: HomeAssistant,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test async_get_all_descriptions."""
    # "fields" must be a mapping; a scalar should fail schema validation.
    sun_service_descriptions = """
        sun:
          fields: not_a_dict
        """

    assert await async_setup_component(hass, DOMAIN_SUN, {})
    await hass.async_block_till_done()

    def _parse_fixture_yaml(fname, secrets=None):
        # Serve the malformed YAML in place of the on-disk triggers.yaml.
        with io.StringIO(sun_service_descriptions) as file:
            return parse_yaml(file)

    with (
        patch(
            "annotatedyaml.loader.load_yaml",
            side_effect=_parse_fixture_yaml,
        ),
        patch.object(Integration, "has_triggers", return_value=True),
    ):
        descriptions = await trigger.async_get_all_descriptions(hass)

    # Invalid descriptions resolve to None instead of raising.
    assert descriptions == {DOMAIN_SUN: None}

    # The schema error must be logged with the offending path.
    assert (
        "Unable to parse triggers.yaml for the sun integration: "
        "expected a dictionary for dictionary value @ data['sun']['fields']"
    ) in caplog.text
|
||||
|
||||
|
||||
async def test_invalid_trigger_platform(
    hass: HomeAssistant,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test invalid trigger platform."""
    # The integration itself sets up fine, but its trigger platform is an
    # empty shell that implements none of the trigger protocol.
    test_module = MockModule("test", async_setup=AsyncMock(return_value=True))
    mock_integration(hass, test_module)
    mock_platform(hass, "test.trigger", MockPlatform())

    await async_setup_component(hass, "test", {})

    assert "Integration test does not provide trigger support, skipping" in caplog.text
|
||||
|
||||
@@ -85,17 +85,6 @@ async def test_async_enable_logging(
|
||||
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
|
||||
) -> None:
|
||||
"""Test to ensure logging is migrated to the queue handlers."""
|
||||
config_log_file_pattern = get_test_config_dir("home-assistant.log*")
|
||||
arg_log_file_pattern = "test.log*"
|
||||
|
||||
# Ensure we start with a clean slate
|
||||
for f in glob.glob(arg_log_file_pattern):
|
||||
os.remove(f)
|
||||
for f in glob.glob(config_log_file_pattern):
|
||||
os.remove(f)
|
||||
assert len(glob.glob(config_log_file_pattern)) == 0
|
||||
assert len(glob.glob(arg_log_file_pattern)) == 0
|
||||
|
||||
with (
|
||||
patch("logging.getLogger"),
|
||||
patch(
|
||||
@@ -108,8 +97,6 @@ async def test_async_enable_logging(
|
||||
):
|
||||
await bootstrap.async_enable_logging(hass)
|
||||
mock_async_activate_log_queue_handler.assert_called_once()
|
||||
assert len(glob.glob(config_log_file_pattern)) > 0
|
||||
|
||||
mock_async_activate_log_queue_handler.reset_mock()
|
||||
await bootstrap.async_enable_logging(
|
||||
hass,
|
||||
@@ -117,15 +104,13 @@ async def test_async_enable_logging(
|
||||
log_file="test.log",
|
||||
)
|
||||
mock_async_activate_log_queue_handler.assert_called_once()
|
||||
assert len(glob.glob(arg_log_file_pattern)) > 0
|
||||
for f in glob.glob("test.log*"):
|
||||
os.remove(f)
|
||||
for f in glob.glob("testing_config/home-assistant.log*"):
|
||||
os.remove(f)
|
||||
|
||||
assert "Error rolling over log file" in caplog.text
|
||||
|
||||
for f in glob.glob(arg_log_file_pattern):
|
||||
os.remove(f)
|
||||
for f in glob.glob(config_log_file_pattern):
|
||||
os.remove(f)
|
||||
|
||||
|
||||
async def test_load_hassio(hass: HomeAssistant) -> None:
|
||||
"""Test that we load the hassio integration when using Supervisor."""
|
||||
|
||||
Reference in New Issue
Block a user