Compare commits

..

6 Commits

Author SHA1 Message Date
Jan Čermák
09546525b6 fixup! Bump base image to 2026.02.0 with Python 3.14.3, use 3.14.3 in CI 2026-03-02 13:17:40 +01:00
Jan Čermák
37a59bd23c Sleep twice as suggested in PR 2026-03-02 12:29:29 +01:00
Jan Čermák
6793a98c08 Sleep 1e-99s in async_block_till_done to workaround unfinished tasks 2026-03-02 12:29:28 +01:00
Jan Čermák
0ee4589ba3 Revert "Try reverting changes from Python's gh-105836 patch"
This reverts commit 5de171f714.
2026-03-02 12:29:27 +01:00
Jan Čermák
55bd4b00b4 Try reverting changes from Python's gh-105836 patch 2026-03-02 12:29:27 +01:00
Jan Čermák
43f5f922e3 Bump base image to 2026.02.0 with Python 3.14.3, use 3.14.3 in CI
This also bumps libcec used in the base image to 7.1.1, full changelog:
* https://github.com/home-assistant/docker/releases/tag/2026.02.0

Python changelog:
* https://docs.python.org/release/3.14.3/whatsnew/changelog.html
2026-03-02 12:29:25 +01:00
85 changed files with 2750 additions and 1048 deletions

View File

@@ -10,12 +10,12 @@ on:
env:
BUILD_TYPE: core
DEFAULT_PYTHON: "3.14.2"
DEFAULT_PYTHON: "3.14.3"
PIP_TIMEOUT: 60
UV_HTTP_TIMEOUT: 60
UV_SYSTEM_PYTHON: "true"
# Base image version from https://github.com/home-assistant/docker
BASE_IMAGE_VERSION: "2026.01.0"
BASE_IMAGE_VERSION: "2026.02.0"
ARCHITECTURES: '["amd64", "aarch64"]'
permissions: {}
@@ -182,7 +182,7 @@ jobs:
fi
- name: Download translations
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: translations
@@ -544,7 +544,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Download translations
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: translations

View File

@@ -41,8 +41,8 @@ env:
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2026.4"
DEFAULT_PYTHON: "3.14.2"
ALL_PYTHON_VERSIONS: "['3.14.2']"
DEFAULT_PYTHON: "3.14.3"
ALL_PYTHON_VERSIONS: "['3.14.3']"
# 10.3 is the oldest supported version
# - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
# 10.6 is the current long-term-support
@@ -978,7 +978,7 @@ jobs:
run: |
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
- name: Download pytest_buckets
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: pytest_buckets
- name: Compile English translations
@@ -1387,7 +1387,7 @@ jobs:
with:
persist-credentials: false
- name: Download all coverage artifacts
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
pattern: coverage-*
- name: Upload coverage to Codecov
@@ -1558,7 +1558,7 @@ jobs:
with:
persist-credentials: false
- name: Download all coverage artifacts
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
pattern: coverage-*
- name: Upload coverage to Codecov
@@ -1587,7 +1587,7 @@ jobs:
&& needs.info.outputs.skip_coverage != 'true' && !cancelled()
steps:
- name: Download all coverage artifacts
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
pattern: test-results-*
- name: Upload test results to Codecov

View File

@@ -16,7 +16,7 @@ concurrency:
cancel-in-progress: true
env:
DEFAULT_PYTHON: "3.14.2"
DEFAULT_PYTHON: "3.14.3"
jobs:
upload:

View File

@@ -17,7 +17,7 @@ on:
- "script/gen_requirements_all.py"
env:
DEFAULT_PYTHON: "3.14.2"
DEFAULT_PYTHON: "3.14.3"
permissions: {}
@@ -124,12 +124,12 @@ jobs:
persist-credentials: false
- name: Download env_file
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: env_file
- name: Download requirements_diff
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: requirements_diff
@@ -175,17 +175,17 @@ jobs:
persist-credentials: false
- name: Download env_file
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: env_file
- name: Download requirements_diff
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: requirements_diff
- name: Download requirements_all_wheels
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: requirements_all_wheels

View File

@@ -1,5 +0,0 @@
{
"domain": "ubisys",
"name": "Ubisys",
"iot_standards": ["zigbee"]
}

View File

@@ -190,7 +190,7 @@ class BitcoinSensor(SensorEntity):
elif sensor_type == "miners_revenue_usd":
self._attr_native_value = f"{stats.miners_revenue_usd:.0f}"
elif sensor_type == "btc_mined":
self._attr_native_value = str(stats.btc_mined * 1e-8)
self._attr_native_value = str(stats.btc_mined * 0.00000001)
elif sensor_type == "trade_volume_usd":
self._attr_native_value = f"{stats.trade_volume_usd:.1f}"
elif sensor_type == "difficulty":
@@ -208,13 +208,13 @@ class BitcoinSensor(SensorEntity):
elif sensor_type == "blocks_size":
self._attr_native_value = f"{stats.blocks_size:.1f}"
elif sensor_type == "total_fees_btc":
self._attr_native_value = f"{stats.total_fees_btc * 1e-8:.2f}"
self._attr_native_value = f"{stats.total_fees_btc * 0.00000001:.2f}"
elif sensor_type == "total_btc_sent":
self._attr_native_value = f"{stats.total_btc_sent * 1e-8:.2f}"
self._attr_native_value = f"{stats.total_btc_sent * 0.00000001:.2f}"
elif sensor_type == "estimated_btc_sent":
self._attr_native_value = f"{stats.estimated_btc_sent * 1e-8:.2f}"
self._attr_native_value = f"{stats.estimated_btc_sent * 0.00000001:.2f}"
elif sensor_type == "total_btc":
self._attr_native_value = f"{stats.total_btc * 1e-8:.2f}"
self._attr_native_value = f"{stats.total_btc * 0.00000001:.2f}"
elif sensor_type == "total_blocks":
self._attr_native_value = f"{stats.total_blocks:.0f}"
elif sensor_type == "next_retarget":
@@ -222,7 +222,7 @@ class BitcoinSensor(SensorEntity):
elif sensor_type == "estimated_transaction_volume_usd":
self._attr_native_value = f"{stats.estimated_transaction_volume_usd:.2f}"
elif sensor_type == "miners_revenue_btc":
self._attr_native_value = f"{stats.miners_revenue_btc * 1e-8:.1f}"
self._attr_native_value = f"{stats.miners_revenue_btc * 0.00000001:.1f}"
elif sensor_type == "market_price_usd":
self._attr_native_value = f"{stats.market_price_usd:.2f}"

View File

@@ -48,8 +48,6 @@ def async_setup(hass: HomeAssistant) -> None:
vol.Optional("conversation_id"): vol.Any(str, None),
vol.Optional("language"): str,
vol.Optional("agent_id"): agent_id_validator,
vol.Optional("device_id"): vol.Any(str, None),
vol.Optional("satellite_id"): vol.Any(str, None),
}
)
@websocket_api.async_response
@@ -66,8 +64,6 @@ async def websocket_process(
context=connection.context(msg),
language=msg.get("language"),
agent_id=msg.get("agent_id"),
device_id=msg.get("device_id"),
satellite_id=msg.get("satellite_id"),
)
connection.send_result(msg["id"], result.as_dict())
@@ -252,8 +248,6 @@ class ConversationProcessView(http.HomeAssistantView):
vol.Optional("conversation_id"): str,
vol.Optional("language"): str,
vol.Optional("agent_id"): agent_id_validator,
vol.Optional("device_id"): vol.Any(str, None),
vol.Optional("satellite_id"): vol.Any(str, None),
}
)
)
@@ -268,8 +262,6 @@ class ConversationProcessView(http.HomeAssistantView):
context=self.context(request),
language=data.get("language"),
agent_id=data.get("agent_id"),
device_id=data.get("device_id"),
satellite_id=data.get("satellite_id"),
)
return self.json(result.as_dict())

View File

@@ -112,12 +112,11 @@ def _zone_is_configured(zone: DaikinZone) -> bool:
def _zone_temperature_lists(device: Appliance) -> tuple[list[str], list[str]]:
"""Return the decoded zone temperature lists."""
values = device.values
if DAIKIN_ZONE_TEMP_HEAT not in values or DAIKIN_ZONE_TEMP_COOL not in values:
try:
heating = device.represent(DAIKIN_ZONE_TEMP_HEAT)[1]
cooling = device.represent(DAIKIN_ZONE_TEMP_COOL)[1]
except (AttributeError, KeyError):
return ([], [])
heating = device.represent(DAIKIN_ZONE_TEMP_HEAT)[1]
cooling = device.represent(DAIKIN_ZONE_TEMP_COOL)[1]
return (list(heating or []), list(cooling or []))

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["dsmr_parser"],
"requirements": ["dsmr-parser==1.5.0"]
"requirements": ["dsmr-parser==1.4.3"]
}

View File

@@ -2,39 +2,14 @@
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from .const import DOMAIN
from .coordinator import EafmConfigEntry, EafmCoordinator
PLATFORMS = [Platform.SENSOR]
def _fix_device_registry_identifiers(
hass: HomeAssistant, entry: EafmConfigEntry
) -> None:
"""Fix invalid identifiers in device registry.
Added in 2026.4, can be removed in 2026.10 or later.
"""
device_registry = dr.async_get(hass)
for device_entry in dr.async_entries_for_config_entry(
device_registry, entry.entry_id
):
old_identifier = (DOMAIN, "measure-id", entry.data["station"])
if old_identifier not in device_entry.identifiers: # type: ignore[comparison-overlap]
continue
new_identifiers = device_entry.identifiers.copy()
new_identifiers.discard(old_identifier) # type: ignore[arg-type]
new_identifiers.add((DOMAIN, entry.data["station"]))
device_registry.async_update_device(
device_entry.id, new_identifiers=new_identifiers
)
async def async_setup_entry(hass: HomeAssistant, entry: EafmConfigEntry) -> bool:
"""Set up flood monitoring sensors for this config entry."""
_fix_device_registry_identifiers(hass, entry)
coordinator = EafmCoordinator(hass, entry=entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator

View File

@@ -94,11 +94,11 @@ class Measurement(CoordinatorEntity, SensorEntity):
return self.coordinator.data["measures"][self.key]["parameterName"]
@property
def device_info(self) -> DeviceInfo:
def device_info(self):
"""Return the device info."""
return DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, self.station_id)},
identifiers={(DOMAIN, "measure-id", self.station_id)},
manufacturer="https://environment.data.gov.uk/",
model=self.parameter_name,
name=f"{self.station_name} {self.parameter_name} {self.qualifier}",

View File

@@ -189,7 +189,6 @@ async def platform_async_setup_entry(
info_type: type[_InfoT],
entity_type: type[_EntityT],
state_type: type[_StateT],
info_filter: Callable[[_InfoT], bool] | None = None,
) -> None:
"""Set up an esphome platform.
@@ -209,22 +208,10 @@ async def platform_async_setup_entry(
entity_type,
state_type,
)
if info_filter is not None:
def on_filtered_update(infos: list[EntityInfo]) -> None:
on_static_info_update(
[info for info in infos if info_filter(cast(_InfoT, info))]
)
info_callback = on_filtered_update
else:
info_callback = on_static_info_update
entry_data.cleanup_callbacks.append(
entry_data.async_register_static_info_callback(
info_type,
info_callback,
on_static_info_update,
)
)

View File

@@ -29,7 +29,6 @@ from aioesphomeapi import (
Event,
EventInfo,
FanInfo,
InfraredInfo,
LightInfo,
LockInfo,
MediaPlayerInfo,
@@ -86,7 +85,6 @@ INFO_TYPE_TO_PLATFORM: dict[type[EntityInfo], Platform] = {
DateTimeInfo: Platform.DATETIME,
EventInfo: Platform.EVENT,
FanInfo: Platform.FAN,
InfraredInfo: Platform.INFRARED,
LightInfo: Platform.LIGHT,
LockInfo: Platform.LOCK,
MediaPlayerInfo: Platform.MEDIA_PLAYER,

View File

@@ -1,59 +0,0 @@
"""Infrared platform for ESPHome."""
from __future__ import annotations
from functools import partial
import logging
from aioesphomeapi import EntityState, InfraredCapability, InfraredInfo
from homeassistant.components.infrared import InfraredCommand, InfraredEntity
from homeassistant.core import callback
from .entity import (
EsphomeEntity,
convert_api_error_ha_error,
platform_async_setup_entry,
)
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
class EsphomeInfraredEntity(EsphomeEntity[InfraredInfo, EntityState], InfraredEntity):
"""ESPHome infrared entity using native API."""
@callback
def _on_device_update(self) -> None:
"""Call when device updates or entry data changes."""
super()._on_device_update()
if self._entry_data.available:
# Infrared entities should go available as soon as the device comes online
self.async_write_ha_state()
@convert_api_error_ha_error
async def async_send_command(self, command: InfraredCommand) -> None:
"""Send an IR command."""
timings = [
interval
for timing in command.get_raw_timings()
for interval in (timing.high_us, -timing.low_us)
]
_LOGGER.debug("Sending command: %s", timings)
self._client.infrared_rf_transmit_raw_timings(
self._static_info.key,
carrier_frequency=command.modulation,
timings=timings,
device_id=self._static_info.device_id,
)
async_setup_entry = partial(
platform_async_setup_entry,
info_type=InfraredInfo,
entity_type=EsphomeInfraredEntity,
state_type=EntityState,
info_filter=lambda info: bool(info.capabilities & InfraredCapability.TRANSMITTER),
)

View File

@@ -241,7 +241,7 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
if (color_temp_k := kwargs.get(ATTR_COLOR_TEMP_KELVIN)) is not None:
# Do not use kelvin_to_mired here to prevent precision loss
data["color_temperature"] = 1_000_000.0 / color_temp_k
data["color_temperature"] = 1000000.0 / color_temp_k
if color_temp_modes := _filter_color_modes(
color_modes, LightColorCapability.COLOR_TEMPERATURE
):

View File

@@ -21,5 +21,5 @@
"integration_type": "system",
"preview_features": { "winter_mode": {} },
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20260302.0"]
"requirements": ["home-assistant-frontend==20260226.0"]
}

View File

@@ -54,10 +54,6 @@
"connectable": false,
"local_name": "GVH5110*"
},
{
"connectable": false,
"local_name": "GV5140*"
},
{
"connectable": false,
"manufacturer_id": 1,

View File

@@ -21,7 +21,6 @@ from homeassistant.components.sensor import (
)
from homeassistant.const import (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_MILLION,
PERCENTAGE,
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
UnitOfTemperature,
@@ -73,12 +72,6 @@ SENSOR_DESCRIPTIONS = {
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
),
(DeviceClass.CO2, Units.CONCENTRATION_PARTS_PER_MILLION): SensorEntityDescription(
key=f"{DeviceClass.CO2}_{Units.CONCENTRATION_PARTS_PER_MILLION}",
device_class=SensorDeviceClass.CO2,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
state_class=SensorStateClass.MEASUREMENT,
),
}

View File

@@ -13,7 +13,7 @@
"requirements": [
"xknx==3.15.0",
"xknxproject==3.8.2",
"knx-frontend==2026.3.2.183756"
"knx-frontend==2026.2.25.165736"
],
"single_config_entry": true
}

View File

@@ -10,7 +10,7 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["pylutron_caseta"],
"requirements": ["pylutron-caseta==0.27.0"],
"requirements": ["pylutron-caseta==0.26.0"],
"zeroconf": [
{
"properties": {

View File

@@ -14,6 +14,7 @@ from chip.clusters.Types import NullValue
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from .const import (
CLEAR_ALL_INDEX,
CRED_TYPE_FACE,
CRED_TYPE_FINGER_VEIN,
CRED_TYPE_FINGERPRINT,
@@ -221,6 +222,42 @@ def _format_user_response(user_data: Any) -> LockUserData | None:
# --- Credential management helpers ---
async def _clear_user_credentials(
matter_client: MatterClient,
node_id: int,
endpoint_id: int,
user_index: int,
) -> None:
"""Clear all credentials for a specific user.
Fetches the user to get credential list, then clears each credential.
"""
get_user_response = await matter_client.send_device_command(
node_id=node_id,
endpoint_id=endpoint_id,
command=clusters.DoorLock.Commands.GetUser(userIndex=user_index),
)
creds = _get_attr(get_user_response, "credentials")
if not creds:
return
for cred in creds:
cred_type = _get_attr(cred, "credentialType")
cred_index = _get_attr(cred, "credentialIndex")
await matter_client.send_device_command(
node_id=node_id,
endpoint_id=endpoint_id,
command=clusters.DoorLock.Commands.ClearCredential(
credential=clusters.DoorLock.Structs.CredentialStruct(
credentialType=cred_type,
credentialIndex=cred_index,
),
),
timed_request_timeout_ms=LOCK_TIMED_REQUEST_TIMEOUT_MS,
)
class LockEndpointNotFoundError(HomeAssistantError):
"""Lock endpoint not found on node."""
@@ -520,16 +557,33 @@ async def clear_lock_user(
node: MatterNode,
user_index: int,
) -> None:
"""Clear a user from the lock.
"""Clear a user from the lock, cleaning up credentials first.
Per the Matter spec, ClearUser also clears all associated credentials
and schedules for the user.
Use index 0xFFFE (CLEAR_ALL_INDEX) to clear all users.
Raises HomeAssistantError on failure.
"""
lock_endpoint = _get_lock_endpoint_or_raise(node)
_ensure_usr_support(lock_endpoint)
if user_index == CLEAR_ALL_INDEX:
# Clear all: clear all credentials first, then all users
await matter_client.send_device_command(
node_id=node.node_id,
endpoint_id=lock_endpoint.endpoint_id,
command=clusters.DoorLock.Commands.ClearCredential(
credential=None,
),
timed_request_timeout_ms=LOCK_TIMED_REQUEST_TIMEOUT_MS,
)
else:
# Clear credentials for this specific user before deleting them
await _clear_user_credentials(
matter_client,
node.node_id,
lock_endpoint.endpoint_id,
user_index,
)
await matter_client.send_device_command(
node_id=node.node_id,
endpoint_id=lock_endpoint.endpoint_id,

View File

@@ -642,7 +642,7 @@
},
"services": {
"clear_lock_credential": {
"description": "Removes a credential from a lock.",
"description": "Removes a credential from the lock.",
"fields": {
"credential_index": {
"description": "The credential slot index to clear.",
@@ -666,7 +666,7 @@
"name": "Clear lock user"
},
"get_lock_credential_status": {
"description": "Returns the status of a credential slot on a lock.",
"description": "Returns the status of a credential slot on the lock.",
"fields": {
"credential_index": {
"description": "The credential slot index to query.",
@@ -684,7 +684,7 @@
"name": "Get lock info"
},
"get_lock_users": {
"description": "Returns all users configured on a lock with their credentials.",
"description": "Returns all users configured on the lock with their credentials.",
"name": "Get lock users"
},
"open_commissioning_window": {
@@ -698,7 +698,7 @@
"name": "Open commissioning window"
},
"set_lock_credential": {
"description": "Adds or updates a credential on a lock.",
"description": "Adds or updates a credential on the lock.",
"fields": {
"credential_data": {
"description": "The credential data. For PIN: digits only. For RFID: hexadecimal string.",

View File

@@ -14,26 +14,27 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .const import DOMAIN
PLATFORMS: list[Platform] = [Platform.SENSOR]
_LOGGER = logging.getLogger(__name__)
type MoatConfigEntry = ConfigEntry[PassiveBluetoothProcessorCoordinator]
async def async_setup_entry(hass: HomeAssistant, entry: MoatConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Moat BLE device from a config entry."""
address = entry.unique_id
assert address is not None
data = MoatBluetoothDeviceData()
coordinator = PassiveBluetoothProcessorCoordinator(
hass,
_LOGGER,
address=address,
mode=BluetoothScanningMode.PASSIVE,
update_method=data.update,
coordinator = hass.data.setdefault(DOMAIN, {})[entry.entry_id] = (
PassiveBluetoothProcessorCoordinator(
hass,
_LOGGER,
address=address,
mode=BluetoothScanningMode.PASSIVE,
update_method=data.update,
)
)
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(
coordinator.async_start()
@@ -41,6 +42,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: MoatConfigEntry) -> bool
return True
async def async_unload_entry(hass: HomeAssistant, entry: MoatConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok

View File

@@ -4,10 +4,12 @@ from __future__ import annotations
from moat_ble import DeviceClass, DeviceKey, SensorUpdate, Units
from homeassistant import config_entries
from homeassistant.components.bluetooth.passive_update_processor import (
PassiveBluetoothDataProcessor,
PassiveBluetoothDataUpdate,
PassiveBluetoothEntityKey,
PassiveBluetoothProcessorCoordinator,
PassiveBluetoothProcessorEntity,
)
from homeassistant.components.sensor import (
@@ -26,7 +28,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.sensor import sensor_device_info_to_hass_device_info
from . import MoatConfigEntry
from .const import DOMAIN
SENSOR_DESCRIPTIONS = {
(DeviceClass.TEMPERATURE, Units.TEMP_CELSIUS): SensorEntityDescription(
@@ -102,11 +104,13 @@ def sensor_update_to_bluetooth_data_update(
async def async_setup_entry(
hass: HomeAssistant,
entry: MoatConfigEntry,
entry: config_entries.ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Moat BLE sensors."""
coordinator = entry.runtime_data
coordinator: PassiveBluetoothProcessorCoordinator = hass.data[DOMAIN][
entry.entry_id
]
processor = PassiveBluetoothDataProcessor(sensor_update_to_bluetooth_data_update)
entry.async_on_unload(
processor.async_add_entities_listener(

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
import logging
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
@@ -13,14 +14,15 @@ from homeassistant.helpers.config_entry_oauth2_flow import (
)
from .api import AuthenticatedMonzoAPI
from .coordinator import MonzoConfigEntry, MonzoCoordinator
from .const import DOMAIN
from .coordinator import MonzoCoordinator
_LOGGER = logging.getLogger(__name__)
PLATFORMS: list[Platform] = [Platform.SENSOR]
async def async_migrate_entry(hass: HomeAssistant, entry: MonzoConfigEntry) -> bool:
async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Migrate entry."""
_LOGGER.debug("Migrating from version %s.%s", entry.version, entry.minor_version)
@@ -37,7 +39,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: MonzoConfigEntry) -> b
return True
async def async_setup_entry(hass: HomeAssistant, entry: MonzoConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Monzo from a config entry."""
implementation = await async_get_config_entry_implementation(hass, entry)
@@ -49,12 +51,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: MonzoConfigEntry) -> boo
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: MonzoConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok

View File

@@ -1,7 +1,5 @@
"""The Monzo integration."""
from __future__ import annotations
from dataclasses import dataclass
from datetime import timedelta
import logging
@@ -20,8 +18,6 @@ from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
type MonzoConfigEntry = ConfigEntry[MonzoCoordinator]
@dataclass
class MonzoData:
@@ -34,13 +30,10 @@ class MonzoData:
class MonzoCoordinator(DataUpdateCoordinator[MonzoData]):
"""Class to manage fetching Monzo data from the API."""
config_entry: MonzoConfigEntry
config_entry: ConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: MonzoConfigEntry,
api: AuthenticatedMonzoAPI,
self, hass: HomeAssistant, config_entry: ConfigEntry, api: AuthenticatedMonzoAPI
) -> None:
"""Initialize."""
super().__init__(

View File

@@ -11,11 +11,14 @@ from homeassistant.components.sensor import (
SensorEntity,
SensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from .coordinator import MonzoConfigEntry, MonzoCoordinator, MonzoData
from . import MonzoCoordinator
from .const import DOMAIN
from .coordinator import MonzoData
from .entity import MonzoBaseEntity
@@ -61,11 +64,11 @@ MODEL_POT = "Pot"
async def async_setup_entry(
hass: HomeAssistant,
config_entry: MonzoConfigEntry,
config_entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Defer sensor setup to the shared sensor module."""
coordinator = config_entry.runtime_data
coordinator: MonzoCoordinator = hass.data[DOMAIN][config_entry.entry_id]
accounts = [
MonzoSensor(

View File

@@ -17,6 +17,7 @@ from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
ConfigSubentryData,
ConfigSubentryFlow,
SubentryFlowResult,
)
@@ -29,7 +30,15 @@ from homeassistant.helpers.selector import (
TimeSelector,
)
from .const import CONF_FROM, CONF_TIME, CONF_TO, CONF_VIA, DOMAIN, INTEGRATION_TITLE
from .const import (
CONF_FROM,
CONF_ROUTES,
CONF_TIME,
CONF_TO,
CONF_VIA,
DOMAIN,
INTEGRATION_TITLE,
)
_LOGGER = logging.getLogger(__name__)
@@ -124,6 +133,47 @@ class NSConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
"""Handle import from YAML configuration."""
self._async_abort_entries_match({CONF_API_KEY: import_data[CONF_API_KEY]})
client = NSAPI(import_data[CONF_API_KEY])
try:
stations = await self.hass.async_add_executor_job(client.get_stations)
except HTTPError:
return self.async_abort(reason="invalid_auth")
except (RequestsConnectionError, Timeout):
return self.async_abort(reason="cannot_connect")
except Exception:
_LOGGER.exception("Unexpected exception validating API key")
return self.async_abort(reason="unknown")
station_codes = {station.code for station in stations}
subentries: list[ConfigSubentryData] = []
for route in import_data.get(CONF_ROUTES, []):
# Convert station codes to uppercase for consistency with UI routes
for key in (CONF_FROM, CONF_TO, CONF_VIA):
if key in route:
route[key] = route[key].upper()
if route[key] not in station_codes:
return self.async_abort(reason="invalid_station")
subentries.append(
ConfigSubentryData(
title=route[CONF_NAME],
subentry_type="route",
data=route,
unique_id=None,
)
)
return self.async_create_entry(
title=INTEGRATION_TITLE,
data={CONF_API_KEY: import_data[CONF_API_KEY]},
subentries=subentries,
)
@classmethod
@callback
def async_get_supported_subentry_types(

View File

@@ -12,6 +12,7 @@ AMS_TZ = ZoneInfo("Europe/Amsterdam")
# Update every 2 minutes
SCAN_INTERVAL = timedelta(minutes=2)
CONF_ROUTES = "routes"
CONF_FROM = "from"
CONF_TO = "to"
CONF_VIA = "via"

View File

@@ -5,24 +5,42 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
import logging
from typing import Any
from ns_api import Trip
import voluptuous as vol
from homeassistant.components.sensor import (
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_API_KEY, CONF_NAME, EntityCategory
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
AddEntitiesCallback,
)
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .binary_sensor import get_delay
from .const import DOMAIN, INTEGRATION_TITLE, ROUTE_MODEL
from .const import (
CONF_FROM,
CONF_ROUTES,
CONF_TIME,
CONF_TO,
CONF_VIA,
DOMAIN,
INTEGRATION_TITLE,
ROUTE_MODEL,
)
from .coordinator import NSConfigEntry, NSDataUpdateCoordinator
@@ -52,9 +70,26 @@ TRIP_STATUS = {
"CANCELLED": "cancelled",
}
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0 # since we use coordinator pattern
ROUTE_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_FROM): cv.string,
vol.Required(CONF_TO): cv.string,
vol.Optional(CONF_VIA): cv.string,
vol.Optional(CONF_TIME): cv.time,
}
)
ROUTES_SCHEMA = vol.All(cv.ensure_list, [ROUTE_SCHEMA])
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
{vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_ROUTES): ROUTES_SCHEMA}
)
@dataclass(frozen=True, kw_only=True)
class NSSensorEntityDescription(SensorEntityDescription):
@@ -160,6 +195,55 @@ SENSOR_DESCRIPTIONS: tuple[NSSensorEntityDescription, ...] = (
)
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the departure sensor."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=config,
)
if (
result.get("type") is FlowResultType.ABORT
and result.get("reason") != "already_configured"
):
ir.async_create_issue(
hass,
DOMAIN,
f"deprecated_yaml_import_issue_{result.get('reason')}",
breaks_in_ha_version="2026.4.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key=f"deprecated_yaml_import_issue_{result.get('reason')}",
translation_placeholders={
"domain": DOMAIN,
"integration_title": INTEGRATION_TITLE,
},
)
return
ir.async_create_issue(
hass,
HOMEASSISTANT_DOMAIN,
"deprecated_yaml",
breaks_in_ha_version="2026.4.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": INTEGRATION_TITLE,
},
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: NSConfigEntry,

View File

@@ -127,5 +127,23 @@
"name": "Transfers"
}
}
},
"issues": {
"deprecated_yaml_import_issue_cannot_connect": {
"description": "Configuring Nederlandse Spoorwegen using YAML sensor platform is deprecated.\n\nWhile importing your configuration, Home Assistant could not connect to the NS API. Please check your internet connection and the status of the NS API, then restart Home Assistant to try again, or remove the existing YAML configuration and set the integration up via the UI.",
"title": "[%key:component::nederlandse_spoorwegen::issues::deprecated_yaml_import_issue_invalid_auth::title%]"
},
"deprecated_yaml_import_issue_invalid_auth": {
"description": "Configuring Nederlandse Spoorwegen using YAML sensor platform is deprecated.\n\nWhile importing your configuration, an invalid API key was found. Please update your YAML configuration, or remove the existing YAML configuration and set the integration up via the UI.",
"title": "Nederlandse Spoorwegen YAML configuration deprecated"
},
"deprecated_yaml_import_issue_invalid_station": {
"description": "Configuring Nederlandse Spoorwegen using YAML sensor platform is deprecated.\n\nWhile importing your configuration, an invalid station was found. Please update your YAML configuration, or remove the existing YAML configuration and set the integration up via the UI.",
"title": "[%key:component::nederlandse_spoorwegen::issues::deprecated_yaml_import_issue_invalid_auth::title%]"
},
"deprecated_yaml_import_issue_unknown": {
"description": "Configuring Nederlandse Spoorwegen using YAML sensor platform is deprecated.\n\nWhile importing your configuration, an unknown error occurred. Please restart Home Assistant to try again, or remove the existing YAML configuration and set the integration up via the UI.",
"title": "[%key:component::nederlandse_spoorwegen::issues::deprecated_yaml_import_issue_invalid_auth::title%]"
}
}
}

View File

@@ -218,7 +218,7 @@ def fix_coordinates(user_input: dict) -> dict:
# Ensure coordinates have acceptable length for the Netatmo API
for coordinate in (CONF_LAT_NE, CONF_LAT_SW, CONF_LON_NE, CONF_LON_SW):
if len(str(user_input[coordinate]).split(".")[1]) < 7:
user_input[coordinate] = user_input[coordinate] + 1e-7
user_input[coordinate] = user_input[coordinate] + 0.0000001
# Swap coordinates if entered in wrong order
if user_input[CONF_LAT_NE] < user_input[CONF_LAT_SW]:

View File

@@ -16,30 +16,23 @@ from onvif.client import (
)
from onvif.exceptions import ONVIFError
from onvif.util import stringify_onvif_error
import onvif_parsers
from zeep.exceptions import Fault, TransportError, ValidationError, XMLParseError
from homeassistant.components import webhook
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.network import NoURLAvailableError, get_url
from homeassistant.util import dt as dt_util
from .const import DOMAIN, LOGGER
from .models import Event, PullPointManagerState, WebHookManagerState
from .parsers import PARSERS
# Topics in this list are ignored because we do not want to create
# entities for them.
UNHANDLED_TOPICS: set[str] = {"tns1:MediaControl/VideoEncoderConfiguration"}
ENTITY_CATEGORY_MAPPING: dict[str, EntityCategory] = {
"diagnostic": EntityCategory.DIAGNOSTIC,
"config": EntityCategory.CONFIG,
}
SUBSCRIPTION_ERRORS = (Fault, TimeoutError, TransportError)
CREATE_ERRORS = (
ONVIFError,
@@ -88,18 +81,6 @@ PULLPOINT_MESSAGE_LIMIT = 100
PULLPOINT_COOLDOWN_TIME = 0.75
def _local_datetime_or_none(value: str) -> dt.datetime | None:
"""Convert strings to datetimes, if invalid, return None."""
# Handle cameras that return times like '0000-00-00T00:00:00Z' (e.g. Hikvision)
try:
ret = dt_util.parse_datetime(value)
except ValueError:
return None
if ret is not None:
return dt_util.as_local(ret)
return None
class EventManager:
"""ONVIF Event Manager."""
@@ -195,10 +176,7 @@ class EventManager:
# tns1:RuleEngine/CellMotionDetector/Motion
topic = msg.Topic._value_1.rstrip("/.") # noqa: SLF001
try:
event = await onvif_parsers.parse(topic, unique_id, msg)
error = None
except onvif_parsers.errors.UnknownTopicError:
if not (parser := PARSERS.get(topic)):
if topic not in UNHANDLED_TOPICS:
LOGGER.warning(
"%s: No registered handler for event from %s: %s",
@@ -208,6 +186,10 @@ class EventManager:
)
UNHANDLED_TOPICS.add(topic)
continue
try:
event = await parser(unique_id, msg)
error = None
except (AttributeError, KeyError) as e:
event = None
error = e
@@ -220,26 +202,10 @@ class EventManager:
error,
msg,
)
continue
return
value = event.value
if event.device_class == "timestamp" and isinstance(value, str):
value = _local_datetime_or_none(value)
ha_event = Event(
uid=event.uid,
name=event.name,
platform=event.platform,
device_class=event.device_class,
unit_of_measurement=event.unit_of_measurement,
value=value,
entity_category=ENTITY_CATEGORY_MAPPING.get(
event.entity_category or ""
),
entity_enabled=event.entity_enabled,
)
self.get_uids_by_platform(ha_event.platform).add(ha_event.uid)
self._events[ha_event.uid] = ha_event
self.get_uids_by_platform(event.platform).add(event.uid)
self._events[event.uid] = event
def get_uid(self, uid: str) -> Event | None:
"""Retrieve event for given id."""

View File

@@ -13,9 +13,5 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["onvif", "wsdiscovery", "zeep"],
"requirements": [
"onvif-zeep-async==4.0.4",
"onvif_parsers==1.2.2",
"WSDiscovery==2.1.2"
]
"requirements": ["onvif-zeep-async==4.0.4", "WSDiscovery==2.1.2"]
}

View File

@@ -0,0 +1,755 @@
"""ONVIF event parsers."""
from __future__ import annotations
from collections.abc import Callable, Coroutine
import dataclasses
import datetime
from typing import Any
from homeassistant.const import EntityCategory
from homeassistant.util import dt as dt_util
from homeassistant.util.decorator import Registry
from .models import Event
# Registry mapping an ONVIF topic string to its async parser coroutine.
# Each parser receives (device uid, raw pullpoint message) and returns an
# Event, or None when the message cannot be turned into one.
PARSERS: Registry[str, Callable[[str, Any], Coroutine[Any, Any, Event | None]]] = (
    Registry()
)
# Some cameras report the literal token "vsconf" instead of the real
# VideoSourceToken; map it back so entities are not duplicated.
VIDEO_SOURCE_MAPPING = {
    "vsconf": "VideoSourceToken",
}
def extract_message(msg: Any) -> tuple[str, Any]:
    """Return the (topic, payload) pair from a raw ONVIF notification."""
    topic = msg.Topic._value_1  # noqa: SLF001
    payload = msg.Message._value_1  # noqa: SLF001
    return topic, payload
def _normalize_video_source(source: str) -> str:
    """Return the canonical form of a video source token.

    Some cameras do not set the VideoSourceToken correctly so we get duplicate
    sensors; known-bad tokens are mapped back to the correct value, anything
    else passes through unchanged.
    """
    if source in VIDEO_SOURCE_MAPPING:
        return VIDEO_SOURCE_MAPPING[source]
    return source
def local_datetime_or_none(value: str) -> datetime.datetime | None:
    """Parse ``value`` and convert it to local time; None when unparsable.

    Cameras may send bogus timestamps such as '0000-00-00T00:00:00Z'
    (e.g. Hikvision); those must yield None instead of raising.
    """
    try:
        parsed = dt_util.parse_datetime(value)
    except ValueError:
        return None
    if parsed is None:
        return None
    return dt_util.as_local(parsed)
@PARSERS.register("tns1:VideoSource/MotionAlarm")
@PARSERS.register("tns1:Device/Trigger/tnshik:AlarmIn")
async def async_parse_motion_alarm(uid: str, msg) -> Event | None:
    """Parse a motion alarm message into a motion binary sensor event.

    Topic: tns1:VideoSource/MotionAlarm
    """
    topic, payload = extract_message(msg)
    source_token = payload.Source.SimpleItem[0].Value
    detected = payload.Data.SimpleItem[0].Value == "true"
    return Event(
        f"{uid}_{topic}_{source_token}",
        "Motion Alarm",
        "binary_sensor",
        "motion",
        None,
        detected,
    )
@PARSERS.register("tns1:VideoSource/ImageTooBlurry/AnalyticsService")
@PARSERS.register("tns1:VideoSource/ImageTooBlurry/ImagingService")
@PARSERS.register("tns1:VideoSource/ImageTooBlurry/RecordingService")
async def async_parse_image_too_blurry(uid: str, msg) -> Event | None:
    """Parse an image-too-blurry report into a diagnostic problem sensor.

    Topic: tns1:VideoSource/ImageTooBlurry/*
    """
    topic, payload = extract_message(msg)
    source_token = payload.Source.SimpleItem[0].Value
    is_blurry = payload.Data.SimpleItem[0].Value == "true"
    return Event(
        f"{uid}_{topic}_{source_token}",
        "Image Too Blurry",
        "binary_sensor",
        "problem",
        None,
        is_blurry,
        EntityCategory.DIAGNOSTIC,
    )
@PARSERS.register("tns1:VideoSource/ImageTooDark/AnalyticsService")
@PARSERS.register("tns1:VideoSource/ImageTooDark/ImagingService")
@PARSERS.register("tns1:VideoSource/ImageTooDark/RecordingService")
async def async_parse_image_too_dark(uid: str, msg) -> Event | None:
    """Parse an image-too-dark report into a diagnostic problem sensor.

    Topic: tns1:VideoSource/ImageTooDark/*
    """
    topic, payload = extract_message(msg)
    source_token = payload.Source.SimpleItem[0].Value
    is_dark = payload.Data.SimpleItem[0].Value == "true"
    return Event(
        f"{uid}_{topic}_{source_token}",
        "Image Too Dark",
        "binary_sensor",
        "problem",
        None,
        is_dark,
        EntityCategory.DIAGNOSTIC,
    )
@PARSERS.register("tns1:VideoSource/ImageTooBright/AnalyticsService")
@PARSERS.register("tns1:VideoSource/ImageTooBright/ImagingService")
@PARSERS.register("tns1:VideoSource/ImageTooBright/RecordingService")
async def async_parse_image_too_bright(uid: str, msg) -> Event | None:
    """Parse an image-too-bright report into a diagnostic problem sensor.

    Topic: tns1:VideoSource/ImageTooBright/*
    """
    topic, payload = extract_message(msg)
    source_token = payload.Source.SimpleItem[0].Value
    is_bright = payload.Data.SimpleItem[0].Value == "true"
    return Event(
        f"{uid}_{topic}_{source_token}",
        "Image Too Bright",
        "binary_sensor",
        "problem",
        None,
        is_bright,
        EntityCategory.DIAGNOSTIC,
    )
@PARSERS.register("tns1:VideoSource/GlobalSceneChange/AnalyticsService")
@PARSERS.register("tns1:VideoSource/GlobalSceneChange/ImagingService")
@PARSERS.register("tns1:VideoSource/GlobalSceneChange/RecordingService")
async def async_parse_scene_change(uid: str, msg) -> Event | None:
    """Parse a global-scene-change report into a problem binary sensor.

    Topic: tns1:VideoSource/GlobalSceneChange/*
    """
    topic, payload = extract_message(msg)
    source_token = payload.Source.SimpleItem[0].Value
    changed = payload.Data.SimpleItem[0].Value == "true"
    return Event(
        f"{uid}_{topic}_{source_token}",
        "Global Scene Change",
        "binary_sensor",
        "problem",
        None,
        changed,
    )
@PARSERS.register("tns1:AudioAnalytics/Audio/DetectedSound")
async def async_parse_detected_sound(uid: str, msg) -> Event | None:
    """Parse a detected-sound message into a sound binary sensor event.

    Topic: tns1:AudioAnalytics/Audio/DetectedSound
    """
    topic, payload = extract_message(msg)
    # Build a Name -> Value map; last occurrence wins, like a sequential scan.
    tokens = {item.Name: item.Value for item in payload.Source.SimpleItem}
    audio_source = tokens.get("AudioSourceConfigurationToken", "")
    audio_analytics = tokens.get("AudioAnalyticsConfigurationToken", "")
    rule = tokens.get("Rule", "")
    return Event(
        f"{uid}_{topic}_{audio_source}_{audio_analytics}_{rule}",
        "Detected Sound",
        "binary_sensor",
        "sound",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
@PARSERS.register("tns1:RuleEngine/FieldDetector/ObjectsInside")
async def async_parse_field_detector(uid: str, msg) -> Event | None:
    """Parse a field-detector objects-inside event into a motion sensor.

    Topic: tns1:RuleEngine/FieldDetector/ObjectsInside
    """
    topic, payload = extract_message(msg)
    # Build a Name -> Value map; last occurrence wins, like a sequential scan.
    tokens = {item.Name: item.Value for item in payload.Source.SimpleItem}
    video_source = _normalize_video_source(
        tokens.get("VideoSourceConfigurationToken", "")
    )
    video_analytics = tokens.get("VideoAnalyticsConfigurationToken", "")
    rule = tokens.get("Rule", "")
    return Event(
        f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}",
        "Field Detection",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
@PARSERS.register("tns1:RuleEngine/CellMotionDetector/Motion")
async def async_parse_cell_motion_detector(uid: str, msg) -> Event | None:
    """Parse a cell-motion event into a motion binary sensor.

    Topic: tns1:RuleEngine/CellMotionDetector/Motion
    """
    topic, payload = extract_message(msg)
    # Build a Name -> Value map; last occurrence wins, like a sequential scan.
    tokens = {item.Name: item.Value for item in payload.Source.SimpleItem}
    video_source = _normalize_video_source(
        tokens.get("VideoSourceConfigurationToken", "")
    )
    video_analytics = tokens.get("VideoAnalyticsConfigurationToken", "")
    rule = tokens.get("Rule", "")
    return Event(
        f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}",
        "Cell Motion Detection",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
@PARSERS.register("tns1:RuleEngine/MotionRegionDetector/Motion")
async def async_parse_motion_region_detector(uid: str, msg) -> Event | None:
    """Parse a motion-region event into a motion binary sensor.

    Topic: tns1:RuleEngine/MotionRegionDetector/Motion
    """
    topic, payload = extract_message(msg)
    # Build a Name -> Value map; last occurrence wins, like a sequential scan.
    tokens = {item.Name: item.Value for item in payload.Source.SimpleItem}
    video_source = _normalize_video_source(
        tokens.get("VideoSourceConfigurationToken", "")
    )
    video_analytics = tokens.get("VideoAnalyticsConfigurationToken", "")
    rule = tokens.get("Rule", "")
    # Some firmwares report "1" instead of "true" for an active state.
    return Event(
        f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}",
        "Motion Region Detection",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value in ("1", "true"),
    )
@PARSERS.register("tns1:RuleEngine/TamperDetector/Tamper")
async def async_parse_tamper_detector(uid: str, msg) -> Event | None:
    """Parse a tamper event into a diagnostic problem binary sensor.

    Topic: tns1:RuleEngine/TamperDetector/Tamper
    """
    topic, payload = extract_message(msg)
    # Build a Name -> Value map; last occurrence wins, like a sequential scan.
    tokens = {item.Name: item.Value for item in payload.Source.SimpleItem}
    video_source = _normalize_video_source(
        tokens.get("VideoSourceConfigurationToken", "")
    )
    video_analytics = tokens.get("VideoAnalyticsConfigurationToken", "")
    rule = tokens.get("Rule", "")
    return Event(
        f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}",
        "Tamper Detection",
        "binary_sensor",
        "problem",
        None,
        payload.Data.SimpleItem[0].Value == "true",
        EntityCategory.DIAGNOSTIC,
    )
@PARSERS.register("tns1:RuleEngine/MyRuleDetector/DogCatDetect")
async def async_parse_dog_cat_detector(uid: str, msg) -> Event | None:
    """Parse a dog/cat detection into a motion binary sensor.

    Topic: tns1:RuleEngine/MyRuleDetector/DogCatDetect
    """
    topic, payload = extract_message(msg)
    tokens = {item.Name: item.Value for item in payload.Source.SimpleItem}
    video_source = _normalize_video_source(tokens.get("Source", ""))
    return Event(
        f"{uid}_{topic}_{video_source}",
        "Pet Detection",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
@PARSERS.register("tns1:RuleEngine/MyRuleDetector/VehicleDetect")
async def async_parse_vehicle_detector(uid: str, msg) -> Event | None:
    """Parse a vehicle detection into a motion binary sensor.

    Topic: tns1:RuleEngine/MyRuleDetector/VehicleDetect
    """
    topic, payload = extract_message(msg)
    tokens = {item.Name: item.Value for item in payload.Source.SimpleItem}
    video_source = _normalize_video_source(tokens.get("Source", ""))
    return Event(
        f"{uid}_{topic}_{video_source}",
        "Vehicle Detection",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
# Template Events for TP-Link Tapo smart detections, keyed by the SimpleItem
# name found in the message payload. `uid` and `value` are filled in per
# message via dataclasses.replace() in async_parse_tplink_detector.
_TAPO_EVENT_TEMPLATES: dict[str, Event] = {
    "IsVehicle": Event(
        uid="",
        name="Vehicle Detection",
        platform="binary_sensor",
        device_class="motion",
    ),
    "IsPeople": Event(
        uid="", name="Person Detection", platform="binary_sensor", device_class="motion"
    ),
    "IsPet": Event(
        uid="", name="Pet Detection", platform="binary_sensor", device_class="motion"
    ),
    "IsLineCross": Event(
        uid="",
        name="Line Detector Crossed",
        platform="binary_sensor",
        device_class="motion",
    ),
    "IsTamper": Event(
        uid="", name="Tamper Detection", platform="binary_sensor", device_class="tamper"
    ),
    "IsIntrusion": Event(
        uid="",
        name="Intrusion Detection",
        platform="binary_sensor",
        device_class="safety",
    ),
}
@PARSERS.register("tns1:RuleEngine/CellMotionDetector/Intrusion")
@PARSERS.register("tns1:RuleEngine/CellMotionDetector/LineCross")
@PARSERS.register("tns1:RuleEngine/CellMotionDetector/People")
@PARSERS.register("tns1:RuleEngine/CellMotionDetector/Tamper")
@PARSERS.register("tns1:RuleEngine/CellMotionDetector/TpSmartEvent")
@PARSERS.register("tns1:RuleEngine/PeopleDetector/People")
@PARSERS.register("tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent")
async def async_parse_tplink_detector(uid: str, msg) -> Event | None:
    """Parse TP-Link smart event messages into binary sensor events.

    Topic: tns1:RuleEngine/CellMotionDetector/Intrusion
    Topic: tns1:RuleEngine/CellMotionDetector/LineCross
    Topic: tns1:RuleEngine/CellMotionDetector/People
    Topic: tns1:RuleEngine/CellMotionDetector/Tamper
    Topic: tns1:RuleEngine/CellMotionDetector/TpSmartEvent
    Topic: tns1:RuleEngine/PeopleDetector/People
    Topic: tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent
    """
    topic, payload = extract_message(msg)
    # Build a Name -> Value map; last occurrence wins, like a sequential scan.
    tokens = {item.Name: item.Value for item in payload.Source.SimpleItem}
    video_source = _normalize_video_source(
        tokens.get("VideoSourceConfigurationToken", "")
    )
    video_analytics = tokens.get("VideoAnalyticsConfigurationToken", "")
    rule = tokens.get("Rule", "")
    event_uid = f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}"
    # The first recognized data item produces the event; others are ignored.
    for item in payload.Data.SimpleItem:
        if (template := _TAPO_EVENT_TEMPLATES.get(item.Name)) is not None:
            return dataclasses.replace(
                template, uid=event_uid, value=item.Value == "true"
            )
    return None
@PARSERS.register("tns1:RuleEngine/MyRuleDetector/PeopleDetect")
async def async_parse_person_detector(uid: str, msg) -> Event | None:
    """Parse a person detection into a motion binary sensor.

    Topic: tns1:RuleEngine/MyRuleDetector/PeopleDetect
    """
    topic, payload = extract_message(msg)
    tokens = {item.Name: item.Value for item in payload.Source.SimpleItem}
    video_source = _normalize_video_source(tokens.get("Source", ""))
    return Event(
        f"{uid}_{topic}_{video_source}",
        "Person Detection",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
@PARSERS.register("tns1:RuleEngine/MyRuleDetector/FaceDetect")
async def async_parse_face_detector(uid: str, msg) -> Event | None:
    """Parse a face detection into a motion binary sensor.

    Topic: tns1:RuleEngine/MyRuleDetector/FaceDetect
    """
    topic, payload = extract_message(msg)
    tokens = {item.Name: item.Value for item in payload.Source.SimpleItem}
    video_source = _normalize_video_source(tokens.get("Source", ""))
    return Event(
        f"{uid}_{topic}_{video_source}",
        "Face Detection",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
@PARSERS.register("tns1:RuleEngine/MyRuleDetector/Visitor")
async def async_parse_visitor_detector(uid: str, msg) -> Event | None:
    """Parse a visitor detection into an occupancy binary sensor.

    Topic: tns1:RuleEngine/MyRuleDetector/Visitor
    """
    topic, payload = extract_message(msg)
    tokens = {item.Name: item.Value for item in payload.Source.SimpleItem}
    video_source = _normalize_video_source(tokens.get("Source", ""))
    return Event(
        f"{uid}_{topic}_{video_source}",
        "Visitor Detection",
        "binary_sensor",
        "occupancy",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
@PARSERS.register("tns1:RuleEngine/MyRuleDetector/Package")
async def async_parse_package_detector(uid: str, msg) -> Event | None:
    """Parse a package detection into an occupancy binary sensor.

    Topic: tns1:RuleEngine/MyRuleDetector/Package
    """
    topic, payload = extract_message(msg)
    tokens = {item.Name: item.Value for item in payload.Source.SimpleItem}
    video_source = _normalize_video_source(tokens.get("Source", ""))
    return Event(
        f"{uid}_{topic}_{video_source}",
        "Package Detection",
        "binary_sensor",
        "occupancy",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
@PARSERS.register("tns1:Device/Trigger/DigitalInput")
async def async_parse_digital_input(uid: str, msg) -> Event | None:
    """Parse a digital-input trigger into a binary sensor event.

    Topic: tns1:Device/Trigger/DigitalInput
    """
    topic, payload = extract_message(msg)
    source_token = payload.Source.SimpleItem[0].Value
    is_active = payload.Data.SimpleItem[0].Value == "true"
    return Event(
        f"{uid}_{topic}_{source_token}",
        "Digital Input",
        "binary_sensor",
        None,
        None,
        is_active,
    )
@PARSERS.register("tns1:Device/Trigger/Relay")
async def async_parse_relay(uid: str, msg) -> Event | None:
    """Parse a relay trigger into a binary sensor event.

    Topic: tns1:Device/Trigger/Relay
    """
    topic, payload = extract_message(msg)
    source_token = payload.Source.SimpleItem[0].Value
    # Relay state is reported as "active"/"inactive" rather than "true".
    is_active = payload.Data.SimpleItem[0].Value == "active"
    return Event(
        f"{uid}_{topic}_{source_token}",
        "Relay Triggered",
        "binary_sensor",
        None,
        None,
        is_active,
    )
@PARSERS.register("tns1:Device/HardwareFailure/StorageFailure")
async def async_parse_storage_failure(uid: str, msg) -> Event | None:
    """Parse a storage failure into a diagnostic problem binary sensor.

    Topic: tns1:Device/HardwareFailure/StorageFailure
    """
    topic, payload = extract_message(msg)
    source_token = payload.Source.SimpleItem[0].Value
    failed = payload.Data.SimpleItem[0].Value == "true"
    return Event(
        f"{uid}_{topic}_{source_token}",
        "Storage Failure",
        "binary_sensor",
        "problem",
        None,
        failed,
        EntityCategory.DIAGNOSTIC,
    )
@PARSERS.register("tns1:Monitoring/ProcessorUsage")
async def async_parse_processor_usage(uid: str, msg) -> Event | None:
    """Parse a processor-usage report into a diagnostic percentage sensor.

    Topic: tns1:Monitoring/ProcessorUsage
    """
    topic, payload = extract_message(msg)
    raw = float(payload.Data.SimpleItem[0].Value)
    # Some devices report a 0..1 fraction rather than a percentage.
    percent = raw * 100 if raw <= 1 else raw
    return Event(
        f"{uid}_{topic}",
        "Processor Usage",
        "sensor",
        None,
        "percent",
        int(percent),
        EntityCategory.DIAGNOSTIC,
    )
@PARSERS.register("tns1:Monitoring/OperatingTime/LastReboot")
async def async_parse_last_reboot(uid: str, msg) -> Event | None:
    """Parse the last-reboot time into a diagnostic timestamp sensor.

    Topic: tns1:Monitoring/OperatingTime/LastReboot
    """
    topic, payload = extract_message(msg)
    rebooted_at = local_datetime_or_none(payload.Data.SimpleItem[0].Value)
    return Event(
        f"{uid}_{topic}",
        "Last Reboot",
        "sensor",
        "timestamp",
        None,
        rebooted_at,
        EntityCategory.DIAGNOSTIC,
    )
@PARSERS.register("tns1:Monitoring/OperatingTime/LastReset")
async def async_parse_last_reset(uid: str, msg) -> Event | None:
    """Parse the last-reset time into a disabled-by-default timestamp sensor.

    Topic: tns1:Monitoring/OperatingTime/LastReset
    """
    topic, payload = extract_message(msg)
    reset_at = local_datetime_or_none(payload.Data.SimpleItem[0].Value)
    return Event(
        f"{uid}_{topic}",
        "Last Reset",
        "sensor",
        "timestamp",
        None,
        reset_at,
        EntityCategory.DIAGNOSTIC,
        entity_enabled=False,
    )
@PARSERS.register("tns1:Monitoring/Backup/Last")
async def async_parse_backup_last(uid: str, msg) -> Event | None:
    """Parse the last-backup time into a disabled-by-default timestamp sensor.

    Topic: tns1:Monitoring/Backup/Last
    """
    topic, payload = extract_message(msg)
    backed_up_at = local_datetime_or_none(payload.Data.SimpleItem[0].Value)
    return Event(
        f"{uid}_{topic}",
        "Last Backup",
        "sensor",
        "timestamp",
        None,
        backed_up_at,
        EntityCategory.DIAGNOSTIC,
        entity_enabled=False,
    )
@PARSERS.register("tns1:Monitoring/OperatingTime/LastClockSynchronization")
async def async_parse_last_clock_sync(uid: str, msg) -> Event | None:
    """Parse the last clock sync into a disabled-by-default timestamp sensor.

    Topic: tns1:Monitoring/OperatingTime/LastClockSynchronization
    """
    topic, payload = extract_message(msg)
    synced_at = local_datetime_or_none(payload.Data.SimpleItem[0].Value)
    return Event(
        f"{uid}_{topic}",
        "Last Clock Synchronization",
        "sensor",
        "timestamp",
        None,
        synced_at,
        EntityCategory.DIAGNOSTIC,
        entity_enabled=False,
    )
@PARSERS.register("tns1:RecordingConfig/JobState")
async def async_parse_jobstate(uid: str, msg) -> Event | None:
    """Parse a recording job state into a diagnostic binary sensor.

    Topic: tns1:RecordingConfig/JobState
    """
    topic, payload = extract_message(msg)
    source_token = payload.Source.SimpleItem[0].Value
    # The job state is reported as "Active"/"Idle" rather than "true".
    is_recording = payload.Data.SimpleItem[0].Value == "Active"
    return Event(
        f"{uid}_{topic}_{source_token}",
        "Recording Job State",
        "binary_sensor",
        None,
        None,
        is_recording,
        EntityCategory.DIAGNOSTIC,
    )
@PARSERS.register("tns1:RuleEngine/LineDetector/Crossed")
async def async_parse_linedetector_crossed(uid: str, msg) -> Event | None:
    """Parse a line-crossed count into a diagnostic sensor.

    Topic: tns1:RuleEngine/LineDetector/Crossed
    """
    topic, payload = extract_message(msg)
    # Build a Name -> Value map; last occurrence wins, like a sequential scan.
    tokens = {item.Name: item.Value for item in payload.Source.SimpleItem}
    # NOTE(review): unlike sibling parsers, the video source token is NOT
    # normalized here; kept as-is to preserve existing unique IDs.
    video_source = tokens.get("VideoSourceConfigurationToken", "")
    video_analytics = tokens.get("VideoAnalyticsConfigurationToken", "")
    rule = tokens.get("Rule", "")
    return Event(
        f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}",
        "Line Detector Crossed",
        "sensor",
        None,
        None,
        payload.Data.SimpleItem[0].Value,
        EntityCategory.DIAGNOSTIC,
    )
@PARSERS.register("tns1:RuleEngine/CountAggregation/Counter")
async def async_parse_count_aggregation_counter(uid: str, msg) -> Event | None:
    """Parse a count-aggregation counter into a diagnostic sensor.

    Topic: tns1:RuleEngine/CountAggregation/Counter
    """
    topic, payload = extract_message(msg)
    # Build a Name -> Value map; last occurrence wins, like a sequential scan.
    tokens = {item.Name: item.Value for item in payload.Source.SimpleItem}
    video_source = _normalize_video_source(
        tokens.get("VideoSourceConfigurationToken", "")
    )
    video_analytics = tokens.get("VideoAnalyticsConfigurationToken", "")
    rule = tokens.get("Rule", "")
    return Event(
        f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}",
        "Count Aggregation Counter",
        "sensor",
        None,
        None,
        payload.Data.SimpleItem[0].Value,
        EntityCategory.DIAGNOSTIC,
    )
@PARSERS.register("tns1:UserAlarm/IVA/HumanShapeDetect")
async def async_parse_human_shape_detect(uid: str, msg) -> Event | None:
    """Parse a human-shape detection into a motion binary sensor.

    Topic: tns1:UserAlarm/IVA/HumanShapeDetect
    """
    topic, payload = extract_message(msg)
    # First VideoSourceConfigurationToken wins (original loop broke early).
    raw_source = next(
        (
            item.Value
            for item in payload.Source.SimpleItem
            if item.Name == "VideoSourceConfigurationToken"
        ),
        "",
    )
    video_source = _normalize_video_source(raw_source)
    return Event(
        f"{uid}_{topic}_{video_source}",
        "Human Shape Detect",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )

View File

@@ -9,6 +9,6 @@
"iot_class": "local_push",
"loggers": ["openevsehttp"],
"quality_scale": "bronze",
"requirements": ["python-openevse-http==0.2.5"],
"requirements": ["python-openevse-http==0.2.1"],
"zeroconf": ["_openevse._tcp.local."]
}

View File

@@ -2,17 +2,35 @@
import logging
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, device_registry as dr
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_CODE, CONF_HOST, CONF_NAME, CONF_PORT, Platform
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import (
config_validation as cv,
device_registry as dr,
issue_registry as ir,
)
from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries
from homeassistant.helpers.typing import ConfigType
from .client import SatelClient
from .const import (
CONF_ARM_HOME_MODE,
CONF_DEVICE_PARTITIONS,
CONF_OUTPUT_NUMBER,
CONF_OUTPUTS,
CONF_PARTITION_NUMBER,
CONF_SWITCHABLE_OUTPUT_NUMBER,
CONF_SWITCHABLE_OUTPUTS,
CONF_ZONE_NUMBER,
CONF_ZONE_TYPE,
CONF_ZONES,
DEFAULT_CONF_ARM_HOME_MODE,
DEFAULT_PORT,
DEFAULT_ZONE_TYPE,
DOMAIN,
SUBENTRY_TYPE_OUTPUT,
SUBENTRY_TYPE_PARTITION,
@@ -31,7 +49,104 @@ _LOGGER = logging.getLogger(__name__)
PLATFORMS = [Platform.ALARM_CONTROL_PANEL, Platform.BINARY_SENSOR, Platform.SWITCH]
CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
ZONE_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_ZONE_TYPE, default=DEFAULT_ZONE_TYPE): cv.string,
}
)
EDITABLE_OUTPUT_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string})
PARTITION_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_ARM_HOME_MODE, default=DEFAULT_CONF_ARM_HOME_MODE): vol.In(
[1, 2, 3]
),
}
)
def is_alarm_code_necessary(value):
    """Validate that an alarm code is configured when switchable outputs are used."""
    needs_code = bool(value.get(CONF_SWITCHABLE_OUTPUTS)) and CONF_CODE not in value
    if needs_code:
        raise vol.Invalid("You need to specify alarm code to use switchable_outputs")
    return value
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_CODE): cv.string,
vol.Optional(CONF_DEVICE_PARTITIONS, default={}): {
vol.Coerce(int): PARTITION_SCHEMA
},
vol.Optional(CONF_ZONES, default={}): {vol.Coerce(int): ZONE_SCHEMA},
vol.Optional(CONF_OUTPUTS, default={}): {vol.Coerce(int): ZONE_SCHEMA},
vol.Optional(CONF_SWITCHABLE_OUTPUTS, default={}): {
vol.Coerce(int): EDITABLE_OUTPUT_SCHEMA
},
},
is_alarm_code_necessary,
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, hass_config: ConfigType) -> bool:
    """Schedule the import of any YAML Satel Integra configuration."""
    domain_config = hass_config.get(DOMAIN)
    if domain_config:
        # Run the one-shot YAML -> config-entry import in the background.
        hass.async_create_task(_async_import(hass, domain_config))
    return True
async def _async_import(hass: HomeAssistant, config: ConfigType) -> None:
    """Process YAML import.

    Starts an import flow only when no config entry exists yet. On an aborted
    import a connection-error repair issue is raised; otherwise the generic
    YAML-deprecation issue is raised against the homeassistant domain.
    """
    if not hass.config_entries.async_entries(DOMAIN):
        # Start import flow
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": SOURCE_IMPORT}, data=config
        )
        if result.get("type") == FlowResultType.ABORT:
            # Import failed; raise an integration-specific repair issue.
            ir.async_create_issue(
                hass,
                DOMAIN,
                "deprecated_yaml_import_issue_cannot_connect",
                breaks_in_ha_version="2026.4.0",
                is_fixable=False,
                issue_domain=DOMAIN,
                severity=ir.IssueSeverity.WARNING,
                translation_key="deprecated_yaml_import_issue_cannot_connect",
                translation_placeholders={
                    "domain": DOMAIN,
                    "integration_title": "Satel Integra",
                },
            )
            return
    # Entry exists or import succeeded: warn that YAML support is deprecated.
    ir.async_create_issue(
        hass,
        HOMEASSISTANT_DOMAIN,
        f"deprecated_yaml_{DOMAIN}",
        breaks_in_ha_version="2026.4.0",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=ir.IssueSeverity.WARNING,
        translation_key="deprecated_yaml",
        translation_placeholders={
            "domain": DOMAIN,
            "integration_title": "Satel Integra",
        },
    )
async def async_setup_entry(hass: HomeAssistant, entry: SatelConfigEntry) -> bool:

View File

@@ -13,6 +13,7 @@ from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
ConfigSubentryData,
ConfigSubentryFlow,
OptionsFlow,
SubentryFlowResult,
@@ -23,11 +24,15 @@ from homeassistant.helpers import config_validation as cv, selector
from .const import (
CONF_ARM_HOME_MODE,
CONF_DEVICE_PARTITIONS,
CONF_OUTPUT_NUMBER,
CONF_OUTPUTS,
CONF_PARTITION_NUMBER,
CONF_SWITCHABLE_OUTPUT_NUMBER,
CONF_SWITCHABLE_OUTPUTS,
CONF_ZONE_NUMBER,
CONF_ZONE_TYPE,
CONF_ZONES,
DEFAULT_CONF_ARM_HOME_MODE,
DEFAULT_PORT,
DOMAIN,
@@ -48,7 +53,6 @@ CONNECTION_SCHEMA = vol.Schema(
}
)
CODE_SCHEMA = vol.Schema(
{
vol.Optional(CONF_CODE): cv.string,
@@ -139,6 +143,97 @@ class SatelConfigFlow(ConfigFlow, domain=DOMAIN):
step_id="user", data_schema=CONNECTION_SCHEMA, errors=errors
)
async def async_step_import(
    self, import_config: dict[str, Any]
) -> ConfigFlowResult:
    """Handle a flow initialized by import.

    Verifies connectivity, then converts the YAML partitions, zones, outputs
    and switchable outputs into config-entry subentries. Aborts with
    "cannot_connect" when the panel is unreachable.
    """
    valid = await self.test_connection(
        import_config[CONF_HOST], import_config.get(CONF_PORT, DEFAULT_PORT)
    )
    if valid:
        subentries: list[ConfigSubentryData] = []
        # Partitions: keep name, arm-home mode (defaulted) and number.
        for partition_number, partition_data in import_config.get(
            CONF_DEVICE_PARTITIONS, {}
        ).items():
            subentries.append(
                {
                    "subentry_type": SUBENTRY_TYPE_PARTITION,
                    "title": f"{partition_data[CONF_NAME]} ({partition_number})",
                    "unique_id": f"{SUBENTRY_TYPE_PARTITION}_{partition_number}",
                    "data": {
                        CONF_NAME: partition_data[CONF_NAME],
                        CONF_ARM_HOME_MODE: partition_data.get(
                            CONF_ARM_HOME_MODE, DEFAULT_CONF_ARM_HOME_MODE
                        ),
                        CONF_PARTITION_NUMBER: partition_number,
                    },
                }
            )
        # Zones: default the device class to motion when not specified.
        for zone_number, zone_data in import_config.get(CONF_ZONES, {}).items():
            subentries.append(
                {
                    "subentry_type": SUBENTRY_TYPE_ZONE,
                    "title": f"{zone_data[CONF_NAME]} ({zone_number})",
                    "unique_id": f"{SUBENTRY_TYPE_ZONE}_{zone_number}",
                    "data": {
                        CONF_NAME: zone_data[CONF_NAME],
                        CONF_ZONE_NUMBER: zone_number,
                        CONF_ZONE_TYPE: zone_data.get(
                            CONF_ZONE_TYPE, BinarySensorDeviceClass.MOTION
                        ),
                    },
                }
            )
        # Outputs: same shape as zones but keyed by output number.
        for output_number, output_data in import_config.get(
            CONF_OUTPUTS, {}
        ).items():
            subentries.append(
                {
                    "subentry_type": SUBENTRY_TYPE_OUTPUT,
                    "title": f"{output_data[CONF_NAME]} ({output_number})",
                    "unique_id": f"{SUBENTRY_TYPE_OUTPUT}_{output_number}",
                    "data": {
                        CONF_NAME: output_data[CONF_NAME],
                        CONF_OUTPUT_NUMBER: output_number,
                        CONF_ZONE_TYPE: output_data.get(
                            CONF_ZONE_TYPE, BinarySensorDeviceClass.MOTION
                        ),
                    },
                }
            )
        # Switchable outputs: only a name and number are carried over.
        for switchable_output_number, switchable_output_data in import_config.get(
            CONF_SWITCHABLE_OUTPUTS, {}
        ).items():
            subentries.append(
                {
                    "subentry_type": SUBENTRY_TYPE_SWITCHABLE_OUTPUT,
                    "title": f"{switchable_output_data[CONF_NAME]} ({switchable_output_number})",
                    "unique_id": f"{SUBENTRY_TYPE_SWITCHABLE_OUTPUT}_{switchable_output_number}",
                    "data": {
                        CONF_NAME: switchable_output_data[CONF_NAME],
                        CONF_SWITCHABLE_OUTPUT_NUMBER: switchable_output_number,
                    },
                }
            )
        # Connection data goes in `data`; the optional code in `options`.
        return self.async_create_entry(
            title=import_config[CONF_HOST],
            data={
                CONF_HOST: import_config[CONF_HOST],
                CONF_PORT: import_config.get(CONF_PORT, DEFAULT_PORT),
            },
            options={CONF_CODE: import_config.get(CONF_CODE)},
            subentries=subentries,
        )
    return self.async_abort(reason="cannot_connect")
async def test_connection(self, host: str, port: int) -> bool:
"""Test a connection to the Satel alarm."""
controller = AsyncSatel(host, port, self.hass.loop)

View File

@@ -2,6 +2,7 @@
DEFAULT_CONF_ARM_HOME_MODE = 1
DEFAULT_PORT = 7094
DEFAULT_ZONE_TYPE = "motion"
DOMAIN = "satel_integra"
@@ -15,7 +16,11 @@ CONF_ZONE_NUMBER = "zone_number"
CONF_OUTPUT_NUMBER = "output_number"
CONF_SWITCHABLE_OUTPUT_NUMBER = "switchable_output_number"
CONF_DEVICE_PARTITIONS = "partitions"
CONF_ARM_HOME_MODE = "arm_home_mode"
CONF_ZONE_TYPE = "type"
CONF_ZONES = "zones"
CONF_OUTPUTS = "outputs"
CONF_SWITCHABLE_OUTPUTS = "switchable_outputs"
ZONES = "zones"

View File

@@ -167,6 +167,12 @@
"message": "Cannot control switchable outputs because no user code is configured for this Satel Integra entry. Configure a code in the integration options to enable output control."
}
},
"issues": {
"deprecated_yaml_import_issue_cannot_connect": {
"description": "Configuring {integration_title} using YAML is being removed but there was an connection error importing your existing configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the `{domain}` YAML configuration from your configuration.yaml file and add the {integration_title} integration manually.",
"title": "YAML import failed due to a connection error"
}
},
"options": {
"step": {
"init": {

View File

@@ -17,11 +17,13 @@ from homeassistant.components import climate as FanState
from homeassistant.components.climate import (
ATTR_FAN_MODE,
ATTR_TEMPERATURE,
PRESET_AWAY,
PRESET_BOOST,
PRESET_COMFORT,
PRESET_ECO,
PRESET_HOME,
PRESET_NONE,
PRESET_SLEEP,
ClimateEntity,
ClimateEntityFeature,
HVACMode,
@@ -38,11 +40,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity
from . import SwitchbotCloudData, SwitchBotCoordinator
from .const import (
CLIMATE_PRESET_SCHEDULE,
DOMAIN,
SMART_RADIATOR_THERMOSTAT_AFTER_COMMAND_REFRESH,
)
from .const import DOMAIN, SMART_RADIATOR_THERMOSTAT_AFTER_COMMAND_REFRESH
from .entity import SwitchBotCloudEntity
_LOGGER = getLogger(__name__)
@@ -208,7 +206,6 @@ RADIATOR_PRESET_MODE_MAP: dict[str, SmartRadiatorThermostatMode] = {
PRESET_BOOST: SmartRadiatorThermostatMode.FAST_HEATING,
PRESET_COMFORT: SmartRadiatorThermostatMode.COMFORT,
PRESET_HOME: SmartRadiatorThermostatMode.MANUAL,
CLIMATE_PRESET_SCHEDULE: SmartRadiatorThermostatMode.SCHEDULE,
}
RADIATOR_HA_PRESET_MODE_MAP = {
@@ -230,10 +227,15 @@ class SwitchBotCloudSmartRadiatorThermostat(SwitchBotCloudEntity, ClimateEntity)
_attr_target_temperature_step = PRECISION_TENTHS
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_preset_modes = list(RADIATOR_PRESET_MODE_MAP)
_attr_translation_key = "smart_radiator_thermostat"
_attr_preset_modes = [
PRESET_NONE,
PRESET_ECO,
PRESET_AWAY,
PRESET_BOOST,
PRESET_COMFORT,
PRESET_HOME,
PRESET_SLEEP,
]
_attr_preset_mode = PRESET_HOME
_attr_hvac_modes = [
@@ -298,7 +300,7 @@ class SwitchBotCloudSmartRadiatorThermostat(SwitchBotCloudEntity, ClimateEntity)
SmartRadiatorThermostatMode(mode)
]
if self.preset_mode == PRESET_NONE:
if self.preset_mode in [PRESET_NONE, PRESET_AWAY]:
self._attr_hvac_mode = HVACMode.OFF
else:
self._attr_hvac_mode = HVACMode.HEAT

View File

@@ -17,9 +17,6 @@ VACUUM_FAN_SPEED_STANDARD = "standard"
VACUUM_FAN_SPEED_STRONG = "strong"
VACUUM_FAN_SPEED_MAX = "max"
CLIMATE_PRESET_SCHEDULE = "schedule"
AFTER_COMMAND_REFRESH = 5
COVER_ENTITY_AFTER_COMMAND_REFRESH = 10
SMART_RADIATOR_THERMOSTAT_AFTER_COMMAND_REFRESH = 30

View File

@@ -8,17 +8,6 @@
"default": "mdi:chevron-left-box"
}
},
"climate": {
"smart_radiator_thermostat": {
"state_attributes": {
"preset_mode": {
"state": {
"schedule": "mdi:clock-outline"
}
}
}
}
},
"fan": {
"air_purifier": {
"default": "mdi:air-purifier",

View File

@@ -26,17 +26,6 @@
"name": "Previous"
}
},
"climate": {
"smart_radiator_thermostat": {
"state_attributes": {
"preset_mode": {
"state": {
"schedule": "Schedule"
}
}
}
}
},
"fan": {
"air_purifier": {
"state_attributes": {

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aiotankerkoenig"],
"quality_scale": "platinum",
"requirements": ["aiotankerkoenig==0.5.1"]
"requirements": ["aiotankerkoenig==0.4.2"]
}

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["uhooapi==1.2.8"]
"requirements": ["uhooapi==1.2.6"]
}

View File

@@ -23,7 +23,6 @@ from homeassistant.const import ( # noqa: F401 # STATE_PAUSED/IDLE are API
STATE_ON,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
@@ -64,6 +63,7 @@ SERVICE_STOP = "stop"
DEFAULT_NAME = "Vacuum cleaner robot"
ISSUE_SEGMENTS_CHANGED = "segments_changed"
ISSUE_SEGMENTS_MAPPING_NOT_CONFIGURED = "segments_mapping_not_configured"
_BATTERY_DEPRECATION_IGNORED_PLATFORMS = ("template",)
@@ -438,14 +438,7 @@ class StateVacuumEntity(
)
options: Mapping[str, Any] = self.registry_entry.options.get(DOMAIN, {})
area_mapping: dict[str, list[str]] | None = options.get("area_mapping")
if area_mapping is None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="area_mapping_not_configured",
translation_placeholders={"entity_id": self.entity_id},
)
area_mapping: dict[str, list[str]] = options.get("area_mapping", {})
# We use a dict to preserve the order of segments.
segment_ids: dict[str, None] = {}

View File

@@ -89,11 +89,6 @@
}
}
},
"exceptions": {
"area_mapping_not_configured": {
"message": "Area mapping is not configured for `{entity_id}`. Configure the segment-to-area mapping before using this action."
}
},
"issues": {
"segments_changed": {
"description": "",

View File

@@ -111,7 +111,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: XboxConfigEntry) -> bo
# Migrate unique_id from `xbox` to account xuid and
# change generic entry name to user's gamertag
try:
own = await client.people.get_friend_by_xuid(client.xuid)
own = await client.people.get_friends_by_xuid(client.xuid)
except TimeoutException as e:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,

View File

@@ -74,7 +74,7 @@ class OAuth2FlowHandler(
client = XboxLiveClient(auth)
me = await client.people.get_friend_by_xuid(client.xuid)
me = await client.people.get_friends_by_xuid(client.xuid)
await self.async_set_unique_id(client.xuid)

View File

@@ -213,10 +213,10 @@ class XboxPresenceCoordinator(XboxBaseCoordinator[XboxData]):
async def update_data(self) -> XboxData:
"""Fetch presence data."""
me = await self.client.people.get_friend_by_xuid(self.client.xuid)
batch = await self.client.people.get_friends_by_xuid(self.client.xuid)
friends = await self.client.people.get_friends_own()
presence_data = {self.client.xuid: me.people[0]}
presence_data = {self.client.xuid: batch.people[0]}
presence_data.update(
{
friend.xuid: friend

View File

@@ -14,7 +14,7 @@
"iot_class": "cloud_polling",
"quality_scale": "platinum",
"requirements": ["python-xbox==0.2.0"],
"requirements": ["python-xbox==0.1.3"],
"ssdp": [
{
"manufacturer": "Microsoft Corporation",

View File

@@ -28,7 +28,7 @@ from .helpers import (
)
from .models import ZwaveJSConfigEntry
KEYS_TO_REDACT = {"homeId", "location", "dsk"}
KEYS_TO_REDACT = {"homeId", "location"}
VALUES_TO_REDACT = (
ZwaveValueMatcher(property_="userCode", command_class=CommandClass.USER_CODE),

View File

@@ -961,7 +961,8 @@ class HomeAssistant:
async def async_block_till_done(self, wait_background_tasks: bool = False) -> None:
"""Block until all pending work is done."""
# To flush out any call_soon_threadsafe
# Sleep twice to flush out any call_soon_threadsafe
await asyncio.sleep(0)
await asyncio.sleep(0)
start_time: float | None = None
current_task = asyncio.current_task()

View File

@@ -212,11 +212,6 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [
"domain": "govee_ble",
"local_name": "GVH5110*",
},
{
"connectable": False,
"domain": "govee_ble",
"local_name": "GV5140*",
},
{
"connectable": False,
"domain": "govee_ble",

View File

@@ -7338,12 +7338,6 @@
}
}
},
"ubisys": {
"name": "Ubisys",
"iot_standards": [
"zigbee"
]
},
"ubiwizz": {
"name": "Ubiwizz",
"integration_type": "virtual",

View File

@@ -3,6 +3,7 @@
from __future__ import annotations
import asyncio
from contextlib import suppress
import importlib
import logging
import sys
@@ -52,10 +53,11 @@ async def async_import_module(hass: HomeAssistant, name: str) -> ModuleType:
if isinstance(ex, ModuleNotFoundError):
failure_cache[name] = True
import_future.set_exception(ex)
# Set the exception retrieved flag on the future since
# it will never be retrieved unless there
# are concurrent calls
import_future.exception()
with suppress(BaseException):
# Set the exception retrieved flag on the future since
# it will never be retrieved unless there
# are concurrent calls
import_future.result()
raise
finally:
del import_futures[name]

View File

@@ -40,7 +40,7 @@ habluetooth==5.8.0
hass-nabucasa==1.15.0
hassil==3.5.0
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20260302.0
home-assistant-frontend==20260226.0
home-assistant-intents==2026.2.13
httpx==0.28.1
ifaddr==0.2.0

View File

@@ -283,7 +283,7 @@ def color_xy_brightness_to_RGB(
Y = brightness
if vY == 0.0:
vY += 1e-11
vY += 0.00000000001
X = (Y / vY) * vX
Z = (Y / vY) * (1 - vX - vY)

View File

@@ -477,7 +477,7 @@ class MassVolumeConcentrationConverter(BaseUnitConverter):
UNIT_CLASS = "concentration"
_UNIT_CONVERSION: dict[str | None, float] = {
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: 1_000_000.0, # 1000 µg/m³ = 1 mg/m³
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: 1000000.0, # 1000 µg/m³ = 1 mg/m³
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER: 1000.0, # 1000 mg/m³ = 1 g/m³
CONCENTRATION_GRAMS_PER_CUBIC_METER: 1.0,
}

19
requirements_all.txt generated
View File

@@ -416,7 +416,7 @@ aioswitcher==6.1.0
aiosyncthing==0.7.1
# homeassistant.components.tankerkoenig
aiotankerkoenig==0.5.1
aiotankerkoenig==0.4.2
# homeassistant.components.tedee
aiotedee==0.2.25
@@ -828,7 +828,7 @@ dremel3dpy==2.1.1
dropmqttapi==1.0.3
# homeassistant.components.dsmr
dsmr-parser==1.5.0
dsmr-parser==1.4.3
# homeassistant.components.dwd_weather_warnings
dwdwfsapi==1.0.7
@@ -1223,7 +1223,7 @@ hole==0.9.0
holidays==0.84
# homeassistant.components.frontend
home-assistant-frontend==20260302.0
home-assistant-frontend==20260226.0
# homeassistant.components.conversation
home-assistant-intents==2026.2.13
@@ -1374,7 +1374,7 @@ kiwiki-client==0.1.1
knocki==0.4.2
# homeassistant.components.knx
knx-frontend==2026.3.2.183756
knx-frontend==2026.2.25.165736
# homeassistant.components.konnected
konnected==1.2.0
@@ -1681,9 +1681,6 @@ onedrive-personal-sdk==0.1.4
# homeassistant.components.onvif
onvif-zeep-async==4.0.4
# homeassistant.components.onvif
onvif_parsers==1.2.2
# homeassistant.components.opengarage
open-garage==0.2.0
@@ -2236,7 +2233,7 @@ pylitejet==0.6.3
pylitterbot==2025.1.0
# homeassistant.components.lutron_caseta
pylutron-caseta==0.27.0
pylutron-caseta==0.26.0
# homeassistant.components.lutron
pylutron==0.2.18
@@ -2608,7 +2605,7 @@ python-open-router==0.3.3
python-opendata-transport==0.5.0
# homeassistant.components.openevse
python-openevse-http==0.2.5
python-openevse-http==0.2.1
# homeassistant.components.opensky
python-opensky==1.0.1
@@ -2660,7 +2657,7 @@ python-telegram-bot[socks]==22.1
python-vlc==3.0.18122
# homeassistant.components.xbox
python-xbox==0.2.0
python-xbox==0.1.3
# homeassistant.components.egardia
pythonegardia==1.0.52
@@ -3145,7 +3142,7 @@ typedmonarchmoney==0.7.0
uasiren==0.0.1
# homeassistant.components.uhoo
uhooapi==1.2.8
uhooapi==1.2.6
# homeassistant.components.unifiprotect
uiprotect==10.2.2

View File

@@ -401,7 +401,7 @@ aioswitcher==6.1.0
aiosyncthing==0.7.1
# homeassistant.components.tankerkoenig
aiotankerkoenig==0.5.1
aiotankerkoenig==0.4.2
# homeassistant.components.tedee
aiotedee==0.2.25
@@ -734,7 +734,7 @@ dremel3dpy==2.1.1
dropmqttapi==1.0.3
# homeassistant.components.dsmr
dsmr-parser==1.5.0
dsmr-parser==1.4.3
# homeassistant.components.dwd_weather_warnings
dwdwfsapi==1.0.7
@@ -1084,7 +1084,7 @@ hole==0.9.0
holidays==0.84
# homeassistant.components.frontend
home-assistant-frontend==20260302.0
home-assistant-frontend==20260226.0
# homeassistant.components.conversation
home-assistant-intents==2026.2.13
@@ -1211,7 +1211,7 @@ kegtron-ble==1.0.2
knocki==0.4.2
# homeassistant.components.knx
knx-frontend==2026.3.2.183756
knx-frontend==2026.2.25.165736
# homeassistant.components.konnected
konnected==1.2.0
@@ -1467,9 +1467,6 @@ onedrive-personal-sdk==0.1.4
# homeassistant.components.onvif
onvif-zeep-async==4.0.4
# homeassistant.components.onvif
onvif_parsers==1.2.2
# homeassistant.components.opengarage
open-garage==0.2.0
@@ -1910,7 +1907,7 @@ pylitejet==0.6.3
pylitterbot==2025.1.0
# homeassistant.components.lutron_caseta
pylutron-caseta==0.27.0
pylutron-caseta==0.26.0
# homeassistant.components.lutron
pylutron==0.2.18
@@ -2207,7 +2204,7 @@ python-open-router==0.3.3
python-opendata-transport==0.5.0
# homeassistant.components.openevse
python-openevse-http==0.2.5
python-openevse-http==0.2.1
# homeassistant.components.opensky
python-opensky==1.0.1
@@ -2253,7 +2250,7 @@ python-technove==2.0.0
python-telegram-bot[socks]==22.1
# homeassistant.components.xbox
python-xbox==0.2.0
python-xbox==0.1.3
# homeassistant.components.uptime_kuma
pythonkuma==0.5.0
@@ -2648,7 +2645,7 @@ typedmonarchmoney==0.7.0
uasiren==0.0.1
# homeassistant.components.uhoo
uhooapi==1.2.8
uhooapi==1.2.6
# homeassistant.components.unifiprotect
uiprotect==10.2.2

View File

@@ -16,7 +16,6 @@ from homeassistant.components.conversation import (
async_get_chat_log,
)
from homeassistant.components.conversation.const import HOME_ASSISTANT_AGENT
from homeassistant.components.conversation.models import ConversationResult
from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN
from homeassistant.const import ATTR_FRIENDLY_NAME
from homeassistant.core import HomeAssistant
@@ -174,36 +173,6 @@ async def test_http_api_wrong_data(
assert resp.status == HTTPStatus.BAD_REQUEST
async def test_http_processing_intent_with_device_satellite_ids(
hass: HomeAssistant,
init_components,
hass_client: ClientSessionGenerator,
) -> None:
"""Test processing intent via HTTP API with both device_id and satellite_id."""
client = await hass_client()
mock_result = intent.IntentResponse(language=hass.config.language)
mock_result.async_set_speech("test")
with patch(
"homeassistant.components.conversation.http.async_converse",
return_value=ConversationResult(response=mock_result),
) as mock_converse:
resp = await client.post(
"/api/conversation/process",
json={
"text": "test",
"device_id": "test-device-id",
"satellite_id": "test-satellite-id",
},
)
assert resp.status == HTTPStatus.OK
mock_converse.assert_called_once()
call_kwargs = mock_converse.call_args[1]
assert call_kwargs["device_id"] == "test-device-id"
assert call_kwargs["satellite_id"] == "test-satellite-id"
@pytest.mark.parametrize(
"payload",
[
@@ -252,38 +221,6 @@ async def test_ws_api(
assert msg["result"]["response"]["data"]["code"] == "no_intent_match"
async def test_ws_api_with_device_satellite_ids(
hass: HomeAssistant,
init_components,
hass_ws_client: WebSocketGenerator,
) -> None:
"""Test the Websocket conversation API with both device_id and satellite_id."""
client = await hass_ws_client(hass)
mock_result = intent.IntentResponse(language=hass.config.language)
mock_result.async_set_speech("test")
with patch(
"homeassistant.components.conversation.http.async_converse",
return_value=ConversationResult(response=mock_result),
) as mock_converse:
await client.send_json_auto_id(
{
"type": "conversation/process",
"text": "test",
"device_id": "test-device-id",
"satellite_id": "test-satellite-id",
}
)
msg = await client.receive_json()
assert msg["success"]
mock_converse.assert_called_once()
call_kwargs = mock_converse.call_args[1]
assert call_kwargs["device_id"] == "test-device-id"
assert call_kwargs["satellite_id"] == "test-satellite-id"
@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS)
async def test_ws_prepare(
hass: HomeAssistant, init_components, hass_ws_client: WebSocketGenerator, agent_id

View File

@@ -1,64 +1,19 @@
"""eafm fixtures."""
from collections.abc import Generator
from typing import Any
from unittest.mock import AsyncMock, patch
from unittest.mock import patch
import pytest
from homeassistant.components.eafm.const import DOMAIN
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
@pytest.fixture
def mock_get_stations() -> Generator[AsyncMock]:
def mock_get_stations():
"""Mock aioeafm.get_stations."""
with patch("homeassistant.components.eafm.config_flow.get_stations") as patched:
patched.return_value = [
{"label": "My station", "stationReference": "L12345", "RLOIid": "R12345"}
]
yield patched
@pytest.fixture
def mock_get_station(initial_value: dict[str, Any]) -> Generator[AsyncMock]:
def mock_get_station():
"""Mock aioeafm.get_station."""
with patch("homeassistant.components.eafm.coordinator.get_station") as patched:
patched.return_value = initial_value
yield patched
@pytest.fixture
def initial_value() -> dict[str, Any]:
"""Mock aioeafm.get_station."""
return {
"label": "My station",
"measures": [
{
"@id": "really-long-unique-id",
"label": "York Viking Recorder - level-stage-i-15_min----",
"qualifier": "Stage",
"parameterName": "Water Level",
"latestReading": {"value": 5},
"stationReference": "L1234",
"unit": "http://qudt.org/1.1/vocab/unit#Meter",
"unitName": "m",
}
],
}
@pytest.fixture
def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
"""Create a dummy config entry for testing."""
entry = MockConfigEntry(
version=1,
domain=DOMAIN,
entry_id="VikingRecorder1234",
data={"station": "L1234"},
title="Viking Recorder",
)
entry.add_to_hass(hass)
return entry

View File

@@ -1,34 +0,0 @@
# serializer version: 1
# name: test_load_unload_entry
list([
DeviceRegistryEntrySnapshot({
'area_id': None,
'config_entries': <ANY>,
'config_entries_subentries': <ANY>,
'configuration_url': None,
'connections': set({
}),
'disabled_by': None,
'entry_type': <DeviceEntryType.SERVICE: 'service'>,
'hw_version': None,
'id': <ANY>,
'identifiers': set({
tuple(
'eafm',
'L1234',
),
}),
'labels': set({
}),
'manufacturer': 'https://environment.data.gov.uk/',
'model': 'Water Level',
'model_id': None,
'name': 'My station Water Level Stage',
'name_by_user': None,
'primary_config_entry': <ANY>,
'serial_number': None,
'sw_version': None,
'via_device_id': None,
}),
])
# ---

View File

@@ -1,64 +0,0 @@
"""Tests for initialization."""
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.eafm.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from tests.common import MockConfigEntry
@pytest.mark.usefixtures("mock_get_station")
async def test_load_unload_entry(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
device_registry: dr.DeviceRegistry,
snapshot: SnapshotAssertion,
) -> None:
"""Test being able to load and unload an entry."""
assert await hass.config_entries.async_setup(mock_config_entry.entry_id)
assert mock_config_entry.state is ConfigEntryState.LOADED
await hass.async_block_till_done()
assert (
dr.async_entries_for_config_entry(device_registry, mock_config_entry.entry_id)
== snapshot
)
await hass.config_entries.async_unload(mock_config_entry.entry_id)
@pytest.mark.usefixtures("mock_get_station")
async def test_update_device_identifiers(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
device_registry: dr.DeviceRegistry,
) -> None:
"""Test being able to update device identifiers."""
device_entry = device_registry.async_get_or_create(
config_entry_id=mock_config_entry.entry_id,
identifiers={(DOMAIN, "measure-id", "L1234")},
)
entries = dr.async_entries_for_config_entry(
device_registry, mock_config_entry.entry_id
)
assert len(entries) == 1
device_entry = entries[0]
assert (DOMAIN, "measure-id", "L1234") in device_entry.identifiers
assert (DOMAIN, "L1234") not in device_entry.identifiers
assert await hass.config_entries.async_setup(mock_config_entry.entry_id)
assert mock_config_entry.state is ConfigEntryState.LOADED
await hass.async_block_till_done()
entries = dr.async_entries_for_config_entry(
device_registry, mock_config_entry.entry_id
)
assert len(entries) == 1
device_entry = entries[0]
assert (DOMAIN, "measure-id", "L1234") not in device_entry.identifiers
assert (DOMAIN, "L1234") in device_entry.identifiers

View File

@@ -1,173 +0,0 @@
"""Test ESPHome infrared platform."""
from aioesphomeapi import (
APIClient,
APIConnectionError,
InfraredCapability,
InfraredInfo,
)
from infrared_protocols import NECCommand
import pytest
from homeassistant.components import infrared
from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from .conftest import MockESPHomeDevice, MockESPHomeDeviceType
ENTITY_ID = "infrared.test_ir"
async def _mock_ir_device(
mock_esphome_device: MockESPHomeDeviceType,
mock_client: APIClient,
capabilities: InfraredCapability = InfraredCapability.TRANSMITTER,
) -> MockESPHomeDevice:
entity_info = [
InfraredInfo(object_id="ir", key=1, name="IR", capabilities=capabilities)
]
return await mock_esphome_device(
mock_client=mock_client, entity_info=entity_info, states=[]
)
@pytest.mark.parametrize(
("capabilities", "entity_created"),
[
(InfraredCapability.TRANSMITTER, True),
(InfraredCapability.RECEIVER, False),
(InfraredCapability.TRANSMITTER | InfraredCapability.RECEIVER, True),
(InfraredCapability(0), False),
],
)
async def test_infrared_entity_transmitter(
hass: HomeAssistant,
mock_client: APIClient,
mock_esphome_device: MockESPHomeDeviceType,
capabilities: InfraredCapability,
entity_created: bool,
) -> None:
"""Test infrared entity with transmitter capability is created."""
await _mock_ir_device(mock_esphome_device, mock_client, capabilities)
state = hass.states.get(ENTITY_ID)
assert (state is not None) == entity_created
emitters = infrared.async_get_emitters(hass)
assert (len(emitters) == 1) == entity_created
async def test_infrared_multiple_entities_mixed_capabilities(
hass: HomeAssistant,
mock_client: APIClient,
mock_esphome_device: MockESPHomeDeviceType,
) -> None:
"""Test multiple infrared entities with mixed capabilities."""
entity_info = [
InfraredInfo(
object_id="ir_transmitter",
key=1,
name="IR Transmitter",
capabilities=InfraredCapability.TRANSMITTER,
),
InfraredInfo(
object_id="ir_receiver",
key=2,
name="IR Receiver",
capabilities=InfraredCapability.RECEIVER,
),
InfraredInfo(
object_id="ir_transceiver",
key=3,
name="IR Transceiver",
capabilities=InfraredCapability.TRANSMITTER | InfraredCapability.RECEIVER,
),
]
await mock_esphome_device(
mock_client=mock_client,
entity_info=entity_info,
states=[],
)
# Only transmitter and transceiver should be created
assert hass.states.get("infrared.test_ir_transmitter") is not None
assert hass.states.get("infrared.test_ir_receiver") is None
assert hass.states.get("infrared.test_ir_transceiver") is not None
emitters = infrared.async_get_emitters(hass)
assert len(emitters) == 2
async def test_infrared_send_command_success(
hass: HomeAssistant,
mock_client: APIClient,
mock_esphome_device: MockESPHomeDeviceType,
) -> None:
"""Test sending IR command successfully."""
await _mock_ir_device(mock_esphome_device, mock_client)
command = NECCommand(address=0x04, command=0x08, modulation=38000)
await infrared.async_send_command(hass, ENTITY_ID, command)
# Verify the command was sent to the ESPHome client
mock_client.infrared_rf_transmit_raw_timings.assert_called_once()
call_args = mock_client.infrared_rf_transmit_raw_timings.call_args
assert call_args[0][0] == 1 # key
assert call_args[1]["carrier_frequency"] == 38000
assert call_args[1]["device_id"] == 0
# Verify timings (alternating positive/negative values)
timings = call_args[1]["timings"]
assert len(timings) > 0
for i in range(0, len(timings), 2):
assert timings[i] >= 0
for i in range(1, len(timings), 2):
assert timings[i] <= 0
async def test_infrared_send_command_failure(
hass: HomeAssistant,
mock_client: APIClient,
mock_esphome_device: MockESPHomeDeviceType,
) -> None:
"""Test sending IR command with APIConnectionError raises HomeAssistantError."""
await _mock_ir_device(mock_esphome_device, mock_client)
mock_client.infrared_rf_transmit_raw_timings.side_effect = APIConnectionError(
"Connection lost"
)
command = NECCommand(address=0x04, command=0x08, modulation=38000)
with pytest.raises(HomeAssistantError) as exc_info:
await infrared.async_send_command(hass, ENTITY_ID, command)
assert exc_info.value.translation_domain == "esphome"
assert exc_info.value.translation_key == "error_communicating_with_device"
async def test_infrared_entity_availability(
hass: HomeAssistant,
mock_client: APIClient,
mock_esphome_device: MockESPHomeDeviceType,
) -> None:
"""Test infrared entity becomes available after device reconnects."""
mock_device = await _mock_ir_device(mock_esphome_device, mock_client)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.state != STATE_UNAVAILABLE
await mock_device.mock_disconnect(False)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.state == STATE_UNAVAILABLE
await mock_device.mock_connect()
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.state != STATE_UNAVAILABLE

View File

@@ -84,6 +84,7 @@ GVH5106_SERVICE_INFO = BluetoothServiceInfo(
source="local",
)
GV5125_BUTTON_0_SERVICE_INFO = BluetoothServiceInfo(
name="GV51255367",
address="C1:37:37:32:0F:45",
@@ -162,16 +163,6 @@ GV5123_CLOSED_SERVICE_INFO = BluetoothServiceInfo(
source="24:4C:AB:03:E6:B8",
)
# Encodes: temperature=21.6°C, humidity=67.8%, CO2=531 ppm, no error
GV5140_SERVICE_INFO = BluetoothServiceInfo(
name="GV5140EEFF",
address="AA:BB:CC:DD:EE:FF",
rssi=-63,
manufacturer_data={1: b"\x01\x01\x03\x4e\x66\x02\x13\x00"},
service_uuids=["0000ec88-0000-1000-8000-00805f9b34fb"],
service_data={},
source="local",
)
GVH5124_SERVICE_INFO = BluetoothServiceInfo(
name="GV51242F68",

View File

@@ -17,7 +17,6 @@ from homeassistant.core import HomeAssistant
from homeassistant.util import dt as dt_util
from . import (
GV5140_SERVICE_INFO,
GVH5075_SERVICE_INFO,
GVH5106_SERVICE_INFO,
GVH5178_PRIMARY_SERVICE_INFO,
@@ -164,47 +163,6 @@ async def test_gvh5178_multi_sensor(hass: HomeAssistant) -> None:
assert primary_temp_sensor.state == STATE_UNAVAILABLE
async def test_gv5140(hass: HomeAssistant) -> None:
"""Test CO2, temperature and humidity sensors for a GV5140 device."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id="AA:BB:CC:DD:EE:FF",
)
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 0
inject_bluetooth_service_info(hass, GV5140_SERVICE_INFO)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 3
temp_sensor = hass.states.get("sensor.5140eeff_temperature")
temp_sensor_attributes = temp_sensor.attributes
assert temp_sensor.state == "21.6"
assert temp_sensor_attributes[ATTR_FRIENDLY_NAME] == "5140EEFF Temperature"
assert temp_sensor_attributes[ATTR_UNIT_OF_MEASUREMENT] == "°C"
assert temp_sensor_attributes[ATTR_STATE_CLASS] == "measurement"
humidity_sensor = hass.states.get("sensor.5140eeff_humidity")
humidity_sensor_attributes = humidity_sensor.attributes
assert humidity_sensor.state == "67.8"
assert humidity_sensor_attributes[ATTR_FRIENDLY_NAME] == "5140EEFF Humidity"
assert humidity_sensor_attributes[ATTR_UNIT_OF_MEASUREMENT] == "%"
assert humidity_sensor_attributes[ATTR_STATE_CLASS] == "measurement"
co2_sensor = hass.states.get("sensor.5140eeff_carbon_dioxide")
co2_sensor_attributes = co2_sensor.attributes
assert co2_sensor.state == "531"
assert co2_sensor_attributes[ATTR_FRIENDLY_NAME] == "5140EEFF Carbon Dioxide"
assert co2_sensor_attributes[ATTR_UNIT_OF_MEASUREMENT] == "ppm"
assert co2_sensor_attributes[ATTR_STATE_CLASS] == "measurement"
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
async def test_gvh5106(hass: HomeAssistant) -> None:
"""Test setting up creates the sensors for a device with PM25."""
entry = MockConfigEntry(

View File

@@ -485,7 +485,13 @@ async def test_clear_lock_user_service(
matter_node: MatterNode,
) -> None:
"""Test clear_lock_user entity service."""
matter_client.send_device_command = AsyncMock(return_value=None)
matter_client.send_device_command = AsyncMock(
side_effect=[
# clear_user_credentials: GetUser returns user with no creds
{"userStatus": 1, "credentials": None},
None, # ClearUser
]
)
await hass.services.async_call(
DOMAIN,
@@ -497,9 +503,127 @@ async def test_clear_lock_user_service(
blocking=True,
)
# ClearUser handles credential cleanup per the Matter spec
assert matter_client.send_device_command.call_count == 1
assert matter_client.send_device_command.call_args == call(
assert matter_client.send_device_command.call_count == 2
# Verify GetUser was called to check credentials
assert matter_client.send_device_command.call_args_list[0] == call(
node_id=matter_node.node_id,
endpoint_id=1,
command=clusters.DoorLock.Commands.GetUser(userIndex=1),
)
# Verify ClearUser was called
assert matter_client.send_device_command.call_args_list[1] == call(
node_id=matter_node.node_id,
endpoint_id=1,
command=clusters.DoorLock.Commands.ClearUser(userIndex=1),
timed_request_timeout_ms=10000,
)
@pytest.mark.parametrize("node_fixture", ["mock_door_lock"])
@pytest.mark.parametrize("attributes", [{"1/257/65532": _FEATURE_USR_PIN}])
async def test_clear_lock_user_credentials_nullvalue(
hass: HomeAssistant,
matter_client: MagicMock,
matter_node: MatterNode,
) -> None:
"""Test clear_lock_user handles NullValue credentials from Matter SDK."""
matter_client.send_device_command = AsyncMock(
side_effect=[
# GetUser returns NullValue for credentials (truthy but not iterable)
{"userStatus": 1, "credentials": NullValue},
None, # ClearUser
]
)
await hass.services.async_call(
DOMAIN,
"clear_lock_user",
{
ATTR_ENTITY_ID: "lock.mock_door_lock",
ATTR_USER_INDEX: 1,
},
blocking=True,
)
# GetUser + ClearUser (no ClearCredential since NullValue means no credentials)
assert matter_client.send_device_command.call_count == 2
assert matter_client.send_device_command.call_args_list[0] == call(
node_id=matter_node.node_id,
endpoint_id=1,
command=clusters.DoorLock.Commands.GetUser(userIndex=1),
)
assert matter_client.send_device_command.call_args_list[1] == call(
node_id=matter_node.node_id,
endpoint_id=1,
command=clusters.DoorLock.Commands.ClearUser(userIndex=1),
timed_request_timeout_ms=10000,
)
@pytest.mark.parametrize("node_fixture", ["mock_door_lock"])
@pytest.mark.parametrize("attributes", [{"1/257/65532": _FEATURE_USR_PIN}])
async def test_clear_lock_user_clears_credentials_first(
hass: HomeAssistant,
matter_client: MagicMock,
matter_node: MatterNode,
) -> None:
"""Test clear_lock_user clears credentials before clearing user."""
matter_client.send_device_command = AsyncMock(
side_effect=[
# clear_user_credentials: GetUser returns user with credentials
{
"userStatus": 1,
"credentials": [
{"credentialType": 1, "credentialIndex": 1},
{"credentialType": 1, "credentialIndex": 2},
],
},
None, # ClearCredential for first
None, # ClearCredential for second
None, # ClearUser
]
)
await hass.services.async_call(
DOMAIN,
"clear_lock_user",
{
ATTR_ENTITY_ID: "lock.mock_door_lock",
ATTR_USER_INDEX: 1,
},
blocking=True,
)
# GetUser + 2 ClearCredential + ClearUser
assert matter_client.send_device_command.call_count == 4
assert matter_client.send_device_command.call_args_list[0] == call(
node_id=matter_node.node_id,
endpoint_id=1,
command=clusters.DoorLock.Commands.GetUser(userIndex=1),
)
assert matter_client.send_device_command.call_args_list[1] == call(
node_id=matter_node.node_id,
endpoint_id=1,
command=clusters.DoorLock.Commands.ClearCredential(
credential=clusters.DoorLock.Structs.CredentialStruct(
credentialType=1,
credentialIndex=1,
),
),
timed_request_timeout_ms=10000,
)
assert matter_client.send_device_command.call_args_list[2] == call(
node_id=matter_node.node_id,
endpoint_id=1,
command=clusters.DoorLock.Commands.ClearCredential(
credential=clusters.DoorLock.Structs.CredentialStruct(
credentialType=1,
credentialIndex=2,
),
),
timed_request_timeout_ms=10000,
)
assert matter_client.send_device_command.call_args_list[3] == call(
node_id=matter_node.node_id,
endpoint_id=1,
command=clusters.DoorLock.Commands.ClearUser(userIndex=1),
@@ -2045,8 +2169,13 @@ async def test_clear_lock_user_clear_all(
matter_client: MagicMock,
matter_node: MatterNode,
) -> None:
"""Test clear_lock_user with CLEAR_ALL_INDEX clears all users."""
matter_client.send_device_command = AsyncMock(return_value=None)
"""Test clear_lock_user with CLEAR_ALL_INDEX clears all credentials then users."""
matter_client.send_device_command = AsyncMock(
side_effect=[
None, # ClearCredential(None) - clear all credentials
None, # ClearUser(0xFFFE) - clear all users
]
)
await hass.services.async_call(
DOMAIN,
@@ -2058,9 +2187,16 @@ async def test_clear_lock_user_clear_all(
blocking=True,
)
# ClearUser handles credential cleanup per the Matter spec
assert matter_client.send_device_command.call_count == 1
assert matter_client.send_device_command.call_args == call(
assert matter_client.send_device_command.call_count == 2
# First: ClearCredential with None (clear all)
assert matter_client.send_device_command.call_args_list[0] == call(
node_id=matter_node.node_id,
endpoint_id=1,
command=clusters.DoorLock.Commands.ClearCredential(credential=None),
timed_request_timeout_ms=10000,
)
# Second: ClearUser with CLEAR_ALL_INDEX
assert matter_client.send_device_command.call_args_list[1] == call(
node_id=matter_node.node_id,
endpoint_id=1,
command=clusters.DoorLock.Commands.ClearUser(userIndex=CLEAR_ALL_INDEX),
@@ -2566,3 +2702,69 @@ async def test_set_lock_user_update_with_explicit_type_and_rule(
),
timed_request_timeout_ms=10000,
)
# --- clear_lock_user with mixed credential types ---
@pytest.mark.parametrize("node_fixture", ["mock_door_lock"])
@pytest.mark.parametrize("attributes", [{"1/257/65532": _FEATURE_USR_PIN_RFID}])
async def test_clear_lock_user_mixed_credential_types(
    hass: HomeAssistant,
    matter_client: MagicMock,
    matter_node: MatterNode,
) -> None:
    """Test clear_lock_user clears mixed PIN and RFID credentials.

    The service is expected to issue GetUser first, then one
    ClearCredential per credential returned, and finally ClearUser.
    """
    pin_type = clusters.DoorLock.Enums.CredentialTypeEnum.kPin
    rfid_type = clusters.DoorLock.Enums.CredentialTypeEnum.kRfid
    # Queue device responses in the order the service sends commands.
    matter_client.send_device_command = AsyncMock(
        side_effect=[
            # GetUser returns user with PIN and RFID credentials
            {
                "userStatus": 1,
                "credentials": [
                    {"credentialType": pin_type, "credentialIndex": 1},
                    {"credentialType": rfid_type, "credentialIndex": 2},
                ],
            },
            None,  # ClearCredential for PIN
            None,  # ClearCredential for RFID
            None,  # ClearUser
        ]
    )
    await hass.services.async_call(
        DOMAIN,
        "clear_lock_user",
        {
            ATTR_ENTITY_ID: "lock.mock_door_lock",
            ATTR_USER_INDEX: 1,
        },
        blocking=True,
    )
    # GetUser + 2x ClearCredential + ClearUser
    assert matter_client.send_device_command.call_count == 4
    # Verify PIN credential was cleared
    assert matter_client.send_device_command.call_args_list[1] == call(
        node_id=matter_node.node_id,
        endpoint_id=1,
        command=clusters.DoorLock.Commands.ClearCredential(
            credential=clusters.DoorLock.Structs.CredentialStruct(
                credentialType=pin_type,
                credentialIndex=1,
            ),
        ),
        timed_request_timeout_ms=10000,
    )
    # Verify RFID credential was cleared
    assert matter_client.send_device_command.call_args_list[2] == call(
        node_id=matter_node.node_id,
        endpoint_id=1,
        command=clusters.DoorLock.Commands.ClearCredential(
            credential=clusters.DoorLock.Structs.CredentialStruct(
                credentialType=rfid_type,
                credentialIndex=2,
            ),
        ),
        timed_request_timeout_ms=10000,
    )

View File

@@ -1,5 +1,7 @@
"""Test config flow for Nederlandse Spoorwegen integration."""
from datetime import time
from typing import Any
from unittest.mock import AsyncMock
import pytest
@@ -7,12 +9,13 @@ from requests import ConnectionError as RequestsConnectionError, HTTPError, Time
from homeassistant.components.nederlandse_spoorwegen.const import (
CONF_FROM,
CONF_ROUTES,
CONF_TIME,
CONF_TO,
CONF_VIA,
DOMAIN,
)
from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_RECONFIGURE, SOURCE_USER
from homeassistant.const import CONF_API_KEY, CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
@@ -162,6 +165,174 @@ async def test_already_configured(
assert result["reason"] == "already_configured"
async def test_config_flow_import_success(
    hass: HomeAssistant, mock_nsapi: AsyncMock, mock_setup_entry: AsyncMock
) -> None:
    """Test successful import flow from YAML configuration."""
    # Import source simulates YAML config; only the API key is provided.
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data={CONF_API_KEY: API_KEY},
    )
    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "Nederlandse Spoorwegen"
    assert result["data"] == {CONF_API_KEY: API_KEY}
    # No routes supplied, so no route subentries should be created.
    assert not result["result"].subentries
@pytest.mark.parametrize(
    ("routes_data", "expected_routes_data"),
    [
        (
            # Test with uppercase station codes (UI behavior)
            [
                {
                    CONF_NAME: "Home to Work",
                    CONF_FROM: "ASD",
                    CONF_TO: "RTD",
                    CONF_VIA: "HT",
                    CONF_TIME: time(hour=8, minute=30),
                }
            ],
            [
                {
                    CONF_NAME: "Home to Work",
                    CONF_FROM: "ASD",
                    CONF_TO: "RTD",
                    CONF_VIA: "HT",
                    CONF_TIME: time(hour=8, minute=30),
                }
            ],
        ),
        (
            # Test with lowercase station codes (converted to uppercase)
            [
                {
                    CONF_NAME: "Rotterdam-Amsterdam",
                    CONF_FROM: "rtd",  # lowercase input
                    CONF_TO: "asd",  # lowercase input
                },
                {
                    CONF_NAME: "Amsterdam-Haarlem",
                    CONF_FROM: "asd",  # lowercase input
                    CONF_TO: "ht",  # lowercase input
                    CONF_VIA: "rtd",  # lowercase input
                },
            ],
            [
                {
                    CONF_NAME: "Rotterdam-Amsterdam",
                    CONF_FROM: "RTD",  # converted to uppercase
                    CONF_TO: "ASD",  # converted to uppercase
                },
                {
                    CONF_NAME: "Amsterdam-Haarlem",
                    CONF_FROM: "ASD",  # converted to uppercase
                    CONF_TO: "HT",  # converted to uppercase
                    CONF_VIA: "RTD",  # converted to uppercase
                },
            ],
        ),
    ],
)
async def test_config_flow_import_with_routes(
    hass: HomeAssistant,
    mock_nsapi: AsyncMock,
    mock_setup_entry: AsyncMock,
    routes_data: list[dict[str, Any]],
    expected_routes_data: list[dict[str, Any]],
) -> None:
    """Test import flow with routes from YAML configuration.

    Each imported route becomes a config subentry; the second parametrize
    case checks station codes are normalized to uppercase during import.
    """
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data={
            CONF_API_KEY: API_KEY,
            CONF_ROUTES: routes_data,
        },
    )
    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "Nederlandse Spoorwegen"
    # Routes are stored as subentries, not in the entry data itself.
    assert result["data"] == {CONF_API_KEY: API_KEY}
    assert len(result["result"].subentries) == len(expected_routes_data)
    subentries = list(result["result"].subentries.values())
    # Match each expected route to a subentry by its title (the route name).
    for expected_route in expected_routes_data:
        route_entry = next(
            entry for entry in subentries if entry.title == expected_route[CONF_NAME]
        )
        assert route_entry.data == expected_route
        assert route_entry.subentry_type == "route"
async def test_config_flow_import_with_unknown_station(
    hass: HomeAssistant, mock_nsapi: AsyncMock, mock_setup_entry: AsyncMock
) -> None:
    """Test import flow aborts with unknown station in routes."""
    # "HRM" is presumably absent from the mocked station list, so the
    # route validation should abort the flow — TODO confirm in fixture.
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data={
            CONF_API_KEY: API_KEY,
            CONF_ROUTES: [
                {
                    CONF_NAME: "Home to Work",
                    CONF_FROM: "HRM",
                    CONF_TO: "RTD",
                    CONF_VIA: "HT",
                    CONF_TIME: time(hour=8, minute=30),
                }
            ],
        },
    )
    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "invalid_station"
async def test_config_flow_import_already_configured(
    hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> None:
    """Test import flow when integration is already configured."""
    # Pre-register an existing entry so the import becomes a duplicate.
    mock_config_entry.add_to_hass(hass)
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data={CONF_API_KEY: API_KEY},
    )
    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "already_configured"
@pytest.mark.parametrize(
    ("exception", "expected_error"),
    [
        (HTTPError("Invalid API key"), "invalid_auth"),
        (Timeout("Cannot connect"), "cannot_connect"),
        (RequestsConnectionError("Cannot connect"), "cannot_connect"),
        (Exception("Unexpected error"), "unknown"),
    ],
)
async def test_import_flow_exceptions(
    hass: HomeAssistant,
    mock_nsapi: AsyncMock,
    exception: Exception,
    expected_error: str,
) -> None:
    """Test the import flow aborts with the mapped reason for each API error."""
    # Fail the API's station lookup with the parametrized exception.
    mock_nsapi.get_stations.side_effect = exception
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_API_KEY: API_KEY}
    )
    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == expected_error
async def test_reconfigure_success(
hass: HomeAssistant, mock_nsapi: AsyncMock, mock_config_entry: MockConfigEntry
) -> None:

View File

@@ -11,6 +11,7 @@ from syrupy.assertion import SnapshotAssertion
from homeassistant.components.nederlandse_spoorwegen.const import (
CONF_FROM,
CONF_ROUTES,
CONF_TIME,
CONF_TO,
CONF_VIA,
@@ -18,10 +19,19 @@ from homeassistant.components.nederlandse_spoorwegen.const import (
INTEGRATION_TITLE,
SUBENTRY_TYPE_ROUTE,
)
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.config_entries import ConfigSubentryDataWithId
from homeassistant.const import CONF_API_KEY, CONF_NAME, STATE_UNKNOWN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.const import (
CONF_API_KEY,
CONF_NAME,
CONF_PLATFORM,
STATE_UNKNOWN,
Platform,
)
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
import homeassistant.helpers.entity_registry as er
import homeassistant.helpers.issue_registry as ir
from homeassistant.setup import async_setup_component
from . import setup_integration
from .const import API_KEY
@@ -39,6 +49,41 @@ def mock_sensor_platform() -> Generator:
yield mock_platform
async def test_config_import(
    hass: HomeAssistant,
    mock_nsapi,
    mock_setup_entry: AsyncMock,
    issue_registry: ir.IssueRegistry,
) -> None:
    """Test YAML sensor platform import creates a config entry and repair issue."""
    # Set up the legacy YAML sensor-platform configuration for the domain.
    await async_setup_component(
        hass,
        SENSOR_DOMAIN,
        {
            SENSOR_DOMAIN: [
                {
                    CONF_PLATFORM: DOMAIN,
                    CONF_API_KEY: API_KEY,
                    CONF_ROUTES: [
                        {
                            CONF_NAME: "Spoorwegen Nederlande Station",
                            CONF_FROM: "ASD",
                            CONF_TO: "RTD",
                            CONF_VIA: "HT",
                        }
                    ],
                }
            ]
        },
    )
    await hass.async_block_till_done()
    # Import must raise exactly one deprecation repair issue...
    assert len(issue_registry.issues) == 1
    assert (HOMEASSISTANT_DOMAIN, "deprecated_yaml") in issue_registry.issues
    # ...and create exactly one config entry for the integration.
    assert len(hass.config_entries.async_entries(DOMAIN)) == 1
@pytest.mark.freeze_time("2025-09-15 14:30:00+00:00")
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_sensor(

View File

@@ -1,22 +1,16 @@
"""Tests for the ONVIF integration."""
from __future__ import annotations
from collections import defaultdict
from unittest.mock import AsyncMock, MagicMock, patch
from onvif.exceptions import ONVIFError
from onvif_parsers.model import EventEntity
from zeep.exceptions import Fault
from homeassistant import config_entries
from homeassistant.components.onvif import config_flow
from homeassistant.components.onvif.const import CONF_SNAPSHOT_AUTH
from homeassistant.components.onvif.event import EventManager
from homeassistant.components.onvif.models import (
Capabilities,
DeviceInfo,
Event,
Profile,
PullPointManagerState,
Resolution,
@@ -129,7 +123,7 @@ def setup_mock_onvif_camera(
mock_onvif_camera.side_effect = mock_constructor
def setup_mock_device(mock_device, capabilities=None, profiles=None, events=None):
def setup_mock_device(mock_device, capabilities=None, profiles=None):
"""Prepare mock ONVIFDevice."""
mock_device.async_setup = AsyncMock(return_value=True)
mock_device.port = 80
@@ -155,11 +149,7 @@ def setup_mock_device(mock_device, capabilities=None, profiles=None, events=None
mock_device.events = MagicMock(
webhook_manager=MagicMock(state=WebHookManagerState.STARTED),
pullpoint_manager=MagicMock(state=PullPointManagerState.PAUSED),
async_stop=AsyncMock(),
)
mock_device.device.close = AsyncMock()
if events:
_setup_mock_events(mock_device.events, events)
def mock_constructor(
hass: HomeAssistant, config: config_entries.ConfigEntry
@@ -170,23 +160,6 @@ def setup_mock_device(mock_device, capabilities=None, profiles=None, events=None
mock_device.side_effect = mock_constructor
def _setup_mock_events(mock_events: MagicMock, events: list[Event]) -> None:
"""Configure mock events to return proper Event objects."""
events_by_platform: dict[str, list[Event]] = defaultdict(list)
events_by_uid: dict[str, Event] = {}
uids_by_platform: dict[str, set[str]] = defaultdict(set)
for event in events:
events_by_platform[event.platform].append(event)
events_by_uid[event.uid] = event
uids_by_platform[event.platform].add(event.uid)
mock_events.get_platform.side_effect = lambda p: list(events_by_platform.get(p, []))
mock_events.get_uid.side_effect = events_by_uid.get
mock_events.get_uids_by_platform.side_effect = lambda p: set(
uids_by_platform.get(p, set())
)
async def setup_onvif_integration(
hass: HomeAssistant,
config=None,
@@ -195,8 +168,6 @@ async def setup_onvif_integration(
entry_id="1",
source=config_entries.SOURCE_USER,
capabilities=None,
events=None,
raw_events: list[tuple[str, EventEntity]] | None = None,
) -> tuple[MockConfigEntry, MagicMock, MagicMock]:
"""Create an ONVIF config entry."""
if not config:
@@ -231,35 +202,8 @@ async def setup_onvif_integration(
setup_mock_onvif_camera(mock_onvif_camera, two_profiles=True)
# no discovery
mock_discovery.return_value = []
setup_mock_device(mock_device, capabilities=capabilities, events=events)
setup_mock_device(mock_device, capabilities=capabilities)
mock_device.device = mock_onvif_camera
if raw_events:
# Process raw library events through a real EventManager
# to test the full parsing pipeline including conversions
event_manager = EventManager(hass, mock_onvif_camera, config_entry, NAME)
mock_messages = []
event_by_topic: dict[str, EventEntity] = {}
for topic, raw_event in raw_events:
mock_msg = MagicMock()
mock_msg.Topic._value_1 = topic
mock_messages.append(mock_msg)
event_by_topic[topic] = raw_event
async def mock_parse(topic, unique_id, msg):
return event_by_topic.get(topic)
with patch(
"homeassistant.components.onvif.event.onvif_parsers"
) as mock_parsers:
mock_parsers.parse = mock_parse
mock_parsers.errors.UnknownTopicError = type(
"UnknownTopicError", (Exception,), {}
)
await event_manager.async_parse_messages(mock_messages)
mock_device.events = event_manager
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
return config_entry, mock_onvif_camera, mock_device

View File

@@ -1,137 +0,0 @@
"""Test ONVIF event handling end-to-end."""
from onvif_parsers.model import EventEntity
from homeassistant.components.onvif.models import Capabilities, Event
from homeassistant.const import ATTR_DEVICE_CLASS, STATE_OFF, STATE_ON, EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import MAC, setup_onvif_integration
MOTION_ALARM_UID = f"{MAC}_tns1:VideoSource/MotionAlarm_VideoSourceToken"
IMAGE_TOO_BLURRY_UID = (
f"{MAC}_tns1:VideoSource/ImageTooBlurry/AnalyticsService_VideoSourceToken"
)
LAST_RESET_UID = f"{MAC}_tns1:Monitoring/LastReset_0"
async def test_motion_alarm_event(hass: HomeAssistant) -> None:
"""Test that a motion alarm event creates a binary sensor."""
await setup_onvif_integration(
hass,
capabilities=Capabilities(events=True, imaging=True, ptz=True),
events=[
Event(
uid=MOTION_ALARM_UID,
name="Motion Alarm",
platform="binary_sensor",
device_class="motion",
value=True,
),
],
)
state = hass.states.get("binary_sensor.testcamera_motion_alarm")
assert state is not None
assert state.state == STATE_ON
assert state.attributes[ATTR_DEVICE_CLASS] == "motion"
async def test_motion_alarm_event_off(hass: HomeAssistant) -> None:
"""Test that a motion alarm event with false value is off."""
await setup_onvif_integration(
hass,
capabilities=Capabilities(events=True, imaging=True, ptz=True),
events=[
Event(
uid=MOTION_ALARM_UID,
name="Motion Alarm",
platform="binary_sensor",
device_class="motion",
value=False,
),
],
)
state = hass.states.get("binary_sensor.testcamera_motion_alarm")
assert state is not None
assert state.state == STATE_OFF
async def test_diagnostic_event_entity_category(
hass: HomeAssistant, entity_registry: er.EntityRegistry
) -> None:
"""Test that a diagnostic event gets the correct entity category."""
await setup_onvif_integration(
hass,
capabilities=Capabilities(events=True, imaging=True, ptz=True),
events=[
Event(
uid=IMAGE_TOO_BLURRY_UID,
name="Image Too Blurry",
platform="binary_sensor",
device_class="problem",
value=True,
entity_category=EntityCategory.DIAGNOSTIC,
),
],
)
state = hass.states.get("binary_sensor.testcamera_image_too_blurry")
assert state is not None
assert state.state == STATE_ON
entry = entity_registry.async_get("binary_sensor.testcamera_image_too_blurry")
assert entry is not None
assert entry.entity_category is EntityCategory.DIAGNOSTIC
async def test_timestamp_event_conversion(hass: HomeAssistant) -> None:
"""Test that timestamp sensor events get string values converted to datetime."""
await setup_onvif_integration(
hass,
capabilities=Capabilities(events=True, imaging=True, ptz=True),
raw_events=[
(
"tns1:Monitoring/LastReset",
EventEntity(
uid=LAST_RESET_UID,
name="Last Reset",
platform="sensor",
device_class="timestamp",
value="2023-10-01T12:00:00Z",
),
),
],
)
state = hass.states.get("sensor.testcamera_last_reset")
assert state is not None
# Verify the string was converted to a datetime (raw string would end
# with "Z", converted datetime rendered by SensorEntity has "+00:00")
assert state.state == "2023-10-01T12:00:00+00:00"
async def test_timestamp_event_invalid_value(hass: HomeAssistant) -> None:
"""Test that invalid timestamp values result in unknown state."""
await setup_onvif_integration(
hass,
capabilities=Capabilities(events=True, imaging=True, ptz=True),
raw_events=[
(
"tns1:Monitoring/LastReset",
EventEntity(
uid=LAST_RESET_UID,
name="Last Reset",
platform="sensor",
device_class="timestamp",
value="0000-00-00T00:00:00Z",
),
),
],
)
state = hass.states.get("sensor.testcamera_last_reset")
assert state is not None
assert state.state == "unknown"

View File

@@ -0,0 +1,881 @@
"""Test ONVIF parsers."""
import datetime
import os
import onvif
import onvif.settings
import pytest
from zeep import Client
from zeep.transports import Transport
from homeassistant.components.onvif import models, parsers
from homeassistant.core import HomeAssistant
TEST_UID = "test-unique-id"
async def get_event(notification_data: dict) -> models.Event:
    """Build a zeep notification from a raw dict and run it through the parser.

    When the parser hits an unknown topic it logs 'No registered handler for
    event from ...'; when it fails to parse, it logs 'Unable to parse event
    from ...'. Both messages include the serialized zeep payload, which can
    be pasted straight into this helper as ``notification_data`` to write a
    new regression test for that message.
    """
    client = Client(
        f"{os.path.dirname(onvif.__file__)}/wsdl/events.wsdl",
        wsse=None,
        transport=Transport(),
    )
    holder_type = client.get_type("ns5:NotificationMessageHolderType")
    assert holder_type is not None
    notification = holder_type(**notification_data)
    assert notification is not None
    # The xsd:any payload embedded in the message doesn't parse automatically,
    # so build the inner Message element by hand and splice it in.
    message_element = client.get_element("ns8:Message")
    assert message_element is not None
    inner_message = message_element(**notification_data["Message"]["_value_1"])
    assert inner_message is not None
    notification.Message._value_1 = inner_message
    handler = parsers.PARSERS.get(notification.Topic._value_1)
    assert handler is not None
    return await handler(TEST_UID, notification)
async def test_line_detector_crossed(hass: HomeAssistant) -> None:
    """Tests tns1:RuleEngine/LineDetector/Crossed."""
    # Payload mirrors zeep's serialized NotificationMessageHolderType; see
    # get_event's docstring for how to capture one from parser log output.
    event = await get_event(
        {
            "SubscriptionReference": {
                "Address": {"_value_1": None, "_attr_1": None},
                "ReferenceParameters": None,
                "Metadata": None,
                "_value_1": None,
                "_attr_1": None,
            },
            "Topic": {
                "_value_1": "tns1:RuleEngine/LineDetector/Crossed",
                "Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet",
                "_attr_1": {},
            },
            "ProducerReference": {
                "Address": {
                    "_value_1": "xx.xx.xx.xx/onvif/event/alarm",
                    "_attr_1": None,
                },
                "ReferenceParameters": None,
                "Metadata": None,
                "_value_1": None,
                "_attr_1": None,
            },
            "Message": {
                "_value_1": {
                    "Source": {
                        "SimpleItem": [
                            {
                                "Name": "VideoSourceConfigurationToken",
                                "Value": "video_source_config1",
                            },
                            {
                                "Name": "VideoAnalyticsConfigurationToken",
                                "Value": "analytics_video_source",
                            },
                            {"Name": "Rule", "Value": "MyLineDetectorRule"},
                        ],
                        "ElementItem": [],
                        "Extension": None,
                        "_attr_1": None,
                    },
                    "Key": None,
                    "Data": {
                        "SimpleItem": [{"Name": "ObjectId", "Value": "0"}],
                        "ElementItem": [],
                        "Extension": None,
                        "_attr_1": None,
                    },
                    "Extension": None,
                    "UtcTime": datetime.datetime(2020, 5, 24, 7, 24, 47),
                    "PropertyOperation": "Initialized",
                    "_attr_1": {},
                }
            },
        }
    )
    assert event is not None
    # The crossed-object id is surfaced as a plain sensor string value.
    assert event.name == "Line Detector Crossed"
    assert event.platform == "sensor"
    assert event.value == "0"
    # The uid embeds the topic plus source/analytics tokens and the rule name.
    assert event.uid == (
        f"{TEST_UID}_tns1:RuleEngine/LineDetector/"
        "Crossed_video_source_config1_analytics_video_source_MyLineDetectorRule"
    )
async def test_tapo_line_crossed(hass: HomeAssistant) -> None:
"""Tests tns1:RuleEngine/CellMotionDetector/LineCross."""
event = await get_event(
{
"SubscriptionReference": {
"Address": {
"_value_1": "http://CAMERA_LOCAL_IP:2020/event-0_2020",
"_attr_1": None,
},
"ReferenceParameters": None,
"Metadata": None,
"_value_1": None,
"_attr_1": None,
},
"Topic": {
"_value_1": "tns1:RuleEngine/CellMotionDetector/LineCross",
"Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet",
"_attr_1": {},
},
"ProducerReference": {
"Address": {
"_value_1": "http://CAMERA_LOCAL_IP:5656/event",
"_attr_1": None,
},
"ReferenceParameters": None,
"Metadata": None,
"_value_1": None,
"_attr_1": None,
},
"Message": {
"_value_1": {
"Source": {
"SimpleItem": [
{
"Name": "VideoSourceConfigurationToken",
"Value": "vsconf",
},
{
"Name": "VideoAnalyticsConfigurationToken",
"Value": "VideoAnalyticsToken",
},
{"Name": "Rule", "Value": "MyLineCrossDetectorRule"},
],
"ElementItem": [],
"Extension": None,
"_attr_1": None,
},
"Key": None,
"Data": {
"SimpleItem": [{"Name": "IsLineCross", "Value": "true"}],
"ElementItem": [],
"Extension": None,
"_attr_1": None,
},
"Extension": None,
"UtcTime": datetime.datetime(
2025, 1, 3, 21, 5, 14, tzinfo=datetime.UTC
),
"PropertyOperation": "Changed",
"_attr_1": {},
}
},
}
)
assert event is not None
assert event.name == "Line Detector Crossed"
assert event.platform == "binary_sensor"
assert event.device_class == "motion"
assert event.value
assert event.uid == (
f"{TEST_UID}_tns1:RuleEngine/CellMotionDetector/"
"LineCross_VideoSourceToken_VideoAnalyticsToken_MyLineCrossDetectorRule"
)
async def test_tapo_tpsmartevent_vehicle(hass: HomeAssistant) -> None:
"""Tests tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent - vehicle."""
event = await get_event(
{
"Message": {
"_value_1": {
"Data": {
"ElementItem": [],
"Extension": None,
"SimpleItem": [{"Name": "IsVehicle", "Value": "true"}],
"_attr_1": None,
},
"Extension": None,
"Key": None,
"PropertyOperation": "Changed",
"Source": {
"ElementItem": [],
"Extension": None,
"SimpleItem": [
{
"Name": "VideoSourceConfigurationToken",
"Value": "vsconf",
},
{
"Name": "VideoAnalyticsConfigurationToken",
"Value": "VideoAnalyticsToken",
},
{
"Name": "Rule",
"Value": "MyTPSmartEventDetectorRule",
},
],
"_attr_1": None,
},
"UtcTime": datetime.datetime(
2024, 11, 2, 0, 33, 11, tzinfo=datetime.UTC
),
"_attr_1": {},
}
},
"ProducerReference": {
"Address": {
"_attr_1": None,
"_value_1": "http://192.168.56.127:5656/event",
},
"Metadata": None,
"ReferenceParameters": None,
"_attr_1": None,
"_value_1": None,
},
"SubscriptionReference": {
"Address": {
"_attr_1": None,
"_value_1": "http://192.168.56.127:2020/event-0_2020",
},
"Metadata": None,
"ReferenceParameters": None,
"_attr_1": None,
"_value_1": None,
},
"Topic": {
"Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet",
"_attr_1": {},
"_value_1": "tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent",
},
}
)
assert event is not None
assert event.name == "Vehicle Detection"
assert event.platform == "binary_sensor"
assert event.device_class == "motion"
assert event.value
assert event.uid == (
f"{TEST_UID}_tns1:RuleEngine/TPSmartEventDetector/"
"TPSmartEvent_VideoSourceToken_VideoAnalyticsToken_MyTPSmartEventDetectorRule"
)
async def test_tapo_cellmotiondetector_vehicle(hass: HomeAssistant) -> None:
"""Tests tns1:RuleEngine/CellMotionDetector/TpSmartEvent - vehicle."""
event = await get_event(
{
"SubscriptionReference": {
"Address": {
"_value_1": "http://CAMERA_LOCAL_IP:2020/event-0_2020",
"_attr_1": None,
},
"ReferenceParameters": None,
"Metadata": None,
"_value_1": None,
"_attr_1": None,
},
"Topic": {
"_value_1": "tns1:RuleEngine/CellMotionDetector/TpSmartEvent",
"Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet",
"_attr_1": {},
},
"ProducerReference": {
"Address": {
"_value_1": "http://CAMERA_LOCAL_IP:5656/event",
"_attr_1": None,
},
"ReferenceParameters": None,
"Metadata": None,
"_value_1": None,
"_attr_1": None,
},
"Message": {
"_value_1": {
"Source": {
"SimpleItem": [
{
"Name": "VideoSourceConfigurationToken",
"Value": "vsconf",
},
{
"Name": "VideoAnalyticsConfigurationToken",
"Value": "VideoAnalyticsToken",
},
{"Name": "Rule", "Value": "MyTPSmartEventDetectorRule"},
],
"ElementItem": [],
"Extension": None,
"_attr_1": None,
},
"Key": None,
"Data": {
"SimpleItem": [{"Name": "IsVehicle", "Value": "true"}],
"ElementItem": [],
"Extension": None,
"_attr_1": None,
},
"Extension": None,
"UtcTime": datetime.datetime(
2025, 1, 5, 14, 2, 9, tzinfo=datetime.UTC
),
"PropertyOperation": "Changed",
"_attr_1": {},
}
},
}
)
assert event is not None
assert event.name == "Vehicle Detection"
assert event.platform == "binary_sensor"
assert event.device_class == "motion"
assert event.value
assert event.uid == (
f"{TEST_UID}_tns1:RuleEngine/CellMotionDetector/"
"TpSmartEvent_VideoSourceToken_VideoAnalyticsToken_MyTPSmartEventDetectorRule"
)
async def test_tapo_tpsmartevent_person(hass: HomeAssistant) -> None:
"""Tests tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent - person."""
event = await get_event(
{
"Message": {
"_value_1": {
"Data": {
"ElementItem": [],
"Extension": None,
"SimpleItem": [{"Name": "IsPeople", "Value": "true"}],
"_attr_1": None,
},
"Extension": None,
"Key": None,
"PropertyOperation": "Changed",
"Source": {
"ElementItem": [],
"Extension": None,
"SimpleItem": [
{
"Name": "VideoSourceConfigurationToken",
"Value": "vsconf",
},
{
"Name": "VideoAnalyticsConfigurationToken",
"Value": "VideoAnalyticsToken",
},
{"Name": "Rule", "Value": "MyPeopleDetectorRule"},
],
"_attr_1": None,
},
"UtcTime": datetime.datetime(
2024, 11, 3, 18, 40, 43, tzinfo=datetime.UTC
),
"_attr_1": {},
}
},
"ProducerReference": {
"Address": {
"_attr_1": None,
"_value_1": "http://192.168.56.127:5656/event",
},
"Metadata": None,
"ReferenceParameters": None,
"_attr_1": None,
"_value_1": None,
},
"SubscriptionReference": {
"Address": {
"_attr_1": None,
"_value_1": "http://192.168.56.127:2020/event-0_2020",
},
"Metadata": None,
"ReferenceParameters": None,
"_attr_1": None,
"_value_1": None,
},
"Topic": {
"Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet",
"_attr_1": {},
"_value_1": "tns1:RuleEngine/PeopleDetector/People",
},
}
)
assert event is not None
assert event.name == "Person Detection"
assert event.platform == "binary_sensor"
assert event.device_class == "motion"
assert event.value
assert event.uid == (
f"{TEST_UID}_tns1:RuleEngine/PeopleDetector/"
"People_VideoSourceToken_VideoAnalyticsToken_MyPeopleDetectorRule"
)
async def test_tapo_tpsmartevent_pet(hass: HomeAssistant) -> None:
"""Tests tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent - pet."""
event = await get_event(
{
"SubscriptionReference": {
"Address": {
"_value_1": "http://192.168.56.63:2020/event-0_2020",
"_attr_1": None,
},
"ReferenceParameters": None,
"Metadata": None,
"_value_1": None,
"_attr_1": None,
},
"Topic": {
"_value_1": "tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent",
"Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet",
"_attr_1": {},
},
"ProducerReference": {
"Address": {
"_value_1": "http://192.168.56.63:5656/event",
"_attr_1": None,
},
"ReferenceParameters": None,
"Metadata": None,
"_value_1": None,
"_attr_1": None,
},
"Message": {
"_value_1": {
"Source": {
"SimpleItem": [
{
"Name": "VideoSourceConfigurationToken",
"Value": "vsconf",
},
{
"Name": "VideoAnalyticsConfigurationToken",
"Value": "VideoAnalyticsToken",
},
{"Name": "Rule", "Value": "MyTPSmartEventDetectorRule"},
],
"ElementItem": [],
"Extension": None,
"_attr_1": None,
},
"Key": None,
"Data": {
"SimpleItem": [{"Name": "IsPet", "Value": "true"}],
"ElementItem": [],
"Extension": None,
"_attr_1": None,
},
"Extension": None,
"UtcTime": datetime.datetime(
2025, 1, 22, 13, 24, 57, tzinfo=datetime.UTC
),
"PropertyOperation": "Changed",
"_attr_1": {},
}
},
}
)
assert event is not None
assert event.name == "Pet Detection"
assert event.platform == "binary_sensor"
assert event.device_class == "motion"
assert event.value
assert event.uid == (
f"{TEST_UID}_tns1:RuleEngine/TPSmartEventDetector/"
"TPSmartEvent_VideoSourceToken_VideoAnalyticsToken_MyTPSmartEventDetectorRule"
)
async def test_tapo_cellmotiondetector_person(hass: HomeAssistant) -> None:
"""Tests tns1:RuleEngine/CellMotionDetector/People - person."""
event = await get_event(
{
"SubscriptionReference": {
"Address": {
"_value_1": "http://192.168.56.63:2020/event-0_2020",
"_attr_1": None,
},
"ReferenceParameters": None,
"Metadata": None,
"_value_1": None,
"_attr_1": None,
},
"Topic": {
"_value_1": "tns1:RuleEngine/CellMotionDetector/People",
"Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet",
"_attr_1": {},
},
"ProducerReference": {
"Address": {
"_value_1": "http://192.168.56.63:5656/event",
"_attr_1": None,
},
"ReferenceParameters": None,
"Metadata": None,
"_value_1": None,
"_attr_1": None,
},
"Message": {
"_value_1": {
"Source": {
"SimpleItem": [
{
"Name": "VideoSourceConfigurationToken",
"Value": "vsconf",
},
{
"Name": "VideoAnalyticsConfigurationToken",
"Value": "VideoAnalyticsToken",
},
{"Name": "Rule", "Value": "MyPeopleDetectorRule"},
],
"ElementItem": [],
"Extension": None,
"_attr_1": None,
},
"Key": None,
"Data": {
"SimpleItem": [{"Name": "IsPeople", "Value": "true"}],
"ElementItem": [],
"Extension": None,
"_attr_1": None,
},
"Extension": None,
"UtcTime": datetime.datetime(
2025, 1, 3, 20, 9, 22, tzinfo=datetime.UTC
),
"PropertyOperation": "Changed",
"_attr_1": {},
}
},
}
)
assert event is not None
assert event.name == "Person Detection"
assert event.platform == "binary_sensor"
assert event.device_class == "motion"
assert event.value
assert event.uid == (
f"{TEST_UID}_tns1:RuleEngine/CellMotionDetector/"
"People_VideoSourceToken_VideoAnalyticsToken_MyPeopleDetectorRule"
)
async def test_tapo_tamper(hass: HomeAssistant) -> None:
"""Tests tns1:RuleEngine/CellMotionDetector/Tamper - tamper."""
event = await get_event(
{
"SubscriptionReference": {
"Address": {
"_value_1": "http://CAMERA_LOCAL_IP:2020/event-0_2020",
"_attr_1": None,
},
"ReferenceParameters": None,
"Metadata": None,
"_value_1": None,
"_attr_1": None,
},
"Topic": {
"_value_1": "tns1:RuleEngine/CellMotionDetector/Tamper",
"Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet",
"_attr_1": {},
},
"ProducerReference": {
"Address": {
"_value_1": "http://CAMERA_LOCAL_IP:5656/event",
"_attr_1": None,
},
"ReferenceParameters": None,
"Metadata": None,
"_value_1": None,
"_attr_1": None,
},
"Message": {
"_value_1": {
"Source": {
"SimpleItem": [
{
"Name": "VideoSourceConfigurationToken",
"Value": "vsconf",
},
{
"Name": "VideoAnalyticsConfigurationToken",
"Value": "VideoAnalyticsToken",
},
{"Name": "Rule", "Value": "MyTamperDetectorRule"},
],
"ElementItem": [],
"Extension": None,
"_attr_1": None,
},
"Key": None,
"Data": {
"SimpleItem": [{"Name": "IsTamper", "Value": "true"}],
"ElementItem": [],
"Extension": None,
"_attr_1": None,
},
"Extension": None,
"UtcTime": datetime.datetime(
2025, 1, 5, 21, 1, 5, tzinfo=datetime.UTC
),
"PropertyOperation": "Changed",
"_attr_1": {},
}
},
}
)
assert event is not None
assert event.name == "Tamper Detection"
assert event.platform == "binary_sensor"
assert event.device_class == "tamper"
assert event.value
assert event.uid == (
f"{TEST_UID}_tns1:RuleEngine/CellMotionDetector/"
"Tamper_VideoSourceToken_VideoAnalyticsToken_MyTamperDetectorRule"
)
async def test_tapo_intrusion(hass: HomeAssistant) -> None:
    """Tests tns1:RuleEngine/CellMotionDetector/Intrusion - intrusion."""
    # Source items identifying the video source, analytics config and rule
    # for the Tapo CellMotionDetector intrusion event.
    source_items = [
        {"Name": "VideoSourceConfigurationToken", "Value": "vsconf"},
        {"Name": "VideoAnalyticsConfigurationToken", "Value": "VideoAnalyticsToken"},
        {"Name": "Rule", "Value": "MyIntrusionDetectorRule"},
    ]
    notification = {
        "SubscriptionReference": {
            "Address": {
                "_value_1": "http://192.168.100.155:2020/event-0_2020",
                "_attr_1": None,
            },
            "ReferenceParameters": None,
            "Metadata": None,
            "_value_1": None,
            "_attr_1": None,
        },
        "Topic": {
            "_value_1": "tns1:RuleEngine/CellMotionDetector/Intrusion",
            "Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet",
            "_attr_1": {},
        },
        "ProducerReference": {
            "Address": {
                "_value_1": "http://192.168.100.155:5656/event",
                "_attr_1": None,
            },
            "ReferenceParameters": None,
            "Metadata": None,
            "_value_1": None,
            "_attr_1": None,
        },
        "Message": {
            "_value_1": {
                "Source": {
                    "SimpleItem": source_items,
                    "ElementItem": [],
                    "Extension": None,
                    "_attr_1": None,
                },
                "Key": None,
                "Data": {
                    "SimpleItem": [{"Name": "IsIntrusion", "Value": "true"}],
                    "ElementItem": [],
                    "Extension": None,
                    "_attr_1": None,
                },
                "Extension": None,
                "UtcTime": datetime.datetime(
                    2025, 1, 11, 10, 40, 45, tzinfo=datetime.UTC
                ),
                "PropertyOperation": "Changed",
                "_attr_1": {},
            }
        },
    }

    event = await get_event(notification)

    # The payload must parse into an "on" safety binary sensor.
    assert event is not None
    assert event.name == "Intrusion Detection"
    assert event.platform == "binary_sensor"
    assert event.device_class == "safety"
    assert event.value
    expected_uid = (
        f"{TEST_UID}_tns1:RuleEngine/CellMotionDetector/"
        "Intrusion_VideoSourceToken_VideoAnalyticsToken_MyIntrusionDetectorRule"
    )
    assert event.uid == expected_uid
async def test_tapo_missing_attributes(hass: HomeAssistant) -> None:
    """Tests async_parse_tplink_detector with missing fields."""
    # Payload deliberately omits the Source block; the parser is expected to
    # fail with an AttributeError whose message mentions "SimpleItem".
    truncated_payload = {
        "Message": {
            "_value_1": {
                "Data": {
                    "ElementItem": [],
                    "Extension": None,
                    "SimpleItem": [{"Name": "IsPeople", "Value": "true"}],
                    "_attr_1": None,
                },
            }
        },
        "Topic": {
            "_value_1": "tns1:RuleEngine/PeopleDetector/People",
        },
    }
    with pytest.raises(AttributeError, match="SimpleItem"):
        await get_event(truncated_payload)
async def test_tapo_unknown_type(hass: HomeAssistant) -> None:
    """Tests async_parse_tplink_detector with unknown event type."""
    # Data item name "IsNotPerson" is not a recognized detector type,
    # so parsing must yield no event.
    payload = {
        "Message": {
            "_value_1": {
                "Data": {
                    "ElementItem": [],
                    "Extension": None,
                    "SimpleItem": [{"Name": "IsNotPerson", "Value": "true"}],
                    "_attr_1": None,
                },
                "Source": {
                    "ElementItem": [],
                    "Extension": None,
                    "SimpleItem": [
                        {
                            "Name": "VideoSourceConfigurationToken",
                            "Value": "vsconf",
                        },
                        {
                            "Name": "VideoAnalyticsConfigurationToken",
                            "Value": "VideoAnalyticsToken",
                        },
                        {"Name": "Rule", "Value": "MyPeopleDetectorRule"},
                    ],
                },
            }
        },
        "Topic": {
            "_value_1": "tns1:RuleEngine/PeopleDetector/People",
        },
    }
    assert await get_event(payload) is None
async def test_reolink_package(hass: HomeAssistant) -> None:
    """Tests reolink package event."""
    # Reolink reports package detection via a custom MyRuleDetector topic;
    # Source carries only a numeric channel identifier ("000").
    notification = {
        "SubscriptionReference": None,
        "Topic": {
            "_value_1": "tns1:RuleEngine/MyRuleDetector/Package",
            "Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet",
            "_attr_1": {},
        },
        "ProducerReference": None,
        "Message": {
            "_value_1": {
                "Source": {
                    "SimpleItem": [{"Name": "Source", "Value": "000"}],
                    "ElementItem": [],
                    "Extension": None,
                    "_attr_1": None,
                },
                "Key": None,
                "Data": {
                    "SimpleItem": [{"Name": "State", "Value": "true"}],
                    "ElementItem": [],
                    "Extension": None,
                    "_attr_1": None,
                },
                "Extension": None,
                "UtcTime": datetime.datetime(
                    2025, 3, 12, 9, 54, 27, tzinfo=datetime.UTC
                ),
                "PropertyOperation": "Initialized",
                "_attr_1": {},
            }
        },
    }

    event = await get_event(notification)

    # The payload must parse into an "on" occupancy binary sensor.
    assert event is not None
    assert event.name == "Package Detection"
    assert event.platform == "binary_sensor"
    assert event.device_class == "occupancy"
    assert event.value
    assert event.uid == f"{TEST_UID}_tns1:RuleEngine/MyRuleDetector/Package_000"
async def test_hikvision_alarm(hass: HomeAssistant) -> None:
    """Tests hikvision camera alarm event."""
    # Hikvision delivers alarm-input triggers on a vendor-namespaced topic
    # (tnshik:AlarmIn) with the input token in the Source items.
    notification = {
        "SubscriptionReference": None,
        "Topic": {
            "_value_1": "tns1:Device/Trigger/tnshik:AlarmIn",
            "Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet",
            "_attr_1": {},
        },
        "ProducerReference": None,
        "Message": {
            "_value_1": {
                "Source": {
                    "SimpleItem": [{"Name": "AlarmInToken", "Value": "AlarmIn_1"}],
                    "ElementItem": [],
                    "Extension": None,
                    "_attr_1": None,
                },
                "Key": None,
                "Data": {
                    "SimpleItem": [{"Name": "State", "Value": "true"}],
                    "ElementItem": [],
                    "Extension": None,
                    "_attr_1": None,
                },
                "Extension": None,
                "UtcTime": datetime.datetime(
                    2025, 3, 13, 22, 57, 26, tzinfo=datetime.UTC
                ),
                "PropertyOperation": "Initialized",
                "_attr_1": {},
            }
        },
    }

    event = await get_event(notification)

    # The payload must parse into an "on" motion binary sensor.
    assert event is not None
    assert event.name == "Motion Alarm"
    assert event.platform == "binary_sensor"
    assert event.device_class == "motion"
    assert event.value
    assert event.uid == f"{TEST_UID}_tns1:Device/Trigger/tnshik:AlarmIn_AlarmIn_1"

View File

@@ -6,19 +6,19 @@ from unittest.mock import AsyncMock
import pytest
from homeassistant.components.binary_sensor import BinarySensorDeviceClass
from homeassistant.components.satel_integra.const import (
from homeassistant.components.satel_integra import (
CONF_ARM_HOME_MODE,
CONF_OUTPUT_NUMBER,
CONF_PARTITION_NUMBER,
CONF_SWITCHABLE_OUTPUT_NUMBER,
CONF_ZONE_NUMBER,
CONF_ZONE_TYPE,
DEFAULT_PORT,
SUBENTRY_TYPE_OUTPUT,
SUBENTRY_TYPE_PARTITION,
SUBENTRY_TYPE_SWITCHABLE_OUTPUT,
SUBENTRY_TYPE_ZONE,
)
from homeassistant.components.satel_integra.const import DEFAULT_PORT
from homeassistant.config_entries import ConfigSubentry
from homeassistant.const import CONF_CODE, CONF_HOST, CONF_NAME, CONF_PORT
from homeassistant.core import HomeAssistant

View File

@@ -8,15 +8,24 @@ import pytest
from homeassistant.components.binary_sensor import BinarySensorDeviceClass
from homeassistant.components.satel_integra.const import (
CONF_ARM_HOME_MODE,
CONF_DEVICE_PARTITIONS,
CONF_OUTPUT_NUMBER,
CONF_OUTPUTS,
CONF_PARTITION_NUMBER,
CONF_SWITCHABLE_OUTPUT_NUMBER,
CONF_SWITCHABLE_OUTPUTS,
CONF_ZONE_NUMBER,
CONF_ZONE_TYPE,
CONF_ZONES,
DEFAULT_PORT,
DOMAIN,
)
from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER, ConfigSubentry
from homeassistant.config_entries import (
SOURCE_IMPORT,
SOURCE_RECONFIGURE,
SOURCE_USER,
ConfigSubentry,
)
from homeassistant.const import CONF_CODE, CONF_HOST, CONF_NAME, CONF_PORT
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
@@ -109,6 +118,75 @@ async def test_setup_connection_failed(
assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
("import_input", "entry_data", "entry_options"),
[
(
{
CONF_HOST: MOCK_CONFIG_DATA[CONF_HOST],
CONF_PORT: MOCK_CONFIG_DATA[CONF_PORT],
CONF_CODE: MOCK_CONFIG_OPTIONS[CONF_CODE],
CONF_DEVICE_PARTITIONS: {
"1": {CONF_NAME: "Partition Import 1", CONF_ARM_HOME_MODE: 1}
},
CONF_ZONES: {
"1": {CONF_NAME: "Zone Import 1", CONF_ZONE_TYPE: "motion"},
"2": {CONF_NAME: "Zone Import 2", CONF_ZONE_TYPE: "door"},
},
CONF_OUTPUTS: {
"1": {CONF_NAME: "Output Import 1", CONF_ZONE_TYPE: "light"},
"2": {CONF_NAME: "Output Import 2", CONF_ZONE_TYPE: "safety"},
},
CONF_SWITCHABLE_OUTPUTS: {
"1": {CONF_NAME: "Switchable output Import 1"},
"2": {CONF_NAME: "Switchable output Import 2"},
},
},
MOCK_CONFIG_DATA,
MOCK_CONFIG_OPTIONS,
)
],
)
async def test_import_flow(
hass: HomeAssistant,
mock_satel: AsyncMock,
mock_setup_entry: AsyncMock,
import_input: dict[str, Any],
entry_data: dict[str, Any],
entry_options: dict[str, Any],
) -> None:
"""Test the import flow."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=import_input
)
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == MOCK_CONFIG_DATA[CONF_HOST]
assert result["data"] == entry_data
assert result["options"] == entry_options
assert len(result["subentries"]) == 7
assert len(mock_setup_entry.mock_calls) == 1
async def test_import_flow_connection_failure(
hass: HomeAssistant, mock_satel: AsyncMock
) -> None:
"""Test the import flow."""
mock_satel.connect.return_value = False
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=MOCK_CONFIG_DATA,
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "cannot_connect"
@pytest.mark.parametrize(
("user_input", "entry_options"),
[

View File

@@ -24,7 +24,6 @@ from homeassistant.components.vacuum import (
VacuumEntityFeature,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import entity_registry as er, issue_registry as ir
from . import (
@@ -277,41 +276,6 @@ async def test_clean_area_service(
assert mock_vacuum.clean_segments_calls[0][0] == targeted_segments
@pytest.mark.usefixtures("config_flow_fixture")
async def test_clean_area_not_configured(hass: HomeAssistant) -> None:
"""Test clean_area raises when area mapping is not configured."""
mock_vacuum = MockVacuumWithCleanArea(name="Testing", entity_id="vacuum.testing")
config_entry = MockConfigEntry(domain="test")
config_entry.add_to_hass(hass)
mock_integration(
hass,
MockModule(
"test",
async_setup_entry=help_async_setup_entry_init,
async_unload_entry=help_async_unload_entry,
),
)
setup_test_component_platform(hass, DOMAIN, [mock_vacuum], from_config_entry=True)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
with pytest.raises(ServiceValidationError) as exc_info:
await hass.services.async_call(
DOMAIN,
SERVICE_CLEAN_AREA,
{"entity_id": mock_vacuum.entity_id, "cleaning_area_id": ["area_1"]},
blocking=True,
)
assert exc_info.value.translation_domain == DOMAIN
assert exc_info.value.translation_key == "area_mapping_not_configured"
assert exc_info.value.translation_placeholders == {
"entity_id": mock_vacuum.entity_id
}
@pytest.mark.usefixtures("config_flow_fixture")
@pytest.mark.parametrize(
("area_mapping", "targeted_areas"),
@@ -344,6 +308,13 @@ async def test_clean_area_no_segments(
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
await hass.services.async_call(
DOMAIN,
SERVICE_CLEAN_AREA,
{"entity_id": mock_vacuum.entity_id, "cleaning_area_id": targeted_areas},
blocking=True,
)
entity_registry.async_update_entity_options(
mock_vacuum.entity_id,
DOMAIN,

View File

@@ -163,7 +163,7 @@ def mock_xbox_live_client() -> Generator[AsyncMock]:
)
client.people = AsyncMock()
client.people.get_friend_by_xuid.return_value = PeopleResponse(
client.people.get_friends_by_xuid.return_value = PeopleResponse(
**load_json_object_fixture("people_batch.json", DOMAIN)
)
client.people.get_friends_own.return_value = PeopleResponse(

View File

@@ -648,7 +648,7 @@ async def test_unique_id_and_friends_migration(
@pytest.mark.parametrize(
("provider", "method"),
[
("people", "get_friend_by_xuid"),
("people", "get_friends_by_xuid"),
("people", "get_friends_own"),
],
)

View File

@@ -104,7 +104,7 @@ async def test_load_image_from_url(
assert resp.content_type == "image/png"
assert resp.content_length == 4
xbox_live_client.people.get_friend_by_xuid.return_value = PeopleResponse(
xbox_live_client.people.get_friends_by_xuid.return_value = PeopleResponse(
**await async_load_json_object_fixture(
hass, "people_batch gamerpic.json", DOMAIN
) # pyright: ignore[reportArgumentType]

View File

@@ -173,7 +173,7 @@ async def test_oauth_session_refresh_user_and_xsts_token_exceptions(
[
("smartglass", "get_console_status"),
("catalog", "get_product_from_alternate_id"),
("people", "get_friend_by_xuid"),
("people", "get_friends_by_xuid"),
("people", "get_friends_own"),
],
)

View File

@@ -116,7 +116,7 @@ async def test_browse_media_accounts(
assert config_entry.state is ConfigEntryState.LOADED
xbox_live_client.people.get_friend_by_xuid.return_value = PeopleResponse(
xbox_live_client.people.get_friends_by_xuid.return_value = PeopleResponse(
**(await async_load_json_object_fixture(hass, "people_batch2.json", DOMAIN)) # type: ignore[reportArgumentType]
)

View File

@@ -77,7 +77,6 @@
"isListening": true,
"isRouting": false,
"isSecure": "unknown",
"dsk": "00000-11111-22222-33333-44444-55555-66666-77777",
"manufacturerId": 134,
"productId": 90,
"productType": 1,
@@ -182,7 +181,6 @@
"isListening": false,
"isRouting": true,
"isSecure": true,
"dsk": "12345-67890-12345-67890-12345-67890-12345-67890",
"firmwareVersion": "113.22",
"name": "Front Door Lock",
"location": "Foyer",

View File

@@ -78,7 +78,6 @@
"isListening": true,
"isRouting": false,
"isSecure": "unknown",
"dsk": "**REDACTED**",
"manufacturerId": 134,
"productId": 90,
"productType": 1,
@@ -183,7 +182,6 @@
"isListening": false,
"isRouting": true,
"isSecure": true,
"dsk": "**REDACTED**",
"firmwareVersion": "113.22",
"name": "Front Door Lock",
"location": "**REDACTED**",