mirror of
https://github.com/home-assistant/core.git
synced 2026-03-03 14:26:59 +01:00
Compare commits
1 Commits
dev
...
epenet/202
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bda5b10388 |
4
.github/workflows/builder.yml
vendored
4
.github/workflows/builder.yml
vendored
@@ -182,7 +182,7 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Download translations
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
name: translations
|
||||
|
||||
@@ -544,7 +544,7 @@ jobs:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
- name: Download translations
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
name: translations
|
||||
|
||||
|
||||
8
.github/workflows/ci.yaml
vendored
8
.github/workflows/ci.yaml
vendored
@@ -978,7 +978,7 @@ jobs:
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
|
||||
- name: Download pytest_buckets
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
name: pytest_buckets
|
||||
- name: Compile English translations
|
||||
@@ -1387,7 +1387,7 @@ jobs:
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
@@ -1558,7 +1558,7 @@ jobs:
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
@@ -1587,7 +1587,7 @@ jobs:
|
||||
&& needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
||||
steps:
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
pattern: test-results-*
|
||||
- name: Upload test results to Codecov
|
||||
|
||||
10
.github/workflows/wheels.yml
vendored
10
.github/workflows/wheels.yml
vendored
@@ -124,12 +124,12 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Download env_file
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
name: env_file
|
||||
|
||||
- name: Download requirements_diff
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
name: requirements_diff
|
||||
|
||||
@@ -175,17 +175,17 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Download env_file
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
name: env_file
|
||||
|
||||
- name: Download requirements_diff
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
name: requirements_diff
|
||||
|
||||
- name: Download requirements_all_wheels
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
name: requirements_all_wheels
|
||||
|
||||
|
||||
@@ -70,7 +70,7 @@ from .const import (
|
||||
SIGNAL_BOOTSTRAP_INTEGRATIONS,
|
||||
)
|
||||
from .core_config import async_process_ha_core_config
|
||||
from .exceptions import HomeAssistantError, UnsupportedStorageVersionError
|
||||
from .exceptions import HomeAssistantError
|
||||
from .helpers import (
|
||||
area_registry,
|
||||
category_registry,
|
||||
@@ -433,56 +433,32 @@ def _init_blocking_io_modules_in_executor() -> None:
|
||||
is_docker_env()
|
||||
|
||||
|
||||
async def async_load_base_functionality(hass: core.HomeAssistant) -> bool:
|
||||
"""Load the registries and modules that will do blocking I/O.
|
||||
|
||||
Return whether loading succeeded.
|
||||
"""
|
||||
async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
|
||||
"""Load the registries and modules that will do blocking I/O."""
|
||||
if DATA_REGISTRIES_LOADED in hass.data:
|
||||
return True
|
||||
|
||||
return
|
||||
hass.data[DATA_REGISTRIES_LOADED] = None
|
||||
entity.async_setup(hass)
|
||||
frame.async_setup(hass)
|
||||
template.async_setup(hass)
|
||||
translation.async_setup(hass)
|
||||
|
||||
recovery = hass.config.recovery_mode
|
||||
try:
|
||||
await asyncio.gather(
|
||||
create_eager_task(get_internal_store_manager(hass).async_initialize()),
|
||||
create_eager_task(area_registry.async_load(hass, load_empty=recovery)),
|
||||
create_eager_task(category_registry.async_load(hass, load_empty=recovery)),
|
||||
create_eager_task(device_registry.async_load(hass, load_empty=recovery)),
|
||||
create_eager_task(entity_registry.async_load(hass, load_empty=recovery)),
|
||||
create_eager_task(floor_registry.async_load(hass, load_empty=recovery)),
|
||||
create_eager_task(issue_registry.async_load(hass, load_empty=recovery)),
|
||||
create_eager_task(label_registry.async_load(hass, load_empty=recovery)),
|
||||
hass.async_add_executor_job(_init_blocking_io_modules_in_executor),
|
||||
create_eager_task(template.async_load_custom_templates(hass)),
|
||||
create_eager_task(restore_state.async_load(hass, load_empty=recovery)),
|
||||
create_eager_task(hass.config_entries.async_initialize()),
|
||||
create_eager_task(async_get_system_info(hass)),
|
||||
create_eager_task(condition.async_setup(hass)),
|
||||
create_eager_task(trigger.async_setup(hass)),
|
||||
)
|
||||
except UnsupportedStorageVersionError as err:
|
||||
# If we're already in recovery mode, we don't want to handle the exception
|
||||
# and activate recovery mode again, as that would lead to an infinite loop.
|
||||
if recovery:
|
||||
raise
|
||||
|
||||
_LOGGER.error(
|
||||
"Storage file %s was created by a newer version of Home Assistant"
|
||||
" (storage version %s > %s); activating recovery mode; on-disk data"
|
||||
" is preserved; upgrade Home Assistant or restore from a backup",
|
||||
err.storage_key,
|
||||
err.found_version,
|
||||
err.max_supported_version,
|
||||
)
|
||||
return False
|
||||
|
||||
return True
|
||||
await asyncio.gather(
|
||||
create_eager_task(get_internal_store_manager(hass).async_initialize()),
|
||||
create_eager_task(area_registry.async_load(hass)),
|
||||
create_eager_task(category_registry.async_load(hass)),
|
||||
create_eager_task(device_registry.async_load(hass)),
|
||||
create_eager_task(entity_registry.async_load(hass)),
|
||||
create_eager_task(floor_registry.async_load(hass)),
|
||||
create_eager_task(issue_registry.async_load(hass)),
|
||||
create_eager_task(label_registry.async_load(hass)),
|
||||
hass.async_add_executor_job(_init_blocking_io_modules_in_executor),
|
||||
create_eager_task(template.async_load_custom_templates(hass)),
|
||||
create_eager_task(restore_state.async_load(hass)),
|
||||
create_eager_task(hass.config_entries.async_initialize()),
|
||||
create_eager_task(async_get_system_info(hass)),
|
||||
create_eager_task(condition.async_setup(hass)),
|
||||
create_eager_task(trigger.async_setup(hass)),
|
||||
)
|
||||
|
||||
|
||||
async def async_from_config_dict(
|
||||
@@ -499,9 +475,7 @@ async def async_from_config_dict(
|
||||
# Prime custom component cache early so we know if registry entries are tied
|
||||
# to a custom integration
|
||||
await loader.async_get_custom_components(hass)
|
||||
|
||||
if not await async_load_base_functionality(hass):
|
||||
return None
|
||||
await async_load_base_functionality(hass)
|
||||
|
||||
# Set up core.
|
||||
_LOGGER.debug("Setting up %s", CORE_INTEGRATIONS)
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
{
|
||||
"domain": "ubisys",
|
||||
"name": "Ubisys",
|
||||
"iot_standards": ["zigbee"]
|
||||
}
|
||||
@@ -44,7 +44,7 @@ def make_entity_state_trigger_required_features(
|
||||
class CustomTrigger(EntityStateTriggerRequiredFeatures):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_domains = {domain}
|
||||
_domain = domain
|
||||
_to_states = {to_state}
|
||||
_required_features = required_features
|
||||
|
||||
|
||||
@@ -29,17 +29,12 @@ class StoredBackupData(TypedDict):
|
||||
class _BackupStore(Store[StoredBackupData]):
|
||||
"""Class to help storing backup data."""
|
||||
|
||||
# Maximum version we support reading for forward compatibility.
|
||||
# This allows reading data written by a newer HA version after downgrade.
|
||||
_MAX_READABLE_VERSION = 2
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize storage class."""
|
||||
super().__init__(
|
||||
hass,
|
||||
STORAGE_VERSION,
|
||||
STORAGE_KEY,
|
||||
max_readable_version=self._MAX_READABLE_VERSION,
|
||||
minor_version=STORAGE_VERSION_MINOR,
|
||||
)
|
||||
|
||||
@@ -91,8 +86,8 @@ class _BackupStore(Store[StoredBackupData]):
|
||||
# data["config"]["schedule"]["state"] will be removed. The bump to 2 is
|
||||
# planned to happen after a 6 month quiet period with no minor version
|
||||
# changes.
|
||||
# Reject if major version is higher than _MAX_READABLE_VERSION.
|
||||
if old_major_version > self._MAX_READABLE_VERSION:
|
||||
# Reject if major version is higher than 2.
|
||||
if old_major_version > 2:
|
||||
raise NotImplementedError
|
||||
return data
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ class BinarySensorOnOffTrigger(EntityTargetStateTriggerBase):
|
||||
"""Class for binary sensor on/off triggers."""
|
||||
|
||||
_device_class: BinarySensorDeviceClass | None
|
||||
_domains = {DOMAIN}
|
||||
_domain: str = DOMAIN
|
||||
|
||||
def entity_filter(self, entities: set[str]) -> set[str]:
|
||||
"""Filter entities of this domain."""
|
||||
|
||||
@@ -14,7 +14,7 @@ from . import DOMAIN
|
||||
class ButtonPressedTrigger(EntityTriggerBase):
|
||||
"""Trigger for button entity presses."""
|
||||
|
||||
_domains = {DOMAIN}
|
||||
_domain = DOMAIN
|
||||
_schema = ENTITY_STATE_TRIGGER_SCHEMA
|
||||
|
||||
def is_valid_transition(self, from_state: State, to_state: State) -> bool:
|
||||
|
||||
@@ -29,12 +29,6 @@
|
||||
"early_update": {
|
||||
"default": "mdi:update"
|
||||
},
|
||||
"equalizer": {
|
||||
"default": "mdi:equalizer",
|
||||
"state": {
|
||||
"off": "mdi:equalizer-outline"
|
||||
}
|
||||
},
|
||||
"pre_amp": {
|
||||
"default": "mdi:volume-high",
|
||||
"state": {
|
||||
|
||||
@@ -65,9 +65,6 @@
|
||||
"early_update": {
|
||||
"name": "Early update"
|
||||
},
|
||||
"equalizer": {
|
||||
"name": "Equalizer"
|
||||
},
|
||||
"pre_amp": {
|
||||
"name": "Pre-Amp"
|
||||
},
|
||||
|
||||
@@ -33,13 +33,6 @@ def room_correction_enabled(client: StreamMagicClient) -> bool:
|
||||
return client.audio.tilt_eq.enabled
|
||||
|
||||
|
||||
def equalizer_enabled(client: StreamMagicClient) -> bool:
|
||||
"""Check if equalizer is enabled."""
|
||||
if TYPE_CHECKING:
|
||||
assert client.audio.user_eq is not None
|
||||
return client.audio.user_eq.enabled
|
||||
|
||||
|
||||
CONTROL_ENTITIES: tuple[CambridgeAudioSwitchEntityDescription, ...] = (
|
||||
CambridgeAudioSwitchEntityDescription(
|
||||
key="pre_amp",
|
||||
@@ -63,14 +56,6 @@ CONTROL_ENTITIES: tuple[CambridgeAudioSwitchEntityDescription, ...] = (
|
||||
value_fn=room_correction_enabled,
|
||||
set_value_fn=lambda client, value: client.set_room_correction_mode(value),
|
||||
),
|
||||
CambridgeAudioSwitchEntityDescription(
|
||||
key="equalizer",
|
||||
translation_key="equalizer",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
load_fn=lambda client: client.audio.user_eq is not None,
|
||||
value_fn=equalizer_enabled,
|
||||
set_value_fn=lambda client, value: client.set_equalizer_mode(value),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -43,7 +43,7 @@ HVAC_MODE_CHANGED_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST.extend
|
||||
class HVACModeChangedTrigger(EntityTargetStateTriggerBase):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_domains = {DOMAIN}
|
||||
_domain = DOMAIN
|
||||
_schema = HVAC_MODE_CHANGED_TRIGGER_SCHEMA
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
|
||||
@@ -48,8 +48,6 @@ def async_setup(hass: HomeAssistant) -> None:
|
||||
vol.Optional("conversation_id"): vol.Any(str, None),
|
||||
vol.Optional("language"): str,
|
||||
vol.Optional("agent_id"): agent_id_validator,
|
||||
vol.Optional("device_id"): vol.Any(str, None),
|
||||
vol.Optional("satellite_id"): vol.Any(str, None),
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@@ -66,8 +64,6 @@ async def websocket_process(
|
||||
context=connection.context(msg),
|
||||
language=msg.get("language"),
|
||||
agent_id=msg.get("agent_id"),
|
||||
device_id=msg.get("device_id"),
|
||||
satellite_id=msg.get("satellite_id"),
|
||||
)
|
||||
connection.send_result(msg["id"], result.as_dict())
|
||||
|
||||
@@ -252,8 +248,6 @@ class ConversationProcessView(http.HomeAssistantView):
|
||||
vol.Optional("conversation_id"): str,
|
||||
vol.Optional("language"): str,
|
||||
vol.Optional("agent_id"): agent_id_validator,
|
||||
vol.Optional("device_id"): vol.Any(str, None),
|
||||
vol.Optional("satellite_id"): vol.Any(str, None),
|
||||
}
|
||||
)
|
||||
)
|
||||
@@ -268,8 +262,6 @@ class ConversationProcessView(http.HomeAssistantView):
|
||||
context=self.context(request),
|
||||
language=data.get("language"),
|
||||
agent_id=data.get("agent_id"),
|
||||
device_id=data.get("device_id"),
|
||||
satellite_id=data.get("satellite_id"),
|
||||
)
|
||||
|
||||
return self.json(result.as_dict())
|
||||
|
||||
@@ -112,12 +112,11 @@ def _zone_is_configured(zone: DaikinZone) -> bool:
|
||||
|
||||
def _zone_temperature_lists(device: Appliance) -> tuple[list[str], list[str]]:
|
||||
"""Return the decoded zone temperature lists."""
|
||||
values = device.values
|
||||
if DAIKIN_ZONE_TEMP_HEAT not in values or DAIKIN_ZONE_TEMP_COOL not in values:
|
||||
try:
|
||||
heating = device.represent(DAIKIN_ZONE_TEMP_HEAT)[1]
|
||||
cooling = device.represent(DAIKIN_ZONE_TEMP_COOL)[1]
|
||||
except AttributeError, KeyError:
|
||||
return ([], [])
|
||||
|
||||
heating = device.represent(DAIKIN_ZONE_TEMP_HEAT)[1]
|
||||
cooling = device.represent(DAIKIN_ZONE_TEMP_COOL)[1]
|
||||
return (list(heating or []), list(cooling or []))
|
||||
|
||||
|
||||
|
||||
@@ -2,39 +2,14 @@
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import EafmConfigEntry, EafmCoordinator
|
||||
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
|
||||
|
||||
def _fix_device_registry_identifiers(
|
||||
hass: HomeAssistant, entry: EafmConfigEntry
|
||||
) -> None:
|
||||
"""Fix invalid identifiers in device registry.
|
||||
|
||||
Added in 2026.4, can be removed in 2026.10 or later.
|
||||
"""
|
||||
device_registry = dr.async_get(hass)
|
||||
for device_entry in dr.async_entries_for_config_entry(
|
||||
device_registry, entry.entry_id
|
||||
):
|
||||
old_identifier = (DOMAIN, "measure-id", entry.data["station"])
|
||||
if old_identifier not in device_entry.identifiers: # type: ignore[comparison-overlap]
|
||||
continue
|
||||
new_identifiers = device_entry.identifiers.copy()
|
||||
new_identifiers.discard(old_identifier) # type: ignore[arg-type]
|
||||
new_identifiers.add((DOMAIN, entry.data["station"]))
|
||||
device_registry.async_update_device(
|
||||
device_entry.id, new_identifiers=new_identifiers
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: EafmConfigEntry) -> bool:
|
||||
"""Set up flood monitoring sensors for this config entry."""
|
||||
_fix_device_registry_identifiers(hass, entry)
|
||||
coordinator = EafmCoordinator(hass, entry=entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
@@ -94,11 +94,11 @@ class Measurement(CoordinatorEntity, SensorEntity):
|
||||
return self.coordinator.data["measures"][self.key]["parameterName"]
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
def device_info(self):
|
||||
"""Return the device info."""
|
||||
return DeviceInfo(
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
identifiers={(DOMAIN, self.station_id)},
|
||||
identifiers={(DOMAIN, "measure-id", self.station_id)},
|
||||
manufacturer="https://environment.data.gov.uk/",
|
||||
model=self.parameter_name,
|
||||
name=f"{self.station_name} {self.parameter_name} {self.qualifier}",
|
||||
|
||||
@@ -189,7 +189,6 @@ async def platform_async_setup_entry(
|
||||
info_type: type[_InfoT],
|
||||
entity_type: type[_EntityT],
|
||||
state_type: type[_StateT],
|
||||
info_filter: Callable[[_InfoT], bool] | None = None,
|
||||
) -> None:
|
||||
"""Set up an esphome platform.
|
||||
|
||||
@@ -209,22 +208,10 @@ async def platform_async_setup_entry(
|
||||
entity_type,
|
||||
state_type,
|
||||
)
|
||||
|
||||
if info_filter is not None:
|
||||
|
||||
def on_filtered_update(infos: list[EntityInfo]) -> None:
|
||||
on_static_info_update(
|
||||
[info for info in infos if info_filter(cast(_InfoT, info))]
|
||||
)
|
||||
|
||||
info_callback = on_filtered_update
|
||||
else:
|
||||
info_callback = on_static_info_update
|
||||
|
||||
entry_data.cleanup_callbacks.append(
|
||||
entry_data.async_register_static_info_callback(
|
||||
info_type,
|
||||
info_callback,
|
||||
on_static_info_update,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@@ -29,7 +29,6 @@ from aioesphomeapi import (
|
||||
Event,
|
||||
EventInfo,
|
||||
FanInfo,
|
||||
InfraredInfo,
|
||||
LightInfo,
|
||||
LockInfo,
|
||||
MediaPlayerInfo,
|
||||
@@ -86,7 +85,6 @@ INFO_TYPE_TO_PLATFORM: dict[type[EntityInfo], Platform] = {
|
||||
DateTimeInfo: Platform.DATETIME,
|
||||
EventInfo: Platform.EVENT,
|
||||
FanInfo: Platform.FAN,
|
||||
InfraredInfo: Platform.INFRARED,
|
||||
LightInfo: Platform.LIGHT,
|
||||
LockInfo: Platform.LOCK,
|
||||
MediaPlayerInfo: Platform.MEDIA_PLAYER,
|
||||
|
||||
@@ -1,59 +0,0 @@
|
||||
"""Infrared platform for ESPHome."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import partial
|
||||
import logging
|
||||
|
||||
from aioesphomeapi import EntityState, InfraredCapability, InfraredInfo
|
||||
|
||||
from homeassistant.components.infrared import InfraredCommand, InfraredEntity
|
||||
from homeassistant.core import callback
|
||||
|
||||
from .entity import (
|
||||
EsphomeEntity,
|
||||
convert_api_error_ha_error,
|
||||
platform_async_setup_entry,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
class EsphomeInfraredEntity(EsphomeEntity[InfraredInfo, EntityState], InfraredEntity):
|
||||
"""ESPHome infrared entity using native API."""
|
||||
|
||||
@callback
|
||||
def _on_device_update(self) -> None:
|
||||
"""Call when device updates or entry data changes."""
|
||||
super()._on_device_update()
|
||||
if self._entry_data.available:
|
||||
# Infrared entities should go available as soon as the device comes online
|
||||
self.async_write_ha_state()
|
||||
|
||||
@convert_api_error_ha_error
|
||||
async def async_send_command(self, command: InfraredCommand) -> None:
|
||||
"""Send an IR command."""
|
||||
timings = [
|
||||
interval
|
||||
for timing in command.get_raw_timings()
|
||||
for interval in (timing.high_us, -timing.low_us)
|
||||
]
|
||||
_LOGGER.debug("Sending command: %s", timings)
|
||||
|
||||
self._client.infrared_rf_transmit_raw_timings(
|
||||
self._static_info.key,
|
||||
carrier_frequency=command.modulation,
|
||||
timings=timings,
|
||||
device_id=self._static_info.device_id,
|
||||
)
|
||||
|
||||
|
||||
async_setup_entry = partial(
|
||||
platform_async_setup_entry,
|
||||
info_type=InfraredInfo,
|
||||
entity_type=EsphomeInfraredEntity,
|
||||
state_type=EntityState,
|
||||
info_filter=lambda info: bool(info.capabilities & InfraredCapability.TRANSMITTER),
|
||||
)
|
||||
@@ -21,5 +21,5 @@
|
||||
"integration_type": "system",
|
||||
"preview_features": { "winter_mode": {} },
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20260302.0"]
|
||||
"requirements": ["home-assistant-frontend==20260226.0"]
|
||||
}
|
||||
|
||||
@@ -54,10 +54,6 @@
|
||||
"connectable": false,
|
||||
"local_name": "GVH5110*"
|
||||
},
|
||||
{
|
||||
"connectable": false,
|
||||
"local_name": "GV5140*"
|
||||
},
|
||||
{
|
||||
"connectable": false,
|
||||
"manufacturer_id": 1,
|
||||
|
||||
@@ -21,7 +21,6 @@ from homeassistant.components.sensor import (
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
PERCENTAGE,
|
||||
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
|
||||
UnitOfTemperature,
|
||||
@@ -73,12 +72,6 @@ SENSOR_DESCRIPTIONS = {
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
(DeviceClass.CO2, Units.CONCENTRATION_PARTS_PER_MILLION): SensorEntityDescription(
|
||||
key=f"{DeviceClass.CO2}_{Units.CONCENTRATION_PARTS_PER_MILLION}",
|
||||
device_class=SensorDeviceClass.CO2,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"requirements": [
|
||||
"xknx==3.15.0",
|
||||
"xknxproject==3.8.2",
|
||||
"knx-frontend==2026.3.2.183756"
|
||||
"knx-frontend==2026.2.25.165736"
|
||||
],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -23,7 +23,7 @@ def _convert_uint8_to_percentage(value: Any) -> float:
|
||||
class BrightnessChangedTrigger(EntityNumericalStateAttributeChangedTriggerBase):
|
||||
"""Trigger for brightness changed."""
|
||||
|
||||
_domains = {DOMAIN}
|
||||
_domain = DOMAIN
|
||||
_attribute = ATTR_BRIGHTNESS
|
||||
|
||||
_converter = staticmethod(_convert_uint8_to_percentage)
|
||||
@@ -34,7 +34,7 @@ class BrightnessCrossedThresholdTrigger(
|
||||
):
|
||||
"""Trigger for brightness crossed threshold."""
|
||||
|
||||
_domains = {DOMAIN}
|
||||
_domain = DOMAIN
|
||||
_attribute = ATTR_BRIGHTNESS
|
||||
_converter = staticmethod(_convert_uint8_to_percentage)
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pylutron_caseta"],
|
||||
"requirements": ["pylutron-caseta==0.27.0"],
|
||||
"requirements": ["pylutron-caseta==0.26.0"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"properties": {
|
||||
|
||||
@@ -14,6 +14,7 @@ from chip.clusters.Types import NullValue
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
|
||||
from .const import (
|
||||
CLEAR_ALL_INDEX,
|
||||
CRED_TYPE_FACE,
|
||||
CRED_TYPE_FINGER_VEIN,
|
||||
CRED_TYPE_FINGERPRINT,
|
||||
@@ -221,6 +222,42 @@ def _format_user_response(user_data: Any) -> LockUserData | None:
|
||||
# --- Credential management helpers ---
|
||||
|
||||
|
||||
async def _clear_user_credentials(
|
||||
matter_client: MatterClient,
|
||||
node_id: int,
|
||||
endpoint_id: int,
|
||||
user_index: int,
|
||||
) -> None:
|
||||
"""Clear all credentials for a specific user.
|
||||
|
||||
Fetches the user to get credential list, then clears each credential.
|
||||
"""
|
||||
get_user_response = await matter_client.send_device_command(
|
||||
node_id=node_id,
|
||||
endpoint_id=endpoint_id,
|
||||
command=clusters.DoorLock.Commands.GetUser(userIndex=user_index),
|
||||
)
|
||||
|
||||
creds = _get_attr(get_user_response, "credentials")
|
||||
if not creds:
|
||||
return
|
||||
|
||||
for cred in creds:
|
||||
cred_type = _get_attr(cred, "credentialType")
|
||||
cred_index = _get_attr(cred, "credentialIndex")
|
||||
await matter_client.send_device_command(
|
||||
node_id=node_id,
|
||||
endpoint_id=endpoint_id,
|
||||
command=clusters.DoorLock.Commands.ClearCredential(
|
||||
credential=clusters.DoorLock.Structs.CredentialStruct(
|
||||
credentialType=cred_type,
|
||||
credentialIndex=cred_index,
|
||||
),
|
||||
),
|
||||
timed_request_timeout_ms=LOCK_TIMED_REQUEST_TIMEOUT_MS,
|
||||
)
|
||||
|
||||
|
||||
class LockEndpointNotFoundError(HomeAssistantError):
|
||||
"""Lock endpoint not found on node."""
|
||||
|
||||
@@ -520,16 +557,33 @@ async def clear_lock_user(
|
||||
node: MatterNode,
|
||||
user_index: int,
|
||||
) -> None:
|
||||
"""Clear a user from the lock.
|
||||
"""Clear a user from the lock, cleaning up credentials first.
|
||||
|
||||
Per the Matter spec, ClearUser also clears all associated credentials
|
||||
and schedules for the user.
|
||||
Use index 0xFFFE (CLEAR_ALL_INDEX) to clear all users.
|
||||
Raises HomeAssistantError on failure.
|
||||
"""
|
||||
lock_endpoint = _get_lock_endpoint_or_raise(node)
|
||||
_ensure_usr_support(lock_endpoint)
|
||||
|
||||
if user_index == CLEAR_ALL_INDEX:
|
||||
# Clear all: clear all credentials first, then all users
|
||||
await matter_client.send_device_command(
|
||||
node_id=node.node_id,
|
||||
endpoint_id=lock_endpoint.endpoint_id,
|
||||
command=clusters.DoorLock.Commands.ClearCredential(
|
||||
credential=None,
|
||||
),
|
||||
timed_request_timeout_ms=LOCK_TIMED_REQUEST_TIMEOUT_MS,
|
||||
)
|
||||
else:
|
||||
# Clear credentials for this specific user before deleting them
|
||||
await _clear_user_credentials(
|
||||
matter_client,
|
||||
node.node_id,
|
||||
lock_endpoint.endpoint_id,
|
||||
user_index,
|
||||
)
|
||||
|
||||
await matter_client.send_device_command(
|
||||
node_id=node.node_id,
|
||||
endpoint_id=lock_endpoint.endpoint_id,
|
||||
|
||||
@@ -18,17 +18,20 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import (
|
||||
MetOfficeConfigEntry,
|
||||
MetOfficeRuntimeData,
|
||||
MetOfficeUpdateCoordinator,
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
METOFFICE_COORDINATES,
|
||||
METOFFICE_DAILY_COORDINATOR,
|
||||
METOFFICE_HOURLY_COORDINATOR,
|
||||
METOFFICE_NAME,
|
||||
METOFFICE_TWICE_DAILY_COORDINATOR,
|
||||
)
|
||||
from .coordinator import MetOfficeUpdateCoordinator
|
||||
|
||||
PLATFORMS = [Platform.SENSOR, Platform.WEATHER]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: MetOfficeConfigEntry) -> bool:
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a Met Office entry."""
|
||||
|
||||
latitude: float = entry.data[CONF_LATITUDE]
|
||||
@@ -36,6 +39,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: MetOfficeConfigEntry) ->
|
||||
api_key: str = entry.data[CONF_API_KEY]
|
||||
site_name: str = entry.data[CONF_NAME]
|
||||
|
||||
coordinates = f"{latitude}_{longitude}"
|
||||
|
||||
connection = Manager(api_key=api_key)
|
||||
|
||||
metoffice_hourly_coordinator = MetOfficeUpdateCoordinator(
|
||||
@@ -68,20 +73,21 @@ async def async_setup_entry(hass: HomeAssistant, entry: MetOfficeConfigEntry) ->
|
||||
frequency="twice-daily",
|
||||
)
|
||||
|
||||
metoffice_hass_data = hass.data.setdefault(DOMAIN, {})
|
||||
metoffice_hass_data[entry.entry_id] = {
|
||||
METOFFICE_HOURLY_COORDINATOR: metoffice_hourly_coordinator,
|
||||
METOFFICE_DAILY_COORDINATOR: metoffice_daily_coordinator,
|
||||
METOFFICE_TWICE_DAILY_COORDINATOR: metoffice_twice_daily_coordinator,
|
||||
METOFFICE_NAME: site_name,
|
||||
METOFFICE_COORDINATES: coordinates,
|
||||
}
|
||||
|
||||
# Fetch initial data so we have data when entities subscribe
|
||||
await asyncio.gather(
|
||||
metoffice_hourly_coordinator.async_config_entry_first_refresh(),
|
||||
metoffice_daily_coordinator.async_config_entry_first_refresh(),
|
||||
)
|
||||
|
||||
entry.runtime_data = MetOfficeRuntimeData(
|
||||
coordinates=f"{latitude}_{longitude}",
|
||||
hourly_coordinator=metoffice_hourly_coordinator,
|
||||
daily_coordinator=metoffice_daily_coordinator,
|
||||
twice_daily_coordinator=metoffice_twice_daily_coordinator,
|
||||
name=site_name,
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
@@ -89,7 +95,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: MetOfficeConfigEntry) ->
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
if unload_ok:
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
if not hass.data[DOMAIN]:
|
||||
hass.data.pop(DOMAIN)
|
||||
return unload_ok
|
||||
|
||||
|
||||
def get_device_info(coordinates: str, name: str) -> DeviceInfo:
|
||||
|
||||
@@ -38,6 +38,13 @@ ATTRIBUTION = "Data provided by the Met Office"
|
||||
|
||||
DEFAULT_SCAN_INTERVAL = timedelta(minutes=15)
|
||||
|
||||
METOFFICE_COORDINATES = "metoffice_coordinates"
|
||||
METOFFICE_HOURLY_COORDINATOR = "metoffice_hourly_coordinator"
|
||||
METOFFICE_DAILY_COORDINATOR = "metoffice_daily_coordinator"
|
||||
METOFFICE_TWICE_DAILY_COORDINATOR = "metoffice_twice_daily_coordinator"
|
||||
METOFFICE_MONITORED_CONDITIONS = "metoffice_monitored_conditions"
|
||||
METOFFICE_NAME = "metoffice_name"
|
||||
|
||||
CONDITION_CLASSES: dict[str, list[int]] = {
|
||||
ATTR_CONDITION_CLEAR_NIGHT: [0],
|
||||
ATTR_CONDITION_CLOUDY: [7, 8],
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Literal
|
||||
|
||||
@@ -23,19 +22,6 @@ from .const import DEFAULT_SCAN_INTERVAL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type MetOfficeConfigEntry = ConfigEntry[MetOfficeRuntimeData]
|
||||
|
||||
|
||||
@dataclass
|
||||
class MetOfficeRuntimeData:
|
||||
"""Met Office config entry."""
|
||||
|
||||
coordinates: str
|
||||
hourly_coordinator: MetOfficeUpdateCoordinator
|
||||
daily_coordinator: MetOfficeUpdateCoordinator
|
||||
twice_daily_coordinator: MetOfficeUpdateCoordinator
|
||||
name: str
|
||||
|
||||
|
||||
class MetOfficeUpdateCoordinator(TimestampDataUpdateCoordinator[Forecast]):
|
||||
"""Coordinator for Met Office forecast data."""
|
||||
|
||||
@@ -13,6 +13,7 @@ from homeassistant.components.sensor import (
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
DEGREE,
|
||||
PERCENTAGE,
|
||||
@@ -29,12 +30,15 @@ from homeassistant.helpers.typing import StateType
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import get_device_info
|
||||
from .const import ATTRIBUTION, CONDITION_MAP, DOMAIN
|
||||
from .coordinator import (
|
||||
MetOfficeConfigEntry,
|
||||
MetOfficeRuntimeData,
|
||||
MetOfficeUpdateCoordinator,
|
||||
from .const import (
|
||||
ATTRIBUTION,
|
||||
CONDITION_MAP,
|
||||
DOMAIN,
|
||||
METOFFICE_COORDINATES,
|
||||
METOFFICE_HOURLY_COORDINATOR,
|
||||
METOFFICE_NAME,
|
||||
)
|
||||
from .coordinator import MetOfficeUpdateCoordinator
|
||||
from .helpers import get_attribute
|
||||
|
||||
ATTR_LAST_UPDATE = "last_update"
|
||||
@@ -168,19 +172,19 @@ SENSOR_TYPES: tuple[MetOfficeSensorEntityDescription, ...] = (
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: MetOfficeConfigEntry,
|
||||
entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Met Office weather sensor platform."""
|
||||
entity_registry = er.async_get(hass)
|
||||
hass_data = entry.runtime_data
|
||||
hass_data = hass.data[DOMAIN][entry.entry_id]
|
||||
|
||||
# Remove daily entities from legacy config entries
|
||||
for description in SENSOR_TYPES:
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
SENSOR_DOMAIN,
|
||||
DOMAIN,
|
||||
f"{description.key}_{hass_data.coordinates}_daily",
|
||||
f"{description.key}_{hass_data[METOFFICE_COORDINATES]}_daily",
|
||||
):
|
||||
entity_registry.async_remove(entity_id)
|
||||
|
||||
@@ -188,20 +192,20 @@ async def async_setup_entry(
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
SENSOR_DOMAIN,
|
||||
DOMAIN,
|
||||
f"visibility_distance_{hass_data.coordinates}_daily",
|
||||
f"visibility_distance_{hass_data[METOFFICE_COORDINATES]}_daily",
|
||||
):
|
||||
entity_registry.async_remove(entity_id)
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
SENSOR_DOMAIN,
|
||||
DOMAIN,
|
||||
f"visibility_distance_{hass_data.coordinates}",
|
||||
f"visibility_distance_{hass_data[METOFFICE_COORDINATES]}",
|
||||
):
|
||||
entity_registry.async_remove(entity_id)
|
||||
|
||||
async_add_entities(
|
||||
[
|
||||
MetOfficeCurrentSensor(
|
||||
hass_data.hourly_coordinator,
|
||||
hass_data[METOFFICE_HOURLY_COORDINATOR],
|
||||
hass_data,
|
||||
description,
|
||||
)
|
||||
@@ -224,7 +228,7 @@ class MetOfficeCurrentSensor(
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: MetOfficeUpdateCoordinator,
|
||||
hass_data: MetOfficeRuntimeData,
|
||||
hass_data: dict[str, Any],
|
||||
description: MetOfficeSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
@@ -233,9 +237,9 @@ class MetOfficeCurrentSensor(
|
||||
self.entity_description = description
|
||||
|
||||
self._attr_device_info = get_device_info(
|
||||
coordinates=hass_data.coordinates, name=hass_data.name
|
||||
coordinates=hass_data[METOFFICE_COORDINATES], name=hass_data[METOFFICE_NAME]
|
||||
)
|
||||
self._attr_unique_id = f"{description.key}_{hass_data.coordinates}"
|
||||
self._attr_unique_id = f"{description.key}_{hass_data[METOFFICE_COORDINATES]}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
|
||||
@@ -23,6 +23,7 @@ from homeassistant.components.weather import (
|
||||
Forecast as WeatherForecast,
|
||||
WeatherEntityFeature,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
UnitOfLength,
|
||||
UnitOfPressure,
|
||||
@@ -41,39 +42,40 @@ from .const import (
|
||||
DAY_FORECAST_ATTRIBUTE_MAP,
|
||||
DOMAIN,
|
||||
HOURLY_FORECAST_ATTRIBUTE_MAP,
|
||||
METOFFICE_COORDINATES,
|
||||
METOFFICE_DAILY_COORDINATOR,
|
||||
METOFFICE_HOURLY_COORDINATOR,
|
||||
METOFFICE_NAME,
|
||||
METOFFICE_TWICE_DAILY_COORDINATOR,
|
||||
NIGHT_FORECAST_ATTRIBUTE_MAP,
|
||||
)
|
||||
from .coordinator import (
|
||||
MetOfficeConfigEntry,
|
||||
MetOfficeRuntimeData,
|
||||
MetOfficeUpdateCoordinator,
|
||||
)
|
||||
from .coordinator import MetOfficeUpdateCoordinator
|
||||
from .helpers import get_attribute
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: MetOfficeConfigEntry,
|
||||
entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Met Office weather sensor platform."""
|
||||
entity_registry = er.async_get(hass)
|
||||
hass_data = entry.runtime_data
|
||||
hass_data = hass.data[DOMAIN][entry.entry_id]
|
||||
|
||||
# Remove daily entity from legacy config entries
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
WEATHER_DOMAIN,
|
||||
DOMAIN,
|
||||
f"{hass_data.coordinates}_daily",
|
||||
f"{hass_data[METOFFICE_COORDINATES]}_daily",
|
||||
):
|
||||
entity_registry.async_remove(entity_id)
|
||||
|
||||
async_add_entities(
|
||||
[
|
||||
MetOfficeWeather(
|
||||
hass_data.daily_coordinator,
|
||||
hass_data.hourly_coordinator,
|
||||
hass_data.twice_daily_coordinator,
|
||||
hass_data[METOFFICE_DAILY_COORDINATOR],
|
||||
hass_data[METOFFICE_HOURLY_COORDINATOR],
|
||||
hass_data[METOFFICE_TWICE_DAILY_COORDINATOR],
|
||||
hass_data,
|
||||
)
|
||||
],
|
||||
@@ -176,7 +178,7 @@ class MetOfficeWeather(
|
||||
coordinator_daily: MetOfficeUpdateCoordinator,
|
||||
coordinator_hourly: MetOfficeUpdateCoordinator,
|
||||
coordinator_twice_daily: MetOfficeUpdateCoordinator,
|
||||
hass_data: MetOfficeRuntimeData,
|
||||
hass_data: dict[str, Any],
|
||||
) -> None:
|
||||
"""Initialise the platform with a data instance."""
|
||||
observation_coordinator = coordinator_hourly
|
||||
@@ -188,9 +190,9 @@ class MetOfficeWeather(
|
||||
)
|
||||
|
||||
self._attr_device_info = get_device_info(
|
||||
coordinates=hass_data.coordinates, name=hass_data.name
|
||||
coordinates=hass_data[METOFFICE_COORDINATES], name=hass_data[METOFFICE_NAME]
|
||||
)
|
||||
self._attr_unique_id = hass_data.coordinates
|
||||
self._attr_unique_id = hass_data[METOFFICE_COORDINATES]
|
||||
|
||||
@property
|
||||
def condition(self) -> str | None:
|
||||
|
||||
@@ -14,26 +14,27 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type MoatConfigEntry = ConfigEntry[PassiveBluetoothProcessorCoordinator]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: MoatConfigEntry) -> bool:
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Moat BLE device from a config entry."""
|
||||
address = entry.unique_id
|
||||
assert address is not None
|
||||
data = MoatBluetoothDeviceData()
|
||||
coordinator = PassiveBluetoothProcessorCoordinator(
|
||||
hass,
|
||||
_LOGGER,
|
||||
address=address,
|
||||
mode=BluetoothScanningMode.PASSIVE,
|
||||
update_method=data.update,
|
||||
coordinator = hass.data.setdefault(DOMAIN, {})[entry.entry_id] = (
|
||||
PassiveBluetoothProcessorCoordinator(
|
||||
hass,
|
||||
_LOGGER,
|
||||
address=address,
|
||||
mode=BluetoothScanningMode.PASSIVE,
|
||||
update_method=data.update,
|
||||
)
|
||||
)
|
||||
entry.runtime_data = coordinator
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(
|
||||
coordinator.async_start()
|
||||
@@ -41,6 +42,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: MoatConfigEntry) -> bool
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: MoatConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
|
||||
return unload_ok
|
||||
|
||||
@@ -4,10 +4,12 @@ from __future__ import annotations
|
||||
|
||||
from moat_ble import DeviceClass, DeviceKey, SensorUpdate, Units
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components.bluetooth.passive_update_processor import (
|
||||
PassiveBluetoothDataProcessor,
|
||||
PassiveBluetoothDataUpdate,
|
||||
PassiveBluetoothEntityKey,
|
||||
PassiveBluetoothProcessorCoordinator,
|
||||
PassiveBluetoothProcessorEntity,
|
||||
)
|
||||
from homeassistant.components.sensor import (
|
||||
@@ -26,7 +28,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.sensor import sensor_device_info_to_hass_device_info
|
||||
|
||||
from . import MoatConfigEntry
|
||||
from .const import DOMAIN
|
||||
|
||||
SENSOR_DESCRIPTIONS = {
|
||||
(DeviceClass.TEMPERATURE, Units.TEMP_CELSIUS): SensorEntityDescription(
|
||||
@@ -102,11 +104,13 @@ def sensor_update_to_bluetooth_data_update(
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: MoatConfigEntry,
|
||||
entry: config_entries.ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Moat BLE sensors."""
|
||||
coordinator = entry.runtime_data
|
||||
coordinator: PassiveBluetoothProcessorCoordinator = hass.data[DOMAIN][
|
||||
entry.entry_id
|
||||
]
|
||||
processor = PassiveBluetoothDataProcessor(sensor_update_to_bluetooth_data_update)
|
||||
entry.async_on_unload(
|
||||
processor.async_add_entities_listener(
|
||||
|
||||
@@ -13,7 +13,6 @@ from homeassistant.const import (
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import State, callback
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
|
||||
@@ -96,7 +95,7 @@ class MobileAppEntity(RestoreEntity):
|
||||
config[ATTR_SENSOR_ICON] = last_state.attributes[ATTR_ICON]
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
def device_info(self):
|
||||
"""Return device registry information for this entity."""
|
||||
return device_info(self._registration)
|
||||
|
||||
|
||||
@@ -193,7 +193,7 @@ def webhook_response(
|
||||
)
|
||||
|
||||
|
||||
def device_info(registration: Mapping[str, Any]) -> DeviceInfo:
|
||||
def device_info(registration: dict) -> DeviceInfo:
|
||||
"""Return the device info for this registration."""
|
||||
return DeviceInfo(
|
||||
identifiers={(DOMAIN, registration[ATTR_DEVICE_ID])},
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
@@ -13,14 +14,15 @@ from homeassistant.helpers.config_entry_oauth2_flow import (
|
||||
)
|
||||
|
||||
from .api import AuthenticatedMonzoAPI
|
||||
from .coordinator import MonzoConfigEntry, MonzoCoordinator
|
||||
from .const import DOMAIN
|
||||
from .coordinator import MonzoCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: MonzoConfigEntry) -> bool:
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Migrate entry."""
|
||||
_LOGGER.debug("Migrating from version %s.%s", entry.version, entry.minor_version)
|
||||
|
||||
@@ -37,7 +39,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: MonzoConfigEntry) -> b
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: MonzoConfigEntry) -> bool:
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Monzo from a config entry."""
|
||||
implementation = await async_get_config_entry_implementation(hass, entry)
|
||||
|
||||
@@ -49,12 +51,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: MonzoConfigEntry) -> boo
|
||||
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: MonzoConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
if unload_ok:
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
return unload_ok
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
"""The Monzo integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
@@ -20,8 +18,6 @@ from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type MonzoConfigEntry = ConfigEntry[MonzoCoordinator]
|
||||
|
||||
|
||||
@dataclass
|
||||
class MonzoData:
|
||||
@@ -34,13 +30,10 @@ class MonzoData:
|
||||
class MonzoCoordinator(DataUpdateCoordinator[MonzoData]):
|
||||
"""Class to manage fetching Monzo data from the API."""
|
||||
|
||||
config_entry: MonzoConfigEntry
|
||||
config_entry: ConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: MonzoConfigEntry,
|
||||
api: AuthenticatedMonzoAPI,
|
||||
self, hass: HomeAssistant, config_entry: ConfigEntry, api: AuthenticatedMonzoAPI
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(
|
||||
|
||||
@@ -11,11 +11,14 @@ from homeassistant.components.sensor import (
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from .coordinator import MonzoConfigEntry, MonzoCoordinator, MonzoData
|
||||
from . import MonzoCoordinator
|
||||
from .const import DOMAIN
|
||||
from .coordinator import MonzoData
|
||||
from .entity import MonzoBaseEntity
|
||||
|
||||
|
||||
@@ -61,11 +64,11 @@ MODEL_POT = "Pot"
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: MonzoConfigEntry,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Defer sensor setup to the shared sensor module."""
|
||||
coordinator = config_entry.runtime_data
|
||||
coordinator: MonzoCoordinator = hass.data[DOMAIN][config_entry.entry_id]
|
||||
|
||||
accounts = [
|
||||
MonzoSensor(
|
||||
|
||||
@@ -43,6 +43,8 @@ PLATFORMS: list[Platform] = [
|
||||
|
||||
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
|
||||
|
||||
type MotionConfigEntry = ConfigEntry[MotionDevice]
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up Motionblinds Bluetooth integration."""
|
||||
@@ -56,7 +58,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: MotionConfigEntry) -> bool:
|
||||
"""Set up Motionblinds Bluetooth device from a config entry."""
|
||||
|
||||
_LOGGER.debug("(%s) Setting up device", entry.data[CONF_MAC_CODE])
|
||||
@@ -95,11 +97,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
)
|
||||
)
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = device
|
||||
|
||||
# Register OptionsFlow update listener
|
||||
entry.async_on_unload(entry.add_update_listener(options_update_listener))
|
||||
|
||||
entry.runtime_data = device
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
# Apply options
|
||||
@@ -112,7 +114,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
async def options_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
async def options_update_listener(
|
||||
hass: HomeAssistant, entry: MotionConfigEntry
|
||||
) -> None:
|
||||
"""Handle options update."""
|
||||
_LOGGER.debug(
|
||||
"(%s) Updated device options: %s", entry.data[CONF_MAC_CODE], entry.options
|
||||
@@ -120,10 +124,10 @@ async def options_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> No
|
||||
await apply_options(hass, entry)
|
||||
|
||||
|
||||
async def apply_options(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
async def apply_options(hass: HomeAssistant, entry: MotionConfigEntry) -> None:
|
||||
"""Apply the options from the OptionsFlow."""
|
||||
|
||||
device: MotionDevice = hass.data[DOMAIN][entry.entry_id]
|
||||
device = entry.runtime_data
|
||||
disconnect_time: float | None = entry.options.get(OPTION_DISCONNECT_TIME, None)
|
||||
permanent_connection: bool = entry.options.get(OPTION_PERMANENT_CONNECTION, False)
|
||||
|
||||
@@ -131,10 +135,7 @@ async def apply_options(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
await device.set_permanent_connection(permanent_connection)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: MotionConfigEntry) -> bool:
|
||||
"""Unload Motionblinds Bluetooth device from a config entry."""
|
||||
|
||||
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
|
||||
return unload_ok
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
@@ -10,12 +10,12 @@ from typing import Any
|
||||
from motionblindsble.device import MotionDevice
|
||||
|
||||
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import ATTR_CONNECT, ATTR_DISCONNECT, ATTR_FAVORITE, CONF_MAC_CODE, DOMAIN
|
||||
from . import MotionConfigEntry
|
||||
from .const import ATTR_CONNECT, ATTR_DISCONNECT, ATTR_FAVORITE, CONF_MAC_CODE
|
||||
from .entity import MotionblindsBLEEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -54,12 +54,12 @@ BUTTON_TYPES: list[MotionblindsBLEButtonEntityDescription] = [
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
entry: MotionConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up button entities based on a config entry."""
|
||||
|
||||
device: MotionDevice = hass.data[DOMAIN][entry.entry_id]
|
||||
device = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
MotionblindsBLEButtonEntity(
|
||||
|
||||
@@ -12,12 +12,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components import bluetooth
|
||||
from homeassistant.components.bluetooth import BluetoothServiceInfoBleak
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
|
||||
from homeassistant.const import CONF_ADDRESS
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
@@ -27,6 +22,7 @@ from homeassistant.helpers.selector import (
|
||||
SelectSelectorMode,
|
||||
)
|
||||
|
||||
from . import MotionConfigEntry
|
||||
from .const import (
|
||||
CONF_BLIND_TYPE,
|
||||
CONF_LOCAL_NAME,
|
||||
@@ -185,7 +181,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: MotionConfigEntry,
|
||||
) -> OptionsFlow:
|
||||
"""Create the options flow."""
|
||||
return OptionsFlowHandler()
|
||||
|
||||
@@ -7,7 +7,6 @@ import logging
|
||||
from typing import Any
|
||||
|
||||
from motionblindsble.const import MotionBlindType, MotionRunningType
|
||||
from motionblindsble.device import MotionDevice
|
||||
|
||||
from homeassistant.components.cover import (
|
||||
ATTR_POSITION,
|
||||
@@ -17,11 +16,11 @@ from homeassistant.components.cover import (
|
||||
CoverEntityDescription,
|
||||
CoverEntityFeature,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import CONF_BLIND_TYPE, CONF_MAC_CODE, DOMAIN, ICON_VERTICAL_BLIND
|
||||
from . import MotionConfigEntry
|
||||
from .const import CONF_BLIND_TYPE, CONF_MAC_CODE, ICON_VERTICAL_BLIND
|
||||
from .entity import MotionblindsBLEEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -62,7 +61,7 @@ BLIND_TYPE_TO_ENTITY_DESCRIPTION: dict[str, MotionblindsBLECoverEntityDescriptio
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
entry: MotionConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up cover entity based on a config entry."""
|
||||
@@ -70,7 +69,7 @@ async def async_setup_entry(
|
||||
cover_class: type[MotionblindsBLECoverEntity] = BLIND_TYPE_TO_CLASS[
|
||||
entry.data[CONF_BLIND_TYPE].upper()
|
||||
]
|
||||
device: MotionDevice = hass.data[DOMAIN][entry.entry_id]
|
||||
device = entry.runtime_data
|
||||
entity_description: MotionblindsBLECoverEntityDescription = (
|
||||
BLIND_TYPE_TO_ENTITY_DESCRIPTION[entry.data[CONF_BLIND_TYPE].upper()]
|
||||
)
|
||||
|
||||
@@ -5,14 +5,11 @@ from __future__ import annotations
|
||||
from collections.abc import Iterable
|
||||
from typing import Any
|
||||
|
||||
from motionblindsble.device import MotionDevice
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_UNIQUE_ID
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import DOMAIN
|
||||
from . import MotionConfigEntry
|
||||
|
||||
CONF_TITLE = "title"
|
||||
|
||||
@@ -24,10 +21,10 @@ TO_REDACT: Iterable[Any] = {
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: ConfigEntry
|
||||
hass: HomeAssistant, entry: MotionConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
device: MotionDevice = hass.data[DOMAIN][entry.entry_id]
|
||||
device = entry.runtime_data
|
||||
|
||||
return async_redact_data(
|
||||
{
|
||||
|
||||
@@ -5,11 +5,11 @@ import logging
|
||||
from motionblindsble.const import MotionBlindType
|
||||
from motionblindsble.device import MotionDevice
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ADDRESS
|
||||
from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceInfo
|
||||
from homeassistant.helpers.entity import Entity, EntityDescription
|
||||
|
||||
from . import MotionConfigEntry
|
||||
from .const import CONF_BLIND_TYPE, CONF_MAC_CODE, MANUFACTURER
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -21,13 +21,10 @@ class MotionblindsBLEEntity(Entity):
|
||||
_attr_has_entity_name = True
|
||||
_attr_should_poll = False
|
||||
|
||||
device: MotionDevice
|
||||
entry: ConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
device: MotionDevice,
|
||||
entry: ConfigEntry,
|
||||
entry: MotionConfigEntry,
|
||||
entity_description: EntityDescription,
|
||||
unique_id_suffix: str | None = None,
|
||||
) -> None:
|
||||
|
||||
@@ -8,12 +8,12 @@ from motionblindsble.const import MotionBlindType, MotionSpeedLevel
|
||||
from motionblindsble.device import MotionDevice
|
||||
|
||||
from homeassistant.components.select import SelectEntity, SelectEntityDescription
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import ATTR_SPEED, CONF_MAC_CODE, DOMAIN
|
||||
from . import MotionConfigEntry
|
||||
from .const import ATTR_SPEED, CONF_MAC_CODE
|
||||
from .entity import MotionblindsBLEEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -33,12 +33,12 @@ SELECT_TYPES: dict[str, SelectEntityDescription] = {
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
entry: MotionConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up select entities based on a config entry."""
|
||||
|
||||
device: MotionDevice = hass.data[DOMAIN][entry.entry_id]
|
||||
device = entry.runtime_data
|
||||
|
||||
if device.blind_type not in {MotionBlindType.CURTAIN, MotionBlindType.VERTICAL}:
|
||||
async_add_entities([SpeedSelect(device, entry, SELECT_TYPES[ATTR_SPEED])])
|
||||
@@ -50,7 +50,7 @@ class SpeedSelect(MotionblindsBLEEntity, SelectEntity):
|
||||
def __init__(
|
||||
self,
|
||||
device: MotionDevice,
|
||||
entry: ConfigEntry,
|
||||
entry: MotionConfigEntry,
|
||||
entity_description: SelectEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the speed select entity."""
|
||||
|
||||
@@ -20,7 +20,6 @@ from homeassistant.components.sensor import (
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
PERCENTAGE,
|
||||
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
|
||||
@@ -30,13 +29,13 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from . import MotionConfigEntry
|
||||
from .const import (
|
||||
ATTR_BATTERY,
|
||||
ATTR_CALIBRATION,
|
||||
ATTR_CONNECTION,
|
||||
ATTR_SIGNAL_STRENGTH,
|
||||
CONF_MAC_CODE,
|
||||
DOMAIN,
|
||||
)
|
||||
from .entity import MotionblindsBLEEntity
|
||||
|
||||
@@ -94,12 +93,12 @@ SENSORS: tuple[MotionblindsBLESensorEntityDescription, ...] = (
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
entry: MotionConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up sensor entities based on a config entry."""
|
||||
|
||||
device: MotionDevice = hass.data[DOMAIN][entry.entry_id]
|
||||
device = entry.runtime_data
|
||||
|
||||
entities: list[SensorEntity] = [
|
||||
MotionblindsBLESensorEntity(device, entry, description)
|
||||
@@ -118,7 +117,7 @@ class MotionblindsBLESensorEntity[_T](MotionblindsBLEEntity, SensorEntity):
|
||||
def __init__(
|
||||
self,
|
||||
device: MotionDevice,
|
||||
entry: ConfigEntry,
|
||||
entry: MotionConfigEntry,
|
||||
entity_description: MotionblindsBLESensorEntityDescription[_T],
|
||||
) -> None:
|
||||
"""Initialize the sensor entity."""
|
||||
@@ -149,7 +148,7 @@ class BatterySensor(MotionblindsBLEEntity, SensorEntity):
|
||||
def __init__(
|
||||
self,
|
||||
device: MotionDevice,
|
||||
entry: ConfigEntry,
|
||||
entry: MotionConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize the sensor entity."""
|
||||
entity_description = SensorEntityDescription(
|
||||
|
||||
@@ -11,7 +11,6 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from .coordinator import NRGkickConfigEntry, NRGkickDataUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.NUMBER,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
|
||||
@@ -1,76 +0,0 @@
|
||||
"""Binary sensor platform for NRGkick."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorEntity,
|
||||
BinarySensorEntityDescription,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import NRGkickConfigEntry, NRGkickData, NRGkickDataUpdateCoordinator
|
||||
from .entity import NRGkickEntity, get_nested_dict_value
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class NRGkickBinarySensorEntityDescription(BinarySensorEntityDescription):
|
||||
"""Class describing NRGkick binary sensor entities."""
|
||||
|
||||
is_on_fn: Callable[[NRGkickData], bool | None]
|
||||
|
||||
|
||||
BINARY_SENSORS: tuple[NRGkickBinarySensorEntityDescription, ...] = (
|
||||
NRGkickBinarySensorEntityDescription(
|
||||
key="charge_permitted",
|
||||
translation_key="charge_permitted",
|
||||
is_on_fn=lambda data: (
|
||||
bool(value)
|
||||
if (
|
||||
value := get_nested_dict_value(
|
||||
data.values, "general", "charge_permitted"
|
||||
)
|
||||
)
|
||||
is not None
|
||||
else None
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
_hass: HomeAssistant,
|
||||
entry: NRGkickConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up NRGkick binary sensors based on a config entry."""
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
NRGkickBinarySensor(coordinator, description) for description in BINARY_SENSORS
|
||||
)
|
||||
|
||||
|
||||
class NRGkickBinarySensor(NRGkickEntity, BinarySensorEntity):
|
||||
"""Representation of a NRGkick binary sensor."""
|
||||
|
||||
entity_description: NRGkickBinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: NRGkickDataUpdateCoordinator,
|
||||
entity_description: NRGkickBinarySensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the binary sensor."""
|
||||
super().__init__(coordinator, entity_description.key)
|
||||
self.entity_description = entity_description
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return the state of the binary sensor."""
|
||||
return self.entity_description.is_on_fn(self.coordinator.data)
|
||||
@@ -14,17 +14,6 @@ from .const import DOMAIN
|
||||
from .coordinator import NRGkickDataUpdateCoordinator
|
||||
|
||||
|
||||
def get_nested_dict_value(data: Any, *keys: str) -> Any:
|
||||
"""Safely get a nested value from dict-like API responses."""
|
||||
current: Any = data
|
||||
for key in keys:
|
||||
try:
|
||||
current = current.get(key)
|
||||
except AttributeError:
|
||||
return None
|
||||
return current
|
||||
|
||||
|
||||
class NRGkickEntity(CoordinatorEntity[NRGkickDataUpdateCoordinator]):
|
||||
"""Base class for NRGkick entities with common device info setup."""
|
||||
|
||||
|
||||
@@ -1,10 +1,5 @@
|
||||
{
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"charge_permitted": {
|
||||
"default": "mdi:ev-station"
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
"current_set": {
|
||||
"default": "mdi:current-ac"
|
||||
|
||||
@@ -45,11 +45,22 @@ from .const import (
|
||||
WARNING_CODE_MAP,
|
||||
)
|
||||
from .coordinator import NRGkickConfigEntry, NRGkickData, NRGkickDataUpdateCoordinator
|
||||
from .entity import NRGkickEntity, get_nested_dict_value
|
||||
from .entity import NRGkickEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
def _get_nested_dict_value(data: Any, *keys: str) -> Any:
|
||||
"""Safely get a nested value from dict-like API responses."""
|
||||
current: Any = data
|
||||
for key in keys:
|
||||
try:
|
||||
current = current.get(key)
|
||||
except AttributeError:
|
||||
return None
|
||||
return current
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class NRGkickSensorEntityDescription(SensorEntityDescription):
|
||||
"""Class describing NRGkick sensor entities."""
|
||||
@@ -148,7 +159,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.info, "general", "rated_current"
|
||||
),
|
||||
),
|
||||
@@ -156,7 +167,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
NRGkickSensorEntityDescription(
|
||||
key="connector_phase_count",
|
||||
translation_key="connector_phase_count",
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.info, "connector", "phase_count"
|
||||
),
|
||||
),
|
||||
@@ -167,7 +178,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.info, "connector", "max_current"
|
||||
),
|
||||
),
|
||||
@@ -178,7 +189,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
options=_enum_options_from_mapping(CONNECTOR_TYPE_MAP),
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: _map_code_to_translation_key(
|
||||
cast(StateType, get_nested_dict_value(data.info, "connector", "type")),
|
||||
cast(StateType, _get_nested_dict_value(data.info, "connector", "type")),
|
||||
CONNECTOR_TYPE_MAP,
|
||||
),
|
||||
),
|
||||
@@ -187,7 +198,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
translation_key="connector_serial",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(data.info, "connector", "serial"),
|
||||
value_fn=lambda data: _get_nested_dict_value(data.info, "connector", "serial"),
|
||||
),
|
||||
# INFO - Grid
|
||||
NRGkickSensorEntityDescription(
|
||||
@@ -197,7 +208,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda data: get_nested_dict_value(data.info, "grid", "voltage"),
|
||||
value_fn=lambda data: _get_nested_dict_value(data.info, "grid", "voltage"),
|
||||
),
|
||||
NRGkickSensorEntityDescription(
|
||||
key="grid_frequency",
|
||||
@@ -206,7 +217,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfFrequency.HERTZ,
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda data: get_nested_dict_value(data.info, "grid", "frequency"),
|
||||
value_fn=lambda data: _get_nested_dict_value(data.info, "grid", "frequency"),
|
||||
),
|
||||
# INFO - Network
|
||||
NRGkickSensorEntityDescription(
|
||||
@@ -214,7 +225,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
translation_key="network_ssid",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(data.info, "network", "ssid"),
|
||||
value_fn=lambda data: _get_nested_dict_value(data.info, "network", "ssid"),
|
||||
),
|
||||
NRGkickSensorEntityDescription(
|
||||
key="network_rssi",
|
||||
@@ -223,7 +234,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: get_nested_dict_value(data.info, "network", "rssi"),
|
||||
value_fn=lambda data: _get_nested_dict_value(data.info, "network", "rssi"),
|
||||
),
|
||||
# INFO - Cellular (optional, only if cellular module is available)
|
||||
NRGkickSensorEntityDescription(
|
||||
@@ -235,7 +246,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
entity_registry_enabled_default=False,
|
||||
requires_sim_module=True,
|
||||
value_fn=lambda data: _map_code_to_translation_key(
|
||||
cast(StateType, get_nested_dict_value(data.info, "cellular", "mode")),
|
||||
cast(StateType, _get_nested_dict_value(data.info, "cellular", "mode")),
|
||||
CELLULAR_MODE_MAP,
|
||||
),
|
||||
),
|
||||
@@ -248,7 +259,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
requires_sim_module=True,
|
||||
value_fn=lambda data: get_nested_dict_value(data.info, "cellular", "rssi"),
|
||||
value_fn=lambda data: _get_nested_dict_value(data.info, "cellular", "rssi"),
|
||||
),
|
||||
NRGkickSensorEntityDescription(
|
||||
key="cellular_operator",
|
||||
@@ -256,7 +267,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
requires_sim_module=True,
|
||||
value_fn=lambda data: get_nested_dict_value(data.info, "cellular", "operator"),
|
||||
value_fn=lambda data: _get_nested_dict_value(data.info, "cellular", "operator"),
|
||||
),
|
||||
# VALUES - Energy
|
||||
NRGkickSensorEntityDescription(
|
||||
@@ -267,7 +278,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_display_precision=3,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "energy", "total_charged_energy"
|
||||
),
|
||||
),
|
||||
@@ -279,7 +290,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
suggested_display_precision=3,
|
||||
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "energy", "charged_energy"
|
||||
),
|
||||
),
|
||||
@@ -291,7 +302,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "charging_voltage"
|
||||
),
|
||||
),
|
||||
@@ -302,7 +313,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "charging_current"
|
||||
),
|
||||
),
|
||||
@@ -315,7 +326,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
suggested_display_precision=2,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "grid_frequency"
|
||||
),
|
||||
),
|
||||
@@ -328,7 +339,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
suggested_display_precision=2,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "peak_power"
|
||||
),
|
||||
),
|
||||
@@ -339,7 +350,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "total_active_power"
|
||||
),
|
||||
),
|
||||
@@ -351,7 +362,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "total_reactive_power"
|
||||
),
|
||||
),
|
||||
@@ -363,7 +374,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "total_apparent_power"
|
||||
),
|
||||
),
|
||||
@@ -375,7 +386,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "total_power_factor"
|
||||
),
|
||||
),
|
||||
@@ -389,7 +400,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
suggested_display_precision=2,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l1", "voltage"
|
||||
),
|
||||
),
|
||||
@@ -400,7 +411,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l1", "current"
|
||||
),
|
||||
),
|
||||
@@ -411,7 +422,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l1", "active_power"
|
||||
),
|
||||
),
|
||||
@@ -423,7 +434,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l1", "reactive_power"
|
||||
),
|
||||
),
|
||||
@@ -435,7 +446,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l1", "apparent_power"
|
||||
),
|
||||
),
|
||||
@@ -447,7 +458,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l1", "power_factor"
|
||||
),
|
||||
),
|
||||
@@ -461,7 +472,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
suggested_display_precision=2,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l2", "voltage"
|
||||
),
|
||||
),
|
||||
@@ -472,7 +483,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l2", "current"
|
||||
),
|
||||
),
|
||||
@@ -483,7 +494,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l2", "active_power"
|
||||
),
|
||||
),
|
||||
@@ -495,7 +506,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l2", "reactive_power"
|
||||
),
|
||||
),
|
||||
@@ -507,7 +518,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l2", "apparent_power"
|
||||
),
|
||||
),
|
||||
@@ -519,7 +530,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l2", "power_factor"
|
||||
),
|
||||
),
|
||||
@@ -533,7 +544,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
suggested_display_precision=2,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l3", "voltage"
|
||||
),
|
||||
),
|
||||
@@ -544,7 +555,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l3", "current"
|
||||
),
|
||||
),
|
||||
@@ -555,7 +566,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l3", "active_power"
|
||||
),
|
||||
),
|
||||
@@ -567,7 +578,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l3", "reactive_power"
|
||||
),
|
||||
),
|
||||
@@ -579,7 +590,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l3", "apparent_power"
|
||||
),
|
||||
),
|
||||
@@ -591,7 +602,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "l3", "power_factor"
|
||||
),
|
||||
),
|
||||
@@ -605,7 +616,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
suggested_display_precision=2,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "powerflow", "n", "current"
|
||||
),
|
||||
),
|
||||
@@ -615,7 +626,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
translation_key="charging_rate",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "general", "charging_rate"
|
||||
),
|
||||
),
|
||||
@@ -627,12 +638,12 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
_seconds_to_stable_timestamp(
|
||||
cast(
|
||||
StateType,
|
||||
get_nested_dict_value(
|
||||
_get_nested_dict_value(
|
||||
data.values, "general", "vehicle_connect_time"
|
||||
),
|
||||
)
|
||||
)
|
||||
if get_nested_dict_value(data.values, "general", "status")
|
||||
if _get_nested_dict_value(data.values, "general", "status")
|
||||
!= ChargingStatus.STANDBY
|
||||
else None
|
||||
),
|
||||
@@ -644,7 +655,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
suggested_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "general", "vehicle_charging_time"
|
||||
),
|
||||
),
|
||||
@@ -654,7 +665,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=_enum_options_from_mapping(STATUS_MAP),
|
||||
value_fn=lambda data: _map_code_to_translation_key(
|
||||
cast(StateType, get_nested_dict_value(data.values, "general", "status")),
|
||||
cast(StateType, _get_nested_dict_value(data.values, "general", "status")),
|
||||
STATUS_MAP,
|
||||
),
|
||||
),
|
||||
@@ -664,7 +675,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "general", "charge_count"
|
||||
),
|
||||
),
|
||||
@@ -676,7 +687,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: _map_code_to_translation_key(
|
||||
cast(
|
||||
StateType, get_nested_dict_value(data.values, "general", "rcd_trigger")
|
||||
StateType, _get_nested_dict_value(data.values, "general", "rcd_trigger")
|
||||
),
|
||||
RCD_TRIGGER_MAP,
|
||||
),
|
||||
@@ -689,7 +700,8 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: _map_code_to_translation_key(
|
||||
cast(
|
||||
StateType, get_nested_dict_value(data.values, "general", "warning_code")
|
||||
StateType,
|
||||
_get_nested_dict_value(data.values, "general", "warning_code"),
|
||||
),
|
||||
WARNING_CODE_MAP,
|
||||
),
|
||||
@@ -702,7 +714,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: _map_code_to_translation_key(
|
||||
cast(
|
||||
StateType, get_nested_dict_value(data.values, "general", "error_code")
|
||||
StateType, _get_nested_dict_value(data.values, "general", "error_code")
|
||||
),
|
||||
ERROR_CODE_MAP,
|
||||
),
|
||||
@@ -715,7 +727,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "temperatures", "housing"
|
||||
),
|
||||
),
|
||||
@@ -726,7 +738,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "temperatures", "connector_l1"
|
||||
),
|
||||
),
|
||||
@@ -737,7 +749,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "temperatures", "connector_l2"
|
||||
),
|
||||
),
|
||||
@@ -748,7 +760,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "temperatures", "connector_l3"
|
||||
),
|
||||
),
|
||||
@@ -759,7 +771,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "temperatures", "domestic_plug_1"
|
||||
),
|
||||
),
|
||||
@@ -770,7 +782,7 @@ SENSORS: tuple[NRGkickSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: get_nested_dict_value(
|
||||
value_fn=lambda data: _get_nested_dict_value(
|
||||
data.values, "temperatures", "domestic_plug_2"
|
||||
),
|
||||
),
|
||||
|
||||
@@ -78,11 +78,6 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"charge_permitted": {
|
||||
"name": "Charge permitted"
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
"current_set": {
|
||||
"name": "Charging current"
|
||||
|
||||
@@ -16,30 +16,23 @@ from onvif.client import (
|
||||
)
|
||||
from onvif.exceptions import ONVIFError
|
||||
from onvif.util import stringify_onvif_error
|
||||
import onvif_parsers
|
||||
from zeep.exceptions import Fault, TransportError, ValidationError, XMLParseError
|
||||
|
||||
from homeassistant.components import webhook
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.network import NoURLAvailableError, get_url
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import DOMAIN, LOGGER
|
||||
from .models import Event, PullPointManagerState, WebHookManagerState
|
||||
from .parsers import PARSERS
|
||||
|
||||
# Topics in this list are ignored because we do not want to create
|
||||
# entities for them.
|
||||
UNHANDLED_TOPICS: set[str] = {"tns1:MediaControl/VideoEncoderConfiguration"}
|
||||
|
||||
ENTITY_CATEGORY_MAPPING: dict[str, EntityCategory] = {
|
||||
"diagnostic": EntityCategory.DIAGNOSTIC,
|
||||
"config": EntityCategory.CONFIG,
|
||||
}
|
||||
|
||||
SUBSCRIPTION_ERRORS = (Fault, TimeoutError, TransportError)
|
||||
CREATE_ERRORS = (
|
||||
ONVIFError,
|
||||
@@ -88,18 +81,6 @@ PULLPOINT_MESSAGE_LIMIT = 100
|
||||
PULLPOINT_COOLDOWN_TIME = 0.75
|
||||
|
||||
|
||||
def _local_datetime_or_none(value: str) -> dt.datetime | None:
    """Convert strings to datetimes, if invalid, return None."""
    # Handle cameras that return times like '0000-00-00T00:00:00Z' (e.g. Hikvision)
    try:
        parsed = dt_util.parse_datetime(value)
    except ValueError:
        return None
    if parsed is None:
        return None
    return dt_util.as_local(parsed)
|
||||
|
||||
|
||||
class EventManager:
|
||||
"""ONVIF Event Manager."""
|
||||
|
||||
@@ -195,10 +176,7 @@ class EventManager:
|
||||
# tns1:RuleEngine/CellMotionDetector/Motion
|
||||
topic = msg.Topic._value_1.rstrip("/.") # noqa: SLF001
|
||||
|
||||
try:
|
||||
event = await onvif_parsers.parse(topic, unique_id, msg)
|
||||
error = None
|
||||
except onvif_parsers.errors.UnknownTopicError:
|
||||
if not (parser := PARSERS.get(topic)):
|
||||
if topic not in UNHANDLED_TOPICS:
|
||||
LOGGER.warning(
|
||||
"%s: No registered handler for event from %s: %s",
|
||||
@@ -208,6 +186,10 @@ class EventManager:
|
||||
)
|
||||
UNHANDLED_TOPICS.add(topic)
|
||||
continue
|
||||
|
||||
try:
|
||||
event = await parser(unique_id, msg)
|
||||
error = None
|
||||
except (AttributeError, KeyError) as e:
|
||||
event = None
|
||||
error = e
|
||||
@@ -220,26 +202,10 @@ class EventManager:
|
||||
error,
|
||||
msg,
|
||||
)
|
||||
continue
|
||||
return
|
||||
|
||||
value = event.value
|
||||
if event.device_class == "timestamp" and isinstance(value, str):
|
||||
value = _local_datetime_or_none(value)
|
||||
|
||||
ha_event = Event(
|
||||
uid=event.uid,
|
||||
name=event.name,
|
||||
platform=event.platform,
|
||||
device_class=event.device_class,
|
||||
unit_of_measurement=event.unit_of_measurement,
|
||||
value=value,
|
||||
entity_category=ENTITY_CATEGORY_MAPPING.get(
|
||||
event.entity_category or ""
|
||||
),
|
||||
entity_enabled=event.entity_enabled,
|
||||
)
|
||||
self.get_uids_by_platform(ha_event.platform).add(ha_event.uid)
|
||||
self._events[ha_event.uid] = ha_event
|
||||
self.get_uids_by_platform(event.platform).add(event.uid)
|
||||
self._events[event.uid] = event
|
||||
|
||||
def get_uid(self, uid: str) -> Event | None:
|
||||
"""Retrieve event for given id."""
|
||||
|
||||
@@ -13,9 +13,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["onvif", "wsdiscovery", "zeep"],
|
||||
"requirements": [
|
||||
"onvif-zeep-async==4.0.4",
|
||||
"onvif_parsers==1.2.2",
|
||||
"WSDiscovery==2.1.2"
|
||||
]
|
||||
"requirements": ["onvif-zeep-async==4.0.4", "WSDiscovery==2.1.2"]
|
||||
}
|
||||
|
||||
755
homeassistant/components/onvif/parsers.py
Normal file
755
homeassistant/components/onvif/parsers.py
Normal file
@@ -0,0 +1,755 @@
|
||||
"""ONVIF event parsers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
import dataclasses
|
||||
import datetime
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.decorator import Registry
|
||||
|
||||
from .models import Event
|
||||
|
||||
PARSERS: Registry[str, Callable[[str, Any], Coroutine[Any, Any, Event | None]]] = (
|
||||
Registry()
|
||||
)
|
||||
|
||||
VIDEO_SOURCE_MAPPING = {
|
||||
"vsconf": "VideoSourceToken",
|
||||
}
|
||||
|
||||
|
||||
def extract_message(msg: Any) -> tuple[str, Any]:
    """Extract the message content and the topic."""
    topic = msg.Topic._value_1  # noqa: SLF001
    payload = msg.Message._value_1  # noqa: SLF001
    return topic, payload
|
||||
|
||||
|
||||
def _normalize_video_source(source: str) -> str:
    """Normalize video source.

    Some cameras do not set the VideoSourceToken correctly so we get duplicate
    sensors, so we need to normalize it to the correct value.
    """
    # Fall back to the raw source when no mapping entry exists.
    mapped = VIDEO_SOURCE_MAPPING.get(source)
    return source if mapped is None else mapped
|
||||
|
||||
|
||||
def local_datetime_or_none(value: str) -> datetime.datetime | None:
    """Convert strings to datetimes, if invalid, return None."""
    # To handle cameras that return times like '0000-00-00T00:00:00Z' (e.g. hikvision)
    try:
        parsed = dt_util.parse_datetime(value)
    except ValueError:
        return None
    return dt_util.as_local(parsed) if parsed is not None else None
|
||||
|
||||
|
||||
@PARSERS.register("tns1:VideoSource/MotionAlarm")
@PARSERS.register("tns1:Device/Trigger/tnshik:AlarmIn")
async def async_parse_motion_alarm(uid: str, msg) -> Event | None:
    """Handle parsing event message.

    Topic: tns1:VideoSource/MotionAlarm
    """
    topic, payload = extract_message(msg)
    origin = payload.Source.SimpleItem[0].Value
    # The ONVIF payload encodes booleans as the strings "true"/"false".
    is_active = payload.Data.SimpleItem[0].Value == "true"
    return Event(
        f"{uid}_{topic}_{origin}",
        "Motion Alarm",
        "binary_sensor",
        "motion",
        None,
        is_active,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:VideoSource/ImageTooBlurry/AnalyticsService")
@PARSERS.register("tns1:VideoSource/ImageTooBlurry/ImagingService")
@PARSERS.register("tns1:VideoSource/ImageTooBlurry/RecordingService")
async def async_parse_image_too_blurry(uid: str, msg) -> Event | None:
    """Handle parsing event message.

    Topic: tns1:VideoSource/ImageTooBlurry/*
    """
    topic, payload = extract_message(msg)
    origin = payload.Source.SimpleItem[0].Value
    # Booleans arrive as the strings "true"/"false" in the ONVIF payload.
    is_active = payload.Data.SimpleItem[0].Value == "true"
    return Event(
        f"{uid}_{topic}_{origin}",
        "Image Too Blurry",
        "binary_sensor",
        "problem",
        None,
        is_active,
        EntityCategory.DIAGNOSTIC,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:VideoSource/ImageTooDark/AnalyticsService")
@PARSERS.register("tns1:VideoSource/ImageTooDark/ImagingService")
@PARSERS.register("tns1:VideoSource/ImageTooDark/RecordingService")
async def async_parse_image_too_dark(uid: str, msg) -> Event | None:
    """Handle parsing event message.

    Topic: tns1:VideoSource/ImageTooDark/*
    """
    topic, payload = extract_message(msg)
    origin = payload.Source.SimpleItem[0].Value
    # Booleans arrive as the strings "true"/"false" in the ONVIF payload.
    is_active = payload.Data.SimpleItem[0].Value == "true"
    return Event(
        f"{uid}_{topic}_{origin}",
        "Image Too Dark",
        "binary_sensor",
        "problem",
        None,
        is_active,
        EntityCategory.DIAGNOSTIC,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:VideoSource/ImageTooBright/AnalyticsService")
@PARSERS.register("tns1:VideoSource/ImageTooBright/ImagingService")
@PARSERS.register("tns1:VideoSource/ImageTooBright/RecordingService")
async def async_parse_image_too_bright(uid: str, msg) -> Event | None:
    """Parse an image-too-bright message into a diagnostic problem sensor event.

    Topic: tns1:VideoSource/ImageTooBright/*
    """
    topic, payload = extract_message(msg)
    source = payload.Source.SimpleItem[0].Value
    is_bright = payload.Data.SimpleItem[0].Value == "true"
    return Event(
        f"{uid}_{topic}_{source}",
        "Image Too Bright",
        "binary_sensor",
        "problem",
        None,
        is_bright,
        EntityCategory.DIAGNOSTIC,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:VideoSource/GlobalSceneChange/AnalyticsService")
@PARSERS.register("tns1:VideoSource/GlobalSceneChange/ImagingService")
@PARSERS.register("tns1:VideoSource/GlobalSceneChange/RecordingService")
async def async_parse_scene_change(uid: str, msg) -> Event | None:
    """Parse a global-scene-change message into a problem binary_sensor event.

    Topic: tns1:VideoSource/GlobalSceneChange/*
    """
    topic, payload = extract_message(msg)
    source = payload.Source.SimpleItem[0].Value
    changed = payload.Data.SimpleItem[0].Value == "true"
    return Event(
        f"{uid}_{topic}_{source}",
        "Global Scene Change",
        "binary_sensor",
        "problem",
        None,
        changed,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:AudioAnalytics/Audio/DetectedSound")
async def async_parse_detected_sound(uid: str, msg) -> Event | None:
    """Parse a detected-sound message into a sound binary_sensor event.

    Topic: tns1:AudioAnalytics/Audio/DetectedSound
    """
    topic, payload = extract_message(msg)
    audio_source = ""
    audio_analytics = ""
    rule = ""
    # Collect the identifying tokens from the message source items.
    for simple_item in payload.Source.SimpleItem:
        item_name = simple_item.Name
        if item_name == "AudioSourceConfigurationToken":
            audio_source = simple_item.Value
        elif item_name == "AudioAnalyticsConfigurationToken":
            audio_analytics = simple_item.Value
        elif item_name == "Rule":
            rule = simple_item.Value

    return Event(
        f"{uid}_{topic}_{audio_source}_{audio_analytics}_{rule}",
        "Detected Sound",
        "binary_sensor",
        "sound",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:RuleEngine/FieldDetector/ObjectsInside")
async def async_parse_field_detector(uid: str, msg) -> Event | None:
    """Parse a field-detector message into a motion binary_sensor event.

    Topic: tns1:RuleEngine/FieldDetector/ObjectsInside
    """
    topic, payload = extract_message(msg)
    video_source = ""
    video_analytics = ""
    rule = ""
    # Collect the identifying tokens from the message source items.
    for simple_item in payload.Source.SimpleItem:
        item_name = simple_item.Name
        if item_name == "VideoSourceConfigurationToken":
            video_source = _normalize_video_source(simple_item.Value)
        elif item_name == "VideoAnalyticsConfigurationToken":
            video_analytics = simple_item.Value
        elif item_name == "Rule":
            rule = simple_item.Value

    return Event(
        f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}",
        "Field Detection",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:RuleEngine/CellMotionDetector/Motion")
async def async_parse_cell_motion_detector(uid: str, msg) -> Event | None:
    """Parse a cell-motion-detector message into a motion binary_sensor event.

    Topic: tns1:RuleEngine/CellMotionDetector/Motion
    """
    topic, payload = extract_message(msg)
    video_source = ""
    video_analytics = ""
    rule = ""
    # Collect the identifying tokens from the message source items.
    for simple_item in payload.Source.SimpleItem:
        item_name = simple_item.Name
        if item_name == "VideoSourceConfigurationToken":
            video_source = _normalize_video_source(simple_item.Value)
        elif item_name == "VideoAnalyticsConfigurationToken":
            video_analytics = simple_item.Value
        elif item_name == "Rule":
            rule = simple_item.Value

    return Event(
        f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}",
        "Cell Motion Detection",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:RuleEngine/MotionRegionDetector/Motion")
async def async_parse_motion_region_detector(uid: str, msg) -> Event | None:
    """Parse a motion-region-detector message into a motion binary_sensor event.

    Topic: tns1:RuleEngine/MotionRegionDetector/Motion
    """
    topic, payload = extract_message(msg)
    video_source = ""
    video_analytics = ""
    rule = ""
    # Collect the identifying tokens from the message source items.
    for simple_item in payload.Source.SimpleItem:
        item_name = simple_item.Name
        if item_name == "VideoSourceConfigurationToken":
            video_source = _normalize_video_source(simple_item.Value)
        elif item_name == "VideoAnalyticsConfigurationToken":
            video_analytics = simple_item.Value
        elif item_name == "Rule":
            rule = simple_item.Value

    # Some cameras report "1" instead of "true" for this topic.
    return Event(
        f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}",
        "Motion Region Detection",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value in ("1", "true"),
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:RuleEngine/TamperDetector/Tamper")
async def async_parse_tamper_detector(uid: str, msg) -> Event | None:
    """Parse a tamper-detector message into a diagnostic problem sensor event.

    Topic: tns1:RuleEngine/TamperDetector/Tamper
    """
    topic, payload = extract_message(msg)
    video_source = ""
    video_analytics = ""
    rule = ""
    # Collect the identifying tokens from the message source items.
    for simple_item in payload.Source.SimpleItem:
        item_name = simple_item.Name
        if item_name == "VideoSourceConfigurationToken":
            video_source = _normalize_video_source(simple_item.Value)
        elif item_name == "VideoAnalyticsConfigurationToken":
            video_analytics = simple_item.Value
        elif item_name == "Rule":
            rule = simple_item.Value

    return Event(
        f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}",
        "Tamper Detection",
        "binary_sensor",
        "problem",
        None,
        payload.Data.SimpleItem[0].Value == "true",
        EntityCategory.DIAGNOSTIC,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:RuleEngine/MyRuleDetector/DogCatDetect")
async def async_parse_dog_cat_detector(uid: str, msg) -> Event | None:
    """Parse a dog/cat-detect message into a pet-detection motion event.

    Topic: tns1:RuleEngine/MyRuleDetector/DogCatDetect
    """
    topic, payload = extract_message(msg)
    video_source = ""
    for simple_item in payload.Source.SimpleItem:
        if simple_item.Name == "Source":
            video_source = _normalize_video_source(simple_item.Value)

    return Event(
        f"{uid}_{topic}_{video_source}",
        "Pet Detection",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:RuleEngine/MyRuleDetector/VehicleDetect")
async def async_parse_vehicle_detector(uid: str, msg) -> Event | None:
    """Parse a vehicle-detect message into a motion binary_sensor event.

    Topic: tns1:RuleEngine/MyRuleDetector/VehicleDetect
    """
    topic, payload = extract_message(msg)
    video_source = ""
    for simple_item in payload.Source.SimpleItem:
        if simple_item.Name == "Source":
            video_source = _normalize_video_source(simple_item.Value)

    return Event(
        f"{uid}_{topic}_{video_source}",
        "Vehicle Detection",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
|
||||
|
||||
|
||||
# Template events for TP-Link (Tapo) smart detections, keyed by the name of the
# SimpleItem carried in the message payload. A parser copies the matching
# template and fills in the uid and boolean value at runtime.
_TAPO_EVENT_TEMPLATES: dict[str, Event] = {
    "IsVehicle": Event(
        uid="",
        name="Vehicle Detection",
        platform="binary_sensor",
        device_class="motion",
    ),
    "IsPeople": Event(
        uid="",
        name="Person Detection",
        platform="binary_sensor",
        device_class="motion",
    ),
    "IsPet": Event(
        uid="",
        name="Pet Detection",
        platform="binary_sensor",
        device_class="motion",
    ),
    "IsLineCross": Event(
        uid="",
        name="Line Detector Crossed",
        platform="binary_sensor",
        device_class="motion",
    ),
    "IsTamper": Event(
        uid="",
        name="Tamper Detection",
        platform="binary_sensor",
        device_class="tamper",
    ),
    "IsIntrusion": Event(
        uid="",
        name="Intrusion Detection",
        platform="binary_sensor",
        device_class="safety",
    ),
}
|
||||
|
||||
|
||||
@PARSERS.register("tns1:RuleEngine/CellMotionDetector/Intrusion")
@PARSERS.register("tns1:RuleEngine/CellMotionDetector/LineCross")
@PARSERS.register("tns1:RuleEngine/CellMotionDetector/People")
@PARSERS.register("tns1:RuleEngine/CellMotionDetector/Tamper")
@PARSERS.register("tns1:RuleEngine/CellMotionDetector/TpSmartEvent")
@PARSERS.register("tns1:RuleEngine/PeopleDetector/People")
@PARSERS.register("tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent")
async def async_parse_tplink_detector(uid: str, msg) -> Event | None:
    """Parse TP-Link smart event messages via the template table.

    Topic: tns1:RuleEngine/CellMotionDetector/Intrusion
    Topic: tns1:RuleEngine/CellMotionDetector/LineCross
    Topic: tns1:RuleEngine/CellMotionDetector/People
    Topic: tns1:RuleEngine/CellMotionDetector/Tamper
    Topic: tns1:RuleEngine/CellMotionDetector/TpSmartEvent
    Topic: tns1:RuleEngine/PeopleDetector/People
    Topic: tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent
    """
    topic, payload = extract_message(msg)
    video_source = ""
    video_analytics = ""
    rule = ""
    # Collect the identifying tokens from the message source items.
    for simple_item in payload.Source.SimpleItem:
        item_name = simple_item.Name
        if item_name == "VideoSourceConfigurationToken":
            video_source = _normalize_video_source(simple_item.Value)
        elif item_name == "VideoAnalyticsConfigurationToken":
            video_analytics = simple_item.Value
        elif item_name == "Rule":
            rule = simple_item.Value

    # Emit an event for the first data item that matches a known template.
    for data_item in payload.Data.SimpleItem:
        template = _TAPO_EVENT_TEMPLATES.get(data_item.Name)
        if template is None:
            continue
        return dataclasses.replace(
            template,
            uid=f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}",
            value=data_item.Value == "true",
        )

    # No recognized data item in this message.
    return None
|
||||
|
||||
|
||||
@PARSERS.register("tns1:RuleEngine/MyRuleDetector/PeopleDetect")
async def async_parse_person_detector(uid: str, msg) -> Event | None:
    """Parse a people-detect message into a motion binary_sensor event.

    Topic: tns1:RuleEngine/MyRuleDetector/PeopleDetect
    """
    topic, payload = extract_message(msg)
    video_source = ""
    for simple_item in payload.Source.SimpleItem:
        if simple_item.Name == "Source":
            video_source = _normalize_video_source(simple_item.Value)

    return Event(
        f"{uid}_{topic}_{video_source}",
        "Person Detection",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:RuleEngine/MyRuleDetector/FaceDetect")
async def async_parse_face_detector(uid: str, msg) -> Event | None:
    """Parse a face-detect message into a motion binary_sensor event.

    Topic: tns1:RuleEngine/MyRuleDetector/FaceDetect
    """
    topic, payload = extract_message(msg)
    video_source = ""
    for simple_item in payload.Source.SimpleItem:
        if simple_item.Name == "Source":
            video_source = _normalize_video_source(simple_item.Value)

    return Event(
        f"{uid}_{topic}_{video_source}",
        "Face Detection",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:RuleEngine/MyRuleDetector/Visitor")
async def async_parse_visitor_detector(uid: str, msg) -> Event | None:
    """Parse a visitor message into an occupancy binary_sensor event.

    Topic: tns1:RuleEngine/MyRuleDetector/Visitor
    """
    topic, payload = extract_message(msg)
    video_source = ""
    for simple_item in payload.Source.SimpleItem:
        if simple_item.Name == "Source":
            video_source = _normalize_video_source(simple_item.Value)

    return Event(
        f"{uid}_{topic}_{video_source}",
        "Visitor Detection",
        "binary_sensor",
        "occupancy",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:RuleEngine/MyRuleDetector/Package")
async def async_parse_package_detector(uid: str, msg) -> Event | None:
    """Parse a package message into an occupancy binary_sensor event.

    Topic: tns1:RuleEngine/MyRuleDetector/Package
    """
    topic, payload = extract_message(msg)
    video_source = ""
    for simple_item in payload.Source.SimpleItem:
        if simple_item.Name == "Source":
            video_source = _normalize_video_source(simple_item.Value)

    return Event(
        f"{uid}_{topic}_{video_source}",
        "Package Detection",
        "binary_sensor",
        "occupancy",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:Device/Trigger/DigitalInput")
async def async_parse_digital_input(uid: str, msg) -> Event | None:
    """Parse a digital-input trigger into a plain binary_sensor event.

    Topic: tns1:Device/Trigger/DigitalInput
    """
    topic, payload = extract_message(msg)
    source = payload.Source.SimpleItem[0].Value
    is_active = payload.Data.SimpleItem[0].Value == "true"
    return Event(
        f"{uid}_{topic}_{source}",
        "Digital Input",
        "binary_sensor",
        None,
        None,
        is_active,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:Device/Trigger/Relay")
async def async_parse_relay(uid: str, msg) -> Event | None:
    """Parse a relay trigger into a plain binary_sensor event.

    Topic: tns1:Device/Trigger/Relay
    """
    topic, payload = extract_message(msg)
    source = payload.Source.SimpleItem[0].Value
    # Relay state is reported as "active"/"inactive" rather than "true"/"false".
    is_active = payload.Data.SimpleItem[0].Value == "active"
    return Event(
        f"{uid}_{topic}_{source}",
        "Relay Triggered",
        "binary_sensor",
        None,
        None,
        is_active,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:Device/HardwareFailure/StorageFailure")
async def async_parse_storage_failure(uid: str, msg) -> Event | None:
    """Parse a storage-failure message into a diagnostic problem sensor event.

    Topic: tns1:Device/HardwareFailure/StorageFailure
    """
    topic, payload = extract_message(msg)
    source = payload.Source.SimpleItem[0].Value
    failed = payload.Data.SimpleItem[0].Value == "true"
    return Event(
        f"{uid}_{topic}_{source}",
        "Storage Failure",
        "binary_sensor",
        "problem",
        None,
        failed,
        EntityCategory.DIAGNOSTIC,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:Monitoring/ProcessorUsage")
async def async_parse_processor_usage(uid: str, msg) -> Event | None:
    """Parse a processor-usage message into a diagnostic percent sensor event.

    Topic: tns1:Monitoring/ProcessorUsage
    """
    topic, payload = extract_message(msg)
    usage = float(payload.Data.SimpleItem[0].Value)
    # Values at or below 1 are treated as a 0..1 fraction and scaled to percent.
    # NOTE(review): an actual reading of exactly 1% would also be scaled to
    # 100% by this heuristic — confirm against camera behavior before changing.
    if usage <= 1:
        usage *= 100

    return Event(
        f"{uid}_{topic}",
        "Processor Usage",
        "sensor",
        None,
        "percent",
        int(usage),
        EntityCategory.DIAGNOSTIC,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:Monitoring/OperatingTime/LastReboot")
async def async_parse_last_reboot(uid: str, msg) -> Event | None:
    """Parse a last-reboot message into a diagnostic timestamp sensor event.

    Topic: tns1:Monitoring/OperatingTime/LastReboot
    """
    topic, payload = extract_message(msg)
    # Invalid timestamps from the camera become None (sensor unknown).
    reboot_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value)
    return Event(
        f"{uid}_{topic}",
        "Last Reboot",
        "sensor",
        "timestamp",
        None,
        reboot_time,
        EntityCategory.DIAGNOSTIC,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:Monitoring/OperatingTime/LastReset")
async def async_parse_last_reset(uid: str, msg) -> Event | None:
    """Parse a last-reset message into a diagnostic timestamp sensor event.

    Topic: tns1:Monitoring/OperatingTime/LastReset
    """
    topic, payload = extract_message(msg)
    # Invalid timestamps from the camera become None (sensor unknown).
    reset_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value)
    return Event(
        f"{uid}_{topic}",
        "Last Reset",
        "sensor",
        "timestamp",
        None,
        reset_time,
        EntityCategory.DIAGNOSTIC,
        entity_enabled=False,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:Monitoring/Backup/Last")
async def async_parse_backup_last(uid: str, msg) -> Event | None:
    """Parse a last-backup message into a diagnostic timestamp sensor event.

    Topic: tns1:Monitoring/Backup/Last
    """
    topic, payload = extract_message(msg)
    # Invalid timestamps from the camera become None (sensor unknown).
    backup_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value)
    return Event(
        f"{uid}_{topic}",
        "Last Backup",
        "sensor",
        "timestamp",
        None,
        backup_time,
        EntityCategory.DIAGNOSTIC,
        entity_enabled=False,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:Monitoring/OperatingTime/LastClockSynchronization")
async def async_parse_last_clock_sync(uid: str, msg) -> Event | None:
    """Parse a clock-synchronization message into a diagnostic timestamp event.

    Topic: tns1:Monitoring/OperatingTime/LastClockSynchronization
    """
    topic, payload = extract_message(msg)
    # Invalid timestamps from the camera become None (sensor unknown).
    sync_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value)
    return Event(
        f"{uid}_{topic}",
        "Last Clock Synchronization",
        "sensor",
        "timestamp",
        None,
        sync_time,
        EntityCategory.DIAGNOSTIC,
        entity_enabled=False,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:RecordingConfig/JobState")
async def async_parse_jobstate(uid: str, msg) -> Event | None:
    """Parse a recording-job-state message into a diagnostic binary_sensor event.

    Topic: tns1:RecordingConfig/JobState
    """
    topic, payload = extract_message(msg)
    source = payload.Source.SimpleItem[0].Value
    # The job state string is "Active" when recording is in progress.
    is_recording = payload.Data.SimpleItem[0].Value == "Active"
    return Event(
        f"{uid}_{topic}_{source}",
        "Recording Job State",
        "binary_sensor",
        None,
        None,
        is_recording,
        EntityCategory.DIAGNOSTIC,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:RuleEngine/LineDetector/Crossed")
async def async_parse_linedetector_crossed(uid: str, msg) -> Event | None:
    """Parse a line-detector-crossed message into a diagnostic counter sensor.

    Topic: tns1:RuleEngine/LineDetector/Crossed
    """
    topic, payload = extract_message(msg)
    video_source = ""
    video_analytics = ""
    rule = ""
    # NOTE(review): unlike the sibling parsers, this one does not run the
    # source token through _normalize_video_source; unifying it would change
    # the generated unique_id for existing entities — confirm before changing.
    for simple_item in payload.Source.SimpleItem:
        item_name = simple_item.Name
        if item_name == "VideoSourceConfigurationToken":
            video_source = simple_item.Value
        elif item_name == "VideoAnalyticsConfigurationToken":
            video_analytics = simple_item.Value
        elif item_name == "Rule":
            rule = simple_item.Value

    return Event(
        f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}",
        "Line Detector Crossed",
        "sensor",
        None,
        None,
        payload.Data.SimpleItem[0].Value,
        EntityCategory.DIAGNOSTIC,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:RuleEngine/CountAggregation/Counter")
async def async_parse_count_aggregation_counter(uid: str, msg) -> Event | None:
    """Parse a count-aggregation message into a diagnostic counter sensor.

    Topic: tns1:RuleEngine/CountAggregation/Counter
    """
    topic, payload = extract_message(msg)
    video_source = ""
    video_analytics = ""
    rule = ""
    # Collect the identifying tokens from the message source items.
    for simple_item in payload.Source.SimpleItem:
        item_name = simple_item.Name
        if item_name == "VideoSourceConfigurationToken":
            video_source = _normalize_video_source(simple_item.Value)
        elif item_name == "VideoAnalyticsConfigurationToken":
            video_analytics = simple_item.Value
        elif item_name == "Rule":
            rule = simple_item.Value

    return Event(
        f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}",
        "Count Aggregation Counter",
        "sensor",
        None,
        None,
        payload.Data.SimpleItem[0].Value,
        EntityCategory.DIAGNOSTIC,
    )
|
||||
|
||||
|
||||
@PARSERS.register("tns1:UserAlarm/IVA/HumanShapeDetect")
async def async_parse_human_shape_detect(uid: str, msg) -> Event | None:
    """Parse a human-shape-detect message into a motion binary_sensor event.

    Topic: tns1:UserAlarm/IVA/HumanShapeDetect
    """
    topic, payload = extract_message(msg)
    video_source = ""
    # Only the first matching source token is used.
    for simple_item in payload.Source.SimpleItem:
        if simple_item.Name == "VideoSourceConfigurationToken":
            video_source = _normalize_video_source(simple_item.Value)
            break

    return Event(
        f"{uid}_{topic}_{video_source}",
        "Human Shape Detect",
        "binary_sensor",
        "motion",
        None,
        payload.Data.SimpleItem[0].Value == "true",
    )
|
||||
@@ -9,6 +9,6 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["openevsehttp"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["python-openevse-http==0.2.5"],
|
||||
"requirements": ["python-openevse-http==0.2.1"],
|
||||
"zeroconf": ["_openevse._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["renault_api"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["renault-api==0.5.6"]
|
||||
"requirements": ["renault-api==0.5.5"]
|
||||
}
|
||||
|
||||
@@ -2,17 +2,35 @@
|
||||
|
||||
import logging
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_IMPORT
|
||||
from homeassistant.const import CONF_CODE, CONF_HOST, CONF_NAME, CONF_PORT, Platform
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .client import SatelClient
|
||||
from .const import (
|
||||
CONF_ARM_HOME_MODE,
|
||||
CONF_DEVICE_PARTITIONS,
|
||||
CONF_OUTPUT_NUMBER,
|
||||
CONF_OUTPUTS,
|
||||
CONF_PARTITION_NUMBER,
|
||||
CONF_SWITCHABLE_OUTPUT_NUMBER,
|
||||
CONF_SWITCHABLE_OUTPUTS,
|
||||
CONF_ZONE_NUMBER,
|
||||
CONF_ZONE_TYPE,
|
||||
CONF_ZONES,
|
||||
DEFAULT_CONF_ARM_HOME_MODE,
|
||||
DEFAULT_PORT,
|
||||
DEFAULT_ZONE_TYPE,
|
||||
DOMAIN,
|
||||
SUBENTRY_TYPE_OUTPUT,
|
||||
SUBENTRY_TYPE_PARTITION,
|
||||
@@ -31,7 +49,104 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS = [Platform.ALARM_CONTROL_PANEL, Platform.BINARY_SENSOR, Platform.SWITCH]
|
||||
|
||||
CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
|
||||
|
||||
ZONE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_ZONE_TYPE, default=DEFAULT_ZONE_TYPE): cv.string,
|
||||
}
|
||||
)
|
||||
EDITABLE_OUTPUT_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string})
|
||||
PARTITION_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_ARM_HOME_MODE, default=DEFAULT_CONF_ARM_HOME_MODE): vol.In(
|
||||
[1, 2, 3]
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def is_alarm_code_necessary(value):
|
||||
"""Check if alarm code must be configured."""
|
||||
if value.get(CONF_SWITCHABLE_OUTPUTS) and CONF_CODE not in value:
|
||||
raise vol.Invalid("You need to specify alarm code to use switchable_outputs")
|
||||
|
||||
return value
|
||||
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.All(
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
vol.Optional(CONF_CODE): cv.string,
|
||||
vol.Optional(CONF_DEVICE_PARTITIONS, default={}): {
|
||||
vol.Coerce(int): PARTITION_SCHEMA
|
||||
},
|
||||
vol.Optional(CONF_ZONES, default={}): {vol.Coerce(int): ZONE_SCHEMA},
|
||||
vol.Optional(CONF_OUTPUTS, default={}): {vol.Coerce(int): ZONE_SCHEMA},
|
||||
vol.Optional(CONF_SWITCHABLE_OUTPUTS, default={}): {
|
||||
vol.Coerce(int): EDITABLE_OUTPUT_SCHEMA
|
||||
},
|
||||
},
|
||||
is_alarm_code_necessary,
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, hass_config: ConfigType) -> bool:
|
||||
"""Set up Satel Integra from YAML."""
|
||||
|
||||
if config := hass_config.get(DOMAIN):
|
||||
hass.async_create_task(_async_import(hass, config))
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def _async_import(hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Process YAML import."""
|
||||
|
||||
if not hass.config_entries.async_entries(DOMAIN):
|
||||
# Start import flow
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_IMPORT}, data=config
|
||||
)
|
||||
|
||||
if result.get("type") == FlowResultType.ABORT:
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_yaml_import_issue_cannot_connect",
|
||||
breaks_in_ha_version="2026.4.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml_import_issue_cannot_connect",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Satel Integra",
|
||||
},
|
||||
)
|
||||
return
|
||||
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_yaml_{DOMAIN}",
|
||||
breaks_in_ha_version="2026.4.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Satel Integra",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: SatelConfigEntry) -> bool:
|
||||
|
||||
@@ -13,6 +13,7 @@ from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
ConfigSubentryData,
|
||||
ConfigSubentryFlow,
|
||||
OptionsFlow,
|
||||
SubentryFlowResult,
|
||||
@@ -23,11 +24,15 @@ from homeassistant.helpers import config_validation as cv, selector
|
||||
|
||||
from .const import (
|
||||
CONF_ARM_HOME_MODE,
|
||||
CONF_DEVICE_PARTITIONS,
|
||||
CONF_OUTPUT_NUMBER,
|
||||
CONF_OUTPUTS,
|
||||
CONF_PARTITION_NUMBER,
|
||||
CONF_SWITCHABLE_OUTPUT_NUMBER,
|
||||
CONF_SWITCHABLE_OUTPUTS,
|
||||
CONF_ZONE_NUMBER,
|
||||
CONF_ZONE_TYPE,
|
||||
CONF_ZONES,
|
||||
DEFAULT_CONF_ARM_HOME_MODE,
|
||||
DEFAULT_PORT,
|
||||
DOMAIN,
|
||||
@@ -48,7 +53,6 @@ CONNECTION_SCHEMA = vol.Schema(
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
CODE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_CODE): cv.string,
|
||||
@@ -139,6 +143,97 @@ class SatelConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
step_id="user", data_schema=CONNECTION_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_import(
|
||||
self, import_config: dict[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by import."""
|
||||
|
||||
valid = await self.test_connection(
|
||||
import_config[CONF_HOST], import_config.get(CONF_PORT, DEFAULT_PORT)
|
||||
)
|
||||
|
||||
if valid:
|
||||
subentries: list[ConfigSubentryData] = []
|
||||
|
||||
for partition_number, partition_data in import_config.get(
|
||||
CONF_DEVICE_PARTITIONS, {}
|
||||
).items():
|
||||
subentries.append(
|
||||
{
|
||||
"subentry_type": SUBENTRY_TYPE_PARTITION,
|
||||
"title": f"{partition_data[CONF_NAME]} ({partition_number})",
|
||||
"unique_id": f"{SUBENTRY_TYPE_PARTITION}_{partition_number}",
|
||||
"data": {
|
||||
CONF_NAME: partition_data[CONF_NAME],
|
||||
CONF_ARM_HOME_MODE: partition_data.get(
|
||||
CONF_ARM_HOME_MODE, DEFAULT_CONF_ARM_HOME_MODE
|
||||
),
|
||||
CONF_PARTITION_NUMBER: partition_number,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
for zone_number, zone_data in import_config.get(CONF_ZONES, {}).items():
|
||||
subentries.append(
|
||||
{
|
||||
"subentry_type": SUBENTRY_TYPE_ZONE,
|
||||
"title": f"{zone_data[CONF_NAME]} ({zone_number})",
|
||||
"unique_id": f"{SUBENTRY_TYPE_ZONE}_{zone_number}",
|
||||
"data": {
|
||||
CONF_NAME: zone_data[CONF_NAME],
|
||||
CONF_ZONE_NUMBER: zone_number,
|
||||
CONF_ZONE_TYPE: zone_data.get(
|
||||
CONF_ZONE_TYPE, BinarySensorDeviceClass.MOTION
|
||||
),
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
for output_number, output_data in import_config.get(
|
||||
CONF_OUTPUTS, {}
|
||||
).items():
|
||||
subentries.append(
|
||||
{
|
||||
"subentry_type": SUBENTRY_TYPE_OUTPUT,
|
||||
"title": f"{output_data[CONF_NAME]} ({output_number})",
|
||||
"unique_id": f"{SUBENTRY_TYPE_OUTPUT}_{output_number}",
|
||||
"data": {
|
||||
CONF_NAME: output_data[CONF_NAME],
|
||||
CONF_OUTPUT_NUMBER: output_number,
|
||||
CONF_ZONE_TYPE: output_data.get(
|
||||
CONF_ZONE_TYPE, BinarySensorDeviceClass.MOTION
|
||||
),
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
for switchable_output_number, switchable_output_data in import_config.get(
|
||||
CONF_SWITCHABLE_OUTPUTS, {}
|
||||
).items():
|
||||
subentries.append(
|
||||
{
|
||||
"subentry_type": SUBENTRY_TYPE_SWITCHABLE_OUTPUT,
|
||||
"title": f"{switchable_output_data[CONF_NAME]} ({switchable_output_number})",
|
||||
"unique_id": f"{SUBENTRY_TYPE_SWITCHABLE_OUTPUT}_{switchable_output_number}",
|
||||
"data": {
|
||||
CONF_NAME: switchable_output_data[CONF_NAME],
|
||||
CONF_SWITCHABLE_OUTPUT_NUMBER: switchable_output_number,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
return self.async_create_entry(
|
||||
title=import_config[CONF_HOST],
|
||||
data={
|
||||
CONF_HOST: import_config[CONF_HOST],
|
||||
CONF_PORT: import_config.get(CONF_PORT, DEFAULT_PORT),
|
||||
},
|
||||
options={CONF_CODE: import_config.get(CONF_CODE)},
|
||||
subentries=subentries,
|
||||
)
|
||||
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
|
||||
async def test_connection(self, host: str, port: int) -> bool:
|
||||
"""Test a connection to the Satel alarm."""
|
||||
controller = AsyncSatel(host, port, self.hass.loop)
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
DEFAULT_CONF_ARM_HOME_MODE = 1
|
||||
DEFAULT_PORT = 7094
|
||||
DEFAULT_ZONE_TYPE = "motion"
|
||||
|
||||
DOMAIN = "satel_integra"
|
||||
|
||||
@@ -15,7 +16,11 @@ CONF_ZONE_NUMBER = "zone_number"
|
||||
CONF_OUTPUT_NUMBER = "output_number"
|
||||
CONF_SWITCHABLE_OUTPUT_NUMBER = "switchable_output_number"
|
||||
|
||||
CONF_DEVICE_PARTITIONS = "partitions"
|
||||
CONF_ARM_HOME_MODE = "arm_home_mode"
|
||||
CONF_ZONE_TYPE = "type"
|
||||
CONF_ZONES = "zones"
|
||||
CONF_OUTPUTS = "outputs"
|
||||
CONF_SWITCHABLE_OUTPUTS = "switchable_outputs"
|
||||
|
||||
ZONES = "zones"
|
||||
|
||||
@@ -167,6 +167,12 @@
|
||||
"message": "Cannot control switchable outputs because no user code is configured for this Satel Integra entry. Configure a code in the integration options to enable output control."
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_yaml_import_issue_cannot_connect": {
|
||||
"description": "Configuring {integration_title} using YAML is being removed but there was an connection error importing your existing configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the `{domain}` YAML configuration from your configuration.yaml file and add the {integration_title} integration manually.",
|
||||
"title": "YAML import failed due to a connection error"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pysaunum"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pysaunum==0.6.0"]
|
||||
"requirements": ["pysaunum==0.5.0"]
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ from . import DOMAIN
|
||||
class SceneActivatedTrigger(EntityTriggerBase):
|
||||
"""Trigger for scene entity activations."""
|
||||
|
||||
_domains = {DOMAIN}
|
||||
_domain = DOMAIN
|
||||
_schema = ENTITY_STATE_TRIGGER_SCHEMA
|
||||
|
||||
def is_valid_transition(self, from_state: State, to_state: State) -> bool:
|
||||
|
||||
@@ -17,11 +17,13 @@ from homeassistant.components import climate as FanState
|
||||
from homeassistant.components.climate import (
|
||||
ATTR_FAN_MODE,
|
||||
ATTR_TEMPERATURE,
|
||||
PRESET_AWAY,
|
||||
PRESET_BOOST,
|
||||
PRESET_COMFORT,
|
||||
PRESET_ECO,
|
||||
PRESET_HOME,
|
||||
PRESET_NONE,
|
||||
PRESET_SLEEP,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACMode,
|
||||
@@ -38,11 +40,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
|
||||
from . import SwitchbotCloudData, SwitchBotCoordinator
|
||||
from .const import (
|
||||
CLIMATE_PRESET_SCHEDULE,
|
||||
DOMAIN,
|
||||
SMART_RADIATOR_THERMOSTAT_AFTER_COMMAND_REFRESH,
|
||||
)
|
||||
from .const import DOMAIN, SMART_RADIATOR_THERMOSTAT_AFTER_COMMAND_REFRESH
|
||||
from .entity import SwitchBotCloudEntity
|
||||
|
||||
_LOGGER = getLogger(__name__)
|
||||
@@ -208,7 +206,6 @@ RADIATOR_PRESET_MODE_MAP: dict[str, SmartRadiatorThermostatMode] = {
|
||||
PRESET_BOOST: SmartRadiatorThermostatMode.FAST_HEATING,
|
||||
PRESET_COMFORT: SmartRadiatorThermostatMode.COMFORT,
|
||||
PRESET_HOME: SmartRadiatorThermostatMode.MANUAL,
|
||||
CLIMATE_PRESET_SCHEDULE: SmartRadiatorThermostatMode.SCHEDULE,
|
||||
}
|
||||
|
||||
RADIATOR_HA_PRESET_MODE_MAP = {
|
||||
@@ -230,10 +227,15 @@ class SwitchBotCloudSmartRadiatorThermostat(SwitchBotCloudEntity, ClimateEntity)
|
||||
_attr_target_temperature_step = PRECISION_TENTHS
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
|
||||
_attr_preset_modes = list(RADIATOR_PRESET_MODE_MAP)
|
||||
|
||||
_attr_translation_key = "smart_radiator_thermostat"
|
||||
|
||||
_attr_preset_modes = [
|
||||
PRESET_NONE,
|
||||
PRESET_ECO,
|
||||
PRESET_AWAY,
|
||||
PRESET_BOOST,
|
||||
PRESET_COMFORT,
|
||||
PRESET_HOME,
|
||||
PRESET_SLEEP,
|
||||
]
|
||||
_attr_preset_mode = PRESET_HOME
|
||||
|
||||
_attr_hvac_modes = [
|
||||
@@ -298,7 +300,7 @@ class SwitchBotCloudSmartRadiatorThermostat(SwitchBotCloudEntity, ClimateEntity)
|
||||
SmartRadiatorThermostatMode(mode)
|
||||
]
|
||||
|
||||
if self.preset_mode == PRESET_NONE:
|
||||
if self.preset_mode in [PRESET_NONE, PRESET_AWAY]:
|
||||
self._attr_hvac_mode = HVACMode.OFF
|
||||
else:
|
||||
self._attr_hvac_mode = HVACMode.HEAT
|
||||
|
||||
@@ -17,9 +17,6 @@ VACUUM_FAN_SPEED_STANDARD = "standard"
|
||||
VACUUM_FAN_SPEED_STRONG = "strong"
|
||||
VACUUM_FAN_SPEED_MAX = "max"
|
||||
|
||||
|
||||
CLIMATE_PRESET_SCHEDULE = "schedule"
|
||||
|
||||
AFTER_COMMAND_REFRESH = 5
|
||||
COVER_ENTITY_AFTER_COMMAND_REFRESH = 10
|
||||
SMART_RADIATOR_THERMOSTAT_AFTER_COMMAND_REFRESH = 30
|
||||
|
||||
@@ -8,17 +8,6 @@
|
||||
"default": "mdi:chevron-left-box"
|
||||
}
|
||||
},
|
||||
"climate": {
|
||||
"smart_radiator_thermostat": {
|
||||
"state_attributes": {
|
||||
"preset_mode": {
|
||||
"state": {
|
||||
"schedule": "mdi:clock-outline"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"fan": {
|
||||
"air_purifier": {
|
||||
"default": "mdi:air-purifier",
|
||||
|
||||
@@ -26,17 +26,6 @@
|
||||
"name": "Previous"
|
||||
}
|
||||
},
|
||||
"climate": {
|
||||
"smart_radiator_thermostat": {
|
||||
"state_attributes": {
|
||||
"preset_mode": {
|
||||
"state": {
|
||||
"schedule": "Schedule"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"fan": {
|
||||
"air_purifier": {
|
||||
"state_attributes": {
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aiotankerkoenig"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aiotankerkoenig==0.5.1"]
|
||||
"requirements": ["aiotankerkoenig==0.4.2"]
|
||||
}
|
||||
|
||||
@@ -62,8 +62,8 @@ _LOGGER = logging.getLogger(__name__)
|
||||
DESCRIPTION_PLACEHOLDERS: dict[str, str] = {
|
||||
"botfather_username": "@BotFather",
|
||||
"botfather_url": "https://t.me/botfather",
|
||||
"id_bot_username": "@id_bot",
|
||||
"id_bot_url": "https://t.me/id_bot",
|
||||
"getidsbot_username": "@GetIDs Bot",
|
||||
"getidsbot_url": "https://t.me/getidsbot",
|
||||
"socks_url": "socks5://username:password@proxy_ip:proxy_port",
|
||||
# used in advanced settings section
|
||||
"default_api_endpoint": DEFAULT_API_ENDPOINT,
|
||||
@@ -611,15 +611,10 @@ class AllowedChatIdsSubEntryFlowHandler(ConfigSubentryFlow):
|
||||
|
||||
errors["base"] = "chat_not_found"
|
||||
|
||||
service: TelegramNotificationService = self._get_entry().runtime_data
|
||||
description_placeholders = DESCRIPTION_PLACEHOLDERS.copy()
|
||||
description_placeholders["bot_username"] = f"@{service.bot.username}"
|
||||
description_placeholders["bot_url"] = f"https://t.me/{service.bot.username}"
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema({vol.Required(CONF_CHAT_ID): vol.Coerce(int)}),
|
||||
description_placeholders=description_placeholders,
|
||||
description_placeholders=DESCRIPTION_PLACEHOLDERS,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
@@ -105,7 +105,7 @@
|
||||
"data_description": {
|
||||
"chat_id": "ID representing the user or group chat to which messages can be sent."
|
||||
},
|
||||
"description": "Before you proceed, send any message to your bot: [{bot_username}]({bot_url}). This is required because Telegram prevents bots from initiating chats with users.\n\nThen follow these steps to get your chat ID:\n\n1. Open Telegram and start a chat with [{id_bot_username}]({id_bot_url}).\n1. Send any message to the bot.\n1. Your chat ID is in the `ID` field of the bot's response.",
|
||||
"description": "To get your chat ID, follow these steps:\n\n1. Open Telegram and start a chat with [{getidsbot_username}]({getidsbot_url}).\n1. Send any message to the bot.\n1. Your chat ID is in the `id` field of the bot's response.",
|
||||
"title": "Add chat"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ from .const import DOMAIN
|
||||
class TextChangedTrigger(EntityTriggerBase):
|
||||
"""Trigger for text entity when its content changes."""
|
||||
|
||||
_domains = {DOMAIN}
|
||||
_domain = DOMAIN
|
||||
_schema = ENTITY_STATE_TRIGGER_SCHEMA
|
||||
|
||||
def is_valid_state(self, state: State) -> bool:
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["uhooapi==1.2.8"]
|
||||
"requirements": ["uhooapi==1.2.6"]
|
||||
}
|
||||
|
||||
@@ -23,7 +23,6 @@ from homeassistant.const import ( # noqa: F401 # STATE_PAUSED/IDLE are API
|
||||
STATE_ON,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, issue_registry as ir
|
||||
from homeassistant.helpers.entity import Entity, EntityDescription
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
@@ -64,6 +63,7 @@ SERVICE_STOP = "stop"
|
||||
DEFAULT_NAME = "Vacuum cleaner robot"
|
||||
|
||||
ISSUE_SEGMENTS_CHANGED = "segments_changed"
|
||||
ISSUE_SEGMENTS_MAPPING_NOT_CONFIGURED = "segments_mapping_not_configured"
|
||||
|
||||
_BATTERY_DEPRECATION_IGNORED_PLATFORMS = ("template",)
|
||||
|
||||
@@ -438,14 +438,7 @@ class StateVacuumEntity(
|
||||
)
|
||||
|
||||
options: Mapping[str, Any] = self.registry_entry.options.get(DOMAIN, {})
|
||||
area_mapping: dict[str, list[str]] | None = options.get("area_mapping")
|
||||
|
||||
if area_mapping is None:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="area_mapping_not_configured",
|
||||
translation_placeholders={"entity_id": self.entity_id},
|
||||
)
|
||||
area_mapping: dict[str, list[str]] = options.get("area_mapping", {})
|
||||
|
||||
# We use a dict to preserve the order of segments.
|
||||
segment_ids: dict[str, None] = {}
|
||||
|
||||
@@ -89,11 +89,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"area_mapping_not_configured": {
|
||||
"message": "Area mapping is not configured for `{entity_id}`. Configure the segment-to-area mapping before using this action."
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"segments_changed": {
|
||||
"description": "",
|
||||
|
||||
@@ -35,10 +35,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
and entry.data[CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.TV
|
||||
):
|
||||
store: Store[list[dict[str, Any]]] = Store(hass, 1, DOMAIN)
|
||||
coordinator = VizioAppsDataUpdateCoordinator(hass, store)
|
||||
await coordinator.async_setup()
|
||||
coordinator = VizioAppsDataUpdateCoordinator(hass, entry, store)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
hass.data[DOMAIN][CONF_APPS] = coordinator
|
||||
await coordinator.async_refresh()
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
@@ -54,8 +53,7 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
|
||||
entry.data[CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.TV
|
||||
for entry in hass.config_entries.async_loaded_entries(DOMAIN)
|
||||
):
|
||||
if coordinator := hass.data[DOMAIN].pop(CONF_APPS, None):
|
||||
await coordinator.async_shutdown()
|
||||
hass.data[DOMAIN].pop(CONF_APPS, None)
|
||||
|
||||
if not hass.data[DOMAIN]:
|
||||
hass.data.pop(DOMAIN)
|
||||
|
||||
@@ -9,6 +9,7 @@ from typing import Any
|
||||
from pyvizio.const import APPS
|
||||
from pyvizio.util import gen_apps_list_from_url
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.storage import Store
|
||||
@@ -22,16 +23,19 @@ _LOGGER = logging.getLogger(__name__)
|
||||
class VizioAppsDataUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]]):
|
||||
"""Define an object to hold Vizio app config data."""
|
||||
|
||||
config_entry: ConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
store: Store[list[dict[str, Any]]],
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=None,
|
||||
config_entry=config_entry,
|
||||
name=DOMAIN,
|
||||
update_interval=timedelta(days=1),
|
||||
)
|
||||
@@ -39,9 +43,8 @@ class VizioAppsDataUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]]
|
||||
self.fail_threshold = 10
|
||||
self.store = store
|
||||
|
||||
async def async_setup(self) -> None:
|
||||
"""Load initial data from storage and register shutdown."""
|
||||
await self.async_register_shutdown()
|
||||
async def _async_setup(self) -> None:
|
||||
"""Refresh data for the first time when a config entry is setup."""
|
||||
self.data = await self.store.async_load() or APPS
|
||||
|
||||
async def _async_update_data(self) -> list[dict[str, Any]]:
|
||||
|
||||
@@ -111,7 +111,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: XboxConfigEntry) -> bo
|
||||
# Migrate unique_id from `xbox` to account xuid and
|
||||
# change generic entry name to user's gamertag
|
||||
try:
|
||||
own = await client.people.get_friend_by_xuid(client.xuid)
|
||||
own = await client.people.get_friends_by_xuid(client.xuid)
|
||||
except TimeoutException as e:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
|
||||
@@ -74,7 +74,7 @@ class OAuth2FlowHandler(
|
||||
|
||||
client = XboxLiveClient(auth)
|
||||
|
||||
me = await client.people.get_friend_by_xuid(client.xuid)
|
||||
me = await client.people.get_friends_by_xuid(client.xuid)
|
||||
|
||||
await self.async_set_unique_id(client.xuid)
|
||||
|
||||
|
||||
@@ -213,10 +213,10 @@ class XboxPresenceCoordinator(XboxBaseCoordinator[XboxData]):
|
||||
async def update_data(self) -> XboxData:
|
||||
"""Fetch presence data."""
|
||||
|
||||
me = await self.client.people.get_friend_by_xuid(self.client.xuid)
|
||||
batch = await self.client.people.get_friends_by_xuid(self.client.xuid)
|
||||
friends = await self.client.people.get_friends_own()
|
||||
|
||||
presence_data = {self.client.xuid: me.people[0]}
|
||||
presence_data = {self.client.xuid: batch.people[0]}
|
||||
presence_data.update(
|
||||
{
|
||||
friend.xuid: friend
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "platinum",
|
||||
|
||||
"requirements": ["python-xbox==0.2.0"],
|
||||
"requirements": ["python-xbox==0.1.3"],
|
||||
"ssdp": [
|
||||
{
|
||||
"manufacturer": "Microsoft Corporation",
|
||||
|
||||
@@ -28,7 +28,7 @@ from .helpers import (
|
||||
)
|
||||
from .models import ZwaveJSConfigEntry
|
||||
|
||||
KEYS_TO_REDACT = {"homeId", "location", "dsk"}
|
||||
KEYS_TO_REDACT = {"homeId", "location"}
|
||||
|
||||
VALUES_TO_REDACT = (
|
||||
ZwaveValueMatcher(property_="userCode", command_class=CommandClass.USER_CODE),
|
||||
|
||||
@@ -381,20 +381,3 @@ class DependencyError(HomeAssistantError):
|
||||
f"Could not setup dependencies: {', '.join(failed_dependencies)}",
|
||||
)
|
||||
self.failed_dependencies = failed_dependencies
|
||||
|
||||
|
||||
class UnsupportedStorageVersionError(HomeAssistantError):
|
||||
"""Raised when a storage file has a newer major version than expected."""
|
||||
|
||||
def __init__(
|
||||
self, storage_key: str, found_version: int, max_supported_version: int
|
||||
) -> None:
|
||||
"""Initialize error."""
|
||||
super().__init__(
|
||||
f"Storage file {storage_key} has version {found_version}"
|
||||
f" which is newer than the max supported version {max_supported_version};"
|
||||
" upgrade Home Assistant or restore from a backup",
|
||||
)
|
||||
self.storage_key = storage_key
|
||||
self.found_version = found_version
|
||||
self.max_supported_version = max_supported_version
|
||||
|
||||
5
homeassistant/generated/bluetooth.py
generated
5
homeassistant/generated/bluetooth.py
generated
@@ -212,11 +212,6 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [
|
||||
"domain": "govee_ble",
|
||||
"local_name": "GVH5110*",
|
||||
},
|
||||
{
|
||||
"connectable": False,
|
||||
"domain": "govee_ble",
|
||||
"local_name": "GV5140*",
|
||||
},
|
||||
{
|
||||
"connectable": False,
|
||||
"domain": "govee_ble",
|
||||
|
||||
@@ -7338,12 +7338,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"ubisys": {
|
||||
"name": "Ubisys",
|
||||
"iot_standards": [
|
||||
"zigbee"
|
||||
]
|
||||
},
|
||||
"ubiwizz": {
|
||||
"name": "Ubiwizz",
|
||||
"integration_type": "virtual",
|
||||
|
||||
@@ -447,7 +447,7 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]):
|
||||
EventAreaRegistryUpdatedData(action="reorder", area_id=None),
|
||||
)
|
||||
|
||||
async def _async_load(self) -> None:
|
||||
async def async_load(self) -> None:
|
||||
"""Load the area registry."""
|
||||
self._async_setup_cleanup()
|
||||
|
||||
@@ -549,10 +549,10 @@ def async_get(hass: HomeAssistant) -> AreaRegistry:
|
||||
return AreaRegistry(hass)
|
||||
|
||||
|
||||
async def async_load(hass: HomeAssistant, *, load_empty: bool = False) -> None:
|
||||
async def async_load(hass: HomeAssistant) -> None:
|
||||
"""Load area registry."""
|
||||
assert DATA_REGISTRY not in hass.data
|
||||
await async_get(hass).async_load(load_empty=load_empty)
|
||||
await async_get(hass).async_load()
|
||||
|
||||
|
||||
@callback
|
||||
|
||||
@@ -77,7 +77,7 @@ class CategoryRegistryStore(Store[CategoryRegistryStoreData]):
|
||||
) -> CategoryRegistryStoreData:
|
||||
"""Migrate to the new version."""
|
||||
if old_major_version > STORAGE_VERSION_MAJOR:
|
||||
raise NotImplementedError
|
||||
raise ValueError("Can't migrate to future version")
|
||||
|
||||
if old_major_version == 1:
|
||||
if old_minor_version < 2:
|
||||
@@ -204,7 +204,7 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]):
|
||||
|
||||
return new
|
||||
|
||||
async def _async_load(self) -> None:
|
||||
async def async_load(self) -> None:
|
||||
"""Load the category registry."""
|
||||
data = await self._store.async_load()
|
||||
category_entries: dict[str, dict[str, CategoryEntry]] = {}
|
||||
@@ -265,7 +265,7 @@ def async_get(hass: HomeAssistant) -> CategoryRegistry:
|
||||
return CategoryRegistry(hass)
|
||||
|
||||
|
||||
async def async_load(hass: HomeAssistant, *, load_empty: bool = False) -> None:
|
||||
async def async_load(hass: HomeAssistant) -> None:
|
||||
"""Load category registry."""
|
||||
assert DATA_REGISTRY not in hass.data
|
||||
await async_get(hass).async_load(load_empty=load_empty)
|
||||
await async_get(hass).async_load()
|
||||
|
||||
@@ -1461,7 +1461,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]):
|
||||
)
|
||||
self.async_schedule_save()
|
||||
|
||||
async def _async_load(self) -> None:
|
||||
async def async_load(self) -> None:
|
||||
"""Load the device registry."""
|
||||
async_setup_cleanup(self.hass, self)
|
||||
|
||||
@@ -1706,10 +1706,10 @@ def async_get(hass: HomeAssistant) -> DeviceRegistry:
|
||||
return DeviceRegistry(hass)
|
||||
|
||||
|
||||
async def async_load(hass: HomeAssistant, *, load_empty: bool = False) -> None:
|
||||
async def async_load(hass: HomeAssistant) -> None:
|
||||
"""Load device registry."""
|
||||
assert DATA_REGISTRY not in hass.data
|
||||
await async_get(hass).async_load(load_empty=load_empty)
|
||||
await async_get(hass).async_load()
|
||||
|
||||
|
||||
@callback
|
||||
|
||||
@@ -1678,7 +1678,7 @@ class EntityRegistry(BaseRegistry):
|
||||
new_options[domain] = options
|
||||
return self._async_update_entity(entity_id, options=new_options)
|
||||
|
||||
async def _async_load(self) -> None:
|
||||
async def async_load(self) -> None:
|
||||
"""Load the entity registry."""
|
||||
_async_setup_cleanup(self.hass, self)
|
||||
_async_setup_entity_restore(self.hass, self)
|
||||
@@ -1945,10 +1945,10 @@ def async_get(hass: HomeAssistant) -> EntityRegistry:
|
||||
return EntityRegistry(hass)
|
||||
|
||||
|
||||
async def async_load(hass: HomeAssistant, *, load_empty: bool = False) -> None:
|
||||
async def async_load(hass: HomeAssistant) -> None:
|
||||
"""Load entity registry."""
|
||||
assert DATA_REGISTRY not in hass.data
|
||||
await async_get(hass).async_load(load_empty=load_empty)
|
||||
await async_get(hass).async_load()
|
||||
|
||||
|
||||
@callback
|
||||
|
||||
@@ -94,7 +94,7 @@ class FloorRegistryStore(Store[FloorRegistryStoreData]):
|
||||
) -> FloorRegistryStoreData:
|
||||
"""Migrate to the new version."""
|
||||
if old_major_version > STORAGE_VERSION_MAJOR:
|
||||
raise NotImplementedError
|
||||
raise ValueError("Can't migrate to future version")
|
||||
|
||||
if old_major_version == 1:
|
||||
if old_minor_version < 2:
|
||||
@@ -307,7 +307,7 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]):
|
||||
_EventFloorRegistryUpdatedData_Reorder(action="reorder"),
|
||||
)
|
||||
|
||||
async def _async_load(self) -> None:
|
||||
async def async_load(self) -> None:
|
||||
"""Load the floor registry."""
|
||||
data = await self._store.async_load()
|
||||
floors = FloorRegistryItems()
|
||||
@@ -353,7 +353,7 @@ def async_get(hass: HomeAssistant) -> FloorRegistry:
|
||||
return FloorRegistry(hass)
|
||||
|
||||
|
||||
async def async_load(hass: HomeAssistant, *, load_empty: bool = False) -> None:
|
||||
async def async_load(hass: HomeAssistant) -> None:
|
||||
"""Load floor registry."""
|
||||
assert DATA_REGISTRY not in hass.data
|
||||
await async_get(hass).async_load(load_empty=load_empty)
|
||||
await async_get(hass).async_load()
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from contextlib import suppress
|
||||
import importlib
|
||||
import logging
|
||||
import sys
|
||||
@@ -52,10 +53,11 @@ async def async_import_module(hass: HomeAssistant, name: str) -> ModuleType:
|
||||
if isinstance(ex, ModuleNotFoundError):
|
||||
failure_cache[name] = True
|
||||
import_future.set_exception(ex)
|
||||
# Set the exception retrieved flag on the future since
|
||||
# it will never be retrieved unless there
|
||||
# are concurrent calls
|
||||
import_future.exception()
|
||||
with suppress(BaseException):
|
||||
# Set the exception retrieved flag on the future since
|
||||
# it will never be retrieved unless there
|
||||
# are concurrent calls
|
||||
import_future.result()
|
||||
raise
|
||||
finally:
|
||||
del import_futures[name]
|
||||
|
||||
@@ -251,7 +251,7 @@ class IssueRegistry(BaseRegistry):
|
||||
"""
|
||||
self._store.make_read_only()
|
||||
|
||||
async def _async_load(self) -> None:
|
||||
async def async_load(self) -> None:
|
||||
"""Load the issue registry."""
|
||||
data = await self._store.async_load()
|
||||
|
||||
@@ -314,17 +314,12 @@ def async_get(hass: HomeAssistant) -> IssueRegistry:
|
||||
return IssueRegistry(hass)
|
||||
|
||||
|
||||
async def async_load(
|
||||
hass: HomeAssistant,
|
||||
*,
|
||||
read_only: bool = False,
|
||||
load_empty: bool = False,
|
||||
) -> None:
|
||||
async def async_load(hass: HomeAssistant, *, read_only: bool = False) -> None:
|
||||
"""Load issue registry."""
|
||||
ir = async_get(hass)
|
||||
if read_only: # only used in for check config script
|
||||
ir.make_read_only()
|
||||
await ir.async_load(load_empty=load_empty)
|
||||
return await ir.async_load()
|
||||
|
||||
|
||||
@callback
|
||||
|
||||
@@ -80,7 +80,7 @@ class LabelRegistryStore(Store[LabelRegistryStoreData]):
|
||||
) -> LabelRegistryStoreData:
|
||||
"""Migrate to the new version."""
|
||||
if old_major_version > STORAGE_VERSION_MAJOR:
|
||||
raise NotImplementedError
|
||||
raise ValueError("Can't migrate to future version")
|
||||
|
||||
if old_major_version == 1:
|
||||
if old_minor_version < 2:
|
||||
@@ -224,7 +224,7 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]):
|
||||
|
||||
return new
|
||||
|
||||
async def _async_load(self) -> None:
|
||||
async def async_load(self) -> None:
|
||||
"""Load the label registry."""
|
||||
data = await self._store.async_load()
|
||||
labels = NormalizedNameBaseRegistryItems[LabelEntry]()
|
||||
@@ -270,7 +270,7 @@ def async_get(hass: HomeAssistant) -> LabelRegistry:
|
||||
return LabelRegistry(hass)
|
||||
|
||||
|
||||
async def async_load(hass: HomeAssistant, *, load_empty: bool = False) -> None:
|
||||
async def async_load(hass: HomeAssistant) -> None:
|
||||
"""Load label registry."""
|
||||
assert DATA_REGISTRY not in hass.data
|
||||
await async_get(hass).async_load(load_empty=load_empty)
|
||||
await async_get(hass).async_load()
|
||||
|
||||
@@ -77,19 +77,6 @@ class BaseRegistry[_StoreDataT: Mapping[str, Any] | Sequence[Any]](ABC):
|
||||
delay = SAVE_DELAY if self.hass.state is CoreState.running else SAVE_DELAY_LONG
|
||||
self._store.async_delay_save(self._data_to_save, delay)
|
||||
|
||||
async def async_load(self, *, load_empty: bool = False) -> None:
|
||||
"""Load the registry.
|
||||
|
||||
Optionally set the store to load empty and become read-only.
|
||||
"""
|
||||
if load_empty:
|
||||
self._store.set_load_empty()
|
||||
await self._async_load()
|
||||
|
||||
@abstractmethod
|
||||
async def _async_load(self) -> None:
|
||||
"""Load the registry."""
|
||||
|
||||
@abstractmethod
|
||||
def _data_to_save(self) -> _StoreDataT:
|
||||
"""Return data of registry to store in a file."""
|
||||
|
||||
@@ -9,7 +9,7 @@ from typing import Any, Self, cast
|
||||
|
||||
from homeassistant.const import ATTR_RESTORED, EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.core import HomeAssistant, State, callback, valid_entity_id
|
||||
from homeassistant.exceptions import HomeAssistantError, UnsupportedStorageVersionError
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
from homeassistant.util.json import json_loads
|
||||
@@ -95,12 +95,9 @@ class StoredState:
|
||||
)
|
||||
|
||||
|
||||
async def async_load(hass: HomeAssistant, *, load_empty: bool = False) -> None:
|
||||
async def async_load(hass: HomeAssistant) -> None:
|
||||
"""Load the restore state task."""
|
||||
data = async_get(hass)
|
||||
if load_empty:
|
||||
data.set_load_empty()
|
||||
await data.async_setup()
|
||||
await async_get(hass).async_setup()
|
||||
|
||||
|
||||
@callback
|
||||
@@ -127,10 +124,6 @@ class RestoreStateData:
|
||||
self.last_states: dict[str, StoredState] = {}
|
||||
self.entities: dict[str, RestoreEntity] = {}
|
||||
|
||||
def set_load_empty(self) -> None:
|
||||
"""Set the store to load empty and become read-only."""
|
||||
self.store.set_load_empty()
|
||||
|
||||
async def async_setup(self) -> None:
|
||||
"""Set up up the instance of this data helper."""
|
||||
await self.async_load()
|
||||
@@ -146,8 +139,6 @@ class RestoreStateData:
|
||||
"""Load the instance of this data helper."""
|
||||
try:
|
||||
stored_states = await self.store.async_load()
|
||||
except UnsupportedStorageVersionError:
|
||||
raise
|
||||
except HomeAssistantError as exc:
|
||||
_LOGGER.error("Error loading last states", exc_info=exc)
|
||||
stored_states = None
|
||||
|
||||
@@ -28,7 +28,7 @@ from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError, UnsupportedStorageVersionError
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.loader import bind_hass
|
||||
from homeassistant.util import dt as dt_util, json as json_util
|
||||
from homeassistant.util.file import WriteError, write_utf8_file, write_utf8_file_atomic
|
||||
@@ -239,7 +239,6 @@ class Store[_T: Mapping[str, Any] | Sequence[Any]]:
|
||||
*,
|
||||
atomic_writes: bool = False,
|
||||
encoder: type[JSONEncoder] | None = None,
|
||||
max_readable_version: int | None = None,
|
||||
minor_version: int = 1,
|
||||
read_only: bool = False,
|
||||
serialize_in_event_loop: bool = True,
|
||||
@@ -247,10 +246,6 @@ class Store[_T: Mapping[str, Any] | Sequence[Any]]:
|
||||
"""Initialize storage class.
|
||||
|
||||
Args:
|
||||
max_readable_version: Maximum major version that can be read. Defaults
|
||||
to version. Set higher than version to support forward compatibility,
|
||||
allowing reading data written by newer versions (e.g., after downgrade).
|
||||
|
||||
serialize_in_event_loop: Whether to serialize data in the event loop.
|
||||
Set to True (default) if data passed to async_save and data produced by
|
||||
data_func passed to async_delay_save needs to be serialized in the event
|
||||
@@ -278,10 +273,6 @@ class Store[_T: Mapping[str, Any] | Sequence[Any]]:
|
||||
self._encoder = encoder
|
||||
self._atomic_writes = atomic_writes
|
||||
self._read_only = read_only
|
||||
self._load_empty = False
|
||||
self._max_readable_version = (
|
||||
max_readable_version if max_readable_version is not None else version
|
||||
)
|
||||
self._next_write_time = 0.0
|
||||
self._manager = get_internal_store_manager(hass)
|
||||
self._serialize_in_event_loop = serialize_in_event_loop
|
||||
@@ -298,14 +289,6 @@ class Store[_T: Mapping[str, Any] | Sequence[Any]]:
|
||||
"""
|
||||
self._read_only = True
|
||||
|
||||
def set_load_empty(self) -> None:
|
||||
"""Set the store to load empty data and become read-only.
|
||||
|
||||
When set, the store will skip loading data from disk and return None,
|
||||
while also becoming read-only to preserve on-disk data untouched.
|
||||
"""
|
||||
self._load_empty = True
|
||||
|
||||
async def async_load(self) -> _T | None:
|
||||
"""Load data.
|
||||
|
||||
@@ -345,12 +328,6 @@ class Store[_T: Mapping[str, Any] | Sequence[Any]]:
|
||||
|
||||
async def _async_load_data(self):
|
||||
"""Load the data."""
|
||||
# When load_empty is set, skip loading storage files and use empty
|
||||
# data while preserving the on-disk files untouched.
|
||||
if self._load_empty:
|
||||
self.make_read_only()
|
||||
return None
|
||||
|
||||
# Check if we have a pending write
|
||||
if self._data is not None:
|
||||
data = self._data
|
||||
@@ -438,10 +415,6 @@ class Store[_T: Mapping[str, Any] | Sequence[Any]]:
|
||||
):
|
||||
stored = data["data"]
|
||||
else:
|
||||
if data["version"] > self._max_readable_version:
|
||||
raise UnsupportedStorageVersionError(
|
||||
self.key, data["version"], self._max_readable_version
|
||||
)
|
||||
_LOGGER.info(
|
||||
"Migrating %s storage from %s.%s to %s.%s",
|
||||
self.key,
|
||||
|
||||
@@ -336,7 +336,7 @@ ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST = ENTITY_STATE_TRIGGER_SCHEMA.extend(
|
||||
class EntityTriggerBase(Trigger):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_domains: set[str]
|
||||
_domain: str
|
||||
_schema: vol.Schema = ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST
|
||||
|
||||
@override
|
||||
@@ -386,11 +386,11 @@ class EntityTriggerBase(Trigger):
|
||||
)
|
||||
|
||||
def entity_filter(self, entities: set[str]) -> set[str]:
|
||||
"""Filter entities of these domains."""
|
||||
"""Filter entities of this domain."""
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if split_entity_id(entity_id)[0] in self._domains
|
||||
if split_entity_id(entity_id)[0] == self._domain
|
||||
}
|
||||
|
||||
@override
|
||||
@@ -792,7 +792,7 @@ def make_entity_target_state_trigger(
|
||||
class CustomTrigger(EntityTargetStateTriggerBase):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_domains = {domain}
|
||||
_domain = domain
|
||||
_to_states = to_states_set
|
||||
|
||||
return CustomTrigger
|
||||
@@ -806,7 +806,7 @@ def make_entity_transition_trigger(
|
||||
class CustomTrigger(EntityTransitionTriggerBase):
|
||||
"""Trigger for conditional entity state changes."""
|
||||
|
||||
_domains = {domain}
|
||||
_domain = domain
|
||||
_from_states = from_states
|
||||
_to_states = to_states
|
||||
|
||||
@@ -821,7 +821,7 @@ def make_entity_origin_state_trigger(
|
||||
class CustomTrigger(EntityOriginStateTriggerBase):
|
||||
"""Trigger for entity "from state" changes."""
|
||||
|
||||
_domains = {domain}
|
||||
_domain = domain
|
||||
_from_state = from_state
|
||||
|
||||
return CustomTrigger
|
||||
@@ -835,7 +835,7 @@ def make_entity_numerical_state_attribute_changed_trigger(
|
||||
class CustomTrigger(EntityNumericalStateAttributeChangedTriggerBase):
|
||||
"""Trigger for numerical state attribute changes."""
|
||||
|
||||
_domains = {domain}
|
||||
_domain = domain
|
||||
_attribute = attribute
|
||||
|
||||
return CustomTrigger
|
||||
@@ -849,7 +849,7 @@ def make_entity_numerical_state_attribute_crossed_threshold_trigger(
|
||||
class CustomTrigger(EntityNumericalStateAttributeCrossedThresholdTriggerBase):
|
||||
"""Trigger for numerical state attribute changes."""
|
||||
|
||||
_domains = {domain}
|
||||
_domain = domain
|
||||
_attribute = attribute
|
||||
|
||||
return CustomTrigger
|
||||
@@ -863,7 +863,7 @@ def make_entity_target_state_attribute_trigger(
|
||||
class CustomTrigger(EntityTargetStateAttributeTriggerBase):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_domains = {domain}
|
||||
_domain = domain
|
||||
_attribute = attribute
|
||||
_attribute_to_state = to_state
|
||||
|
||||
|
||||
@@ -40,7 +40,7 @@ habluetooth==5.8.0
|
||||
hass-nabucasa==1.15.0
|
||||
hassil==3.5.0
|
||||
home-assistant-bluetooth==1.13.1
|
||||
home-assistant-frontend==20260302.0
|
||||
home-assistant-frontend==20260226.0
|
||||
home-assistant-intents==2026.2.13
|
||||
httpx==0.28.1
|
||||
ifaddr==0.2.0
|
||||
|
||||
21
requirements_all.txt
generated
21
requirements_all.txt
generated
@@ -416,7 +416,7 @@ aioswitcher==6.1.0
|
||||
aiosyncthing==0.7.1
|
||||
|
||||
# homeassistant.components.tankerkoenig
|
||||
aiotankerkoenig==0.5.1
|
||||
aiotankerkoenig==0.4.2
|
||||
|
||||
# homeassistant.components.tedee
|
||||
aiotedee==0.2.25
|
||||
@@ -1223,7 +1223,7 @@ hole==0.9.0
|
||||
holidays==0.84
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20260302.0
|
||||
home-assistant-frontend==20260226.0
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2026.2.13
|
||||
@@ -1374,7 +1374,7 @@ kiwiki-client==0.1.1
|
||||
knocki==0.4.2
|
||||
|
||||
# homeassistant.components.knx
|
||||
knx-frontend==2026.3.2.183756
|
||||
knx-frontend==2026.2.25.165736
|
||||
|
||||
# homeassistant.components.konnected
|
||||
konnected==1.2.0
|
||||
@@ -1681,9 +1681,6 @@ onedrive-personal-sdk==0.1.4
|
||||
# homeassistant.components.onvif
|
||||
onvif-zeep-async==4.0.4
|
||||
|
||||
# homeassistant.components.onvif
|
||||
onvif_parsers==1.2.2
|
||||
|
||||
# homeassistant.components.opengarage
|
||||
open-garage==0.2.0
|
||||
|
||||
@@ -2236,7 +2233,7 @@ pylitejet==0.6.3
|
||||
pylitterbot==2025.1.0
|
||||
|
||||
# homeassistant.components.lutron_caseta
|
||||
pylutron-caseta==0.27.0
|
||||
pylutron-caseta==0.26.0
|
||||
|
||||
# homeassistant.components.lutron
|
||||
pylutron==0.2.18
|
||||
@@ -2430,7 +2427,7 @@ pysabnzbd==1.1.1
|
||||
pysaj==0.0.16
|
||||
|
||||
# homeassistant.components.saunum
|
||||
pysaunum==0.6.0
|
||||
pysaunum==0.5.0
|
||||
|
||||
# homeassistant.components.schlage
|
||||
pyschlage==2025.9.0
|
||||
@@ -2608,7 +2605,7 @@ python-open-router==0.3.3
|
||||
python-opendata-transport==0.5.0
|
||||
|
||||
# homeassistant.components.openevse
|
||||
python-openevse-http==0.2.5
|
||||
python-openevse-http==0.2.1
|
||||
|
||||
# homeassistant.components.opensky
|
||||
python-opensky==1.0.1
|
||||
@@ -2660,7 +2657,7 @@ python-telegram-bot[socks]==22.1
|
||||
python-vlc==3.0.18122
|
||||
|
||||
# homeassistant.components.xbox
|
||||
python-xbox==0.2.0
|
||||
python-xbox==0.1.3
|
||||
|
||||
# homeassistant.components.egardia
|
||||
pythonegardia==1.0.52
|
||||
@@ -2793,7 +2790,7 @@ refoss-ha==1.2.5
|
||||
regenmaschine==2024.03.0
|
||||
|
||||
# homeassistant.components.renault
|
||||
renault-api==0.5.6
|
||||
renault-api==0.5.5
|
||||
|
||||
# homeassistant.components.renson
|
||||
renson-endura-delta==1.7.2
|
||||
@@ -3145,7 +3142,7 @@ typedmonarchmoney==0.7.0
|
||||
uasiren==0.0.1
|
||||
|
||||
# homeassistant.components.uhoo
|
||||
uhooapi==1.2.8
|
||||
uhooapi==1.2.6
|
||||
|
||||
# homeassistant.components.unifiprotect
|
||||
uiprotect==10.2.2
|
||||
|
||||
21
requirements_test_all.txt
generated
21
requirements_test_all.txt
generated
@@ -401,7 +401,7 @@ aioswitcher==6.1.0
|
||||
aiosyncthing==0.7.1
|
||||
|
||||
# homeassistant.components.tankerkoenig
|
||||
aiotankerkoenig==0.5.1
|
||||
aiotankerkoenig==0.4.2
|
||||
|
||||
# homeassistant.components.tedee
|
||||
aiotedee==0.2.25
|
||||
@@ -1084,7 +1084,7 @@ hole==0.9.0
|
||||
holidays==0.84
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20260302.0
|
||||
home-assistant-frontend==20260226.0
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2026.2.13
|
||||
@@ -1211,7 +1211,7 @@ kegtron-ble==1.0.2
|
||||
knocki==0.4.2
|
||||
|
||||
# homeassistant.components.knx
|
||||
knx-frontend==2026.3.2.183756
|
||||
knx-frontend==2026.2.25.165736
|
||||
|
||||
# homeassistant.components.konnected
|
||||
konnected==1.2.0
|
||||
@@ -1467,9 +1467,6 @@ onedrive-personal-sdk==0.1.4
|
||||
# homeassistant.components.onvif
|
||||
onvif-zeep-async==4.0.4
|
||||
|
||||
# homeassistant.components.onvif
|
||||
onvif_parsers==1.2.2
|
||||
|
||||
# homeassistant.components.opengarage
|
||||
open-garage==0.2.0
|
||||
|
||||
@@ -1910,7 +1907,7 @@ pylitejet==0.6.3
|
||||
pylitterbot==2025.1.0
|
||||
|
||||
# homeassistant.components.lutron_caseta
|
||||
pylutron-caseta==0.27.0
|
||||
pylutron-caseta==0.26.0
|
||||
|
||||
# homeassistant.components.lutron
|
||||
pylutron==0.2.18
|
||||
@@ -2071,7 +2068,7 @@ pyrympro==0.0.9
|
||||
pysabnzbd==1.1.1
|
||||
|
||||
# homeassistant.components.saunum
|
||||
pysaunum==0.6.0
|
||||
pysaunum==0.5.0
|
||||
|
||||
# homeassistant.components.schlage
|
||||
pyschlage==2025.9.0
|
||||
@@ -2207,7 +2204,7 @@ python-open-router==0.3.3
|
||||
python-opendata-transport==0.5.0
|
||||
|
||||
# homeassistant.components.openevse
|
||||
python-openevse-http==0.2.5
|
||||
python-openevse-http==0.2.1
|
||||
|
||||
# homeassistant.components.opensky
|
||||
python-opensky==1.0.1
|
||||
@@ -2253,7 +2250,7 @@ python-technove==2.0.0
|
||||
python-telegram-bot[socks]==22.1
|
||||
|
||||
# homeassistant.components.xbox
|
||||
python-xbox==0.2.0
|
||||
python-xbox==0.1.3
|
||||
|
||||
# homeassistant.components.uptime_kuma
|
||||
pythonkuma==0.5.0
|
||||
@@ -2365,7 +2362,7 @@ refoss-ha==1.2.5
|
||||
regenmaschine==2024.03.0
|
||||
|
||||
# homeassistant.components.renault
|
||||
renault-api==0.5.6
|
||||
renault-api==0.5.5
|
||||
|
||||
# homeassistant.components.renson
|
||||
renson-endura-delta==1.7.2
|
||||
@@ -2648,7 +2645,7 @@ typedmonarchmoney==0.7.0
|
||||
uasiren==0.0.1
|
||||
|
||||
# homeassistant.components.uhoo
|
||||
uhooapi==1.2.8
|
||||
uhooapi==1.2.6
|
||||
|
||||
# homeassistant.components.unifiprotect
|
||||
uiprotect==10.2.2
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user