forked from home-assistant/core
Compare commits
89 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| aeebf67575 | |||
| 6a4160bcc4 | |||
| 411d14c2ce | |||
| d7315f4500 | |||
| c4ac648a2b | |||
| e9616f38d8 | |||
| 1550086dd6 | |||
| 8e28b7b49b | |||
| 4a33b1d936 | |||
| 8bfdbc173a | |||
| 3ce4c47cfc | |||
| 0d9ac25257 | |||
| 15e785b974 | |||
| 13527768cc | |||
| 071e675d9d | |||
| 316a61fcde | |||
| 9901f3c3dd | |||
| c9d8c59b45 | |||
| 0184d8e954 | |||
| 2f892678f6 | |||
| fe8cae8eb5 | |||
| 64752af4c2 | |||
| c5f80dd01d | |||
| 2704090418 | |||
| f01c860c44 | |||
| bb4a497247 | |||
| 488c5a6b9f | |||
| acbd501ede | |||
| d06cd1ad3b | |||
| 4129697dd9 | |||
| 4086d092ff | |||
| 988a0639f4 | |||
| c9c553047c | |||
| f05cffea17 | |||
| d2a188ad3c | |||
| 02e30edc6c | |||
| 0e52ea482f | |||
| d46be61b6f | |||
| f05e234c30 | |||
| bc09e825a9 | |||
| 6f6d485530 | |||
| 63eb27df7b | |||
| da29b2f711 | |||
| c2f6f93f1d | |||
| 39143a2e79 | |||
| 99e65c38b0 | |||
| ec7d2f3731 | |||
| d43187327f | |||
| 8be01ac9d6 | |||
| e052ab27f2 | |||
| 43ec63eabc | |||
| 7a2a6cf7d8 | |||
| eff440d2a8 | |||
| 3fea4efb9f | |||
| dc1928f3eb | |||
| f8618e65f6 | |||
| e99aaed7fa | |||
| d000558227 | |||
| 7daf442271 | |||
| b8f458458b | |||
| 85ecb04abf | |||
| 20db7fdc96 | |||
| a1d43b9387 | |||
| de9c05ad53 | |||
| a01521b224 | |||
| 2413bb4f52 | |||
| 1496da8e94 | |||
| 802ad55493 | |||
| 48da88583f | |||
| 0ab66a4ed1 | |||
| 3b13c5bfdd | |||
| 42532e9695 | |||
| 0dd9845501 | |||
| 3a213b2d17 | |||
| d155d93462 | |||
| 5888b83f22 | |||
| 471f77fea4 | |||
| c684b06734 | |||
| 393551d696 | |||
| 24b81df0e6 | |||
| a66cf62b09 | |||
| 901099325b | |||
| 30695cfef5 | |||
| 5d2a8e8208 | |||
| 4019045e7b | |||
| ec2c8da1c5 | |||
| d1e8a2a32d | |||
| feeee2d15e | |||
| 8a052177a4 |
@@ -291,6 +291,7 @@ homeassistant.components.lcn.*
|
||||
homeassistant.components.ld2410_ble.*
|
||||
homeassistant.components.led_ble.*
|
||||
homeassistant.components.lektrico.*
|
||||
homeassistant.components.letpot.*
|
||||
homeassistant.components.lidarr.*
|
||||
homeassistant.components.lifx.*
|
||||
homeassistant.components.light.*
|
||||
|
||||
@@ -831,6 +831,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/led_ble/ @bdraco
|
||||
/homeassistant/components/lektrico/ @lektrico
|
||||
/tests/components/lektrico/ @lektrico
|
||||
/homeassistant/components/letpot/ @jpelgrom
|
||||
/tests/components/letpot/ @jpelgrom
|
||||
/homeassistant/components/lg_netcast/ @Drafteed @splinter98
|
||||
/tests/components/lg_netcast/ @Drafteed @splinter98
|
||||
/homeassistant/components/lg_thinq/ @LG-ThinQ-Integration
|
||||
|
||||
@@ -67,18 +67,21 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
"humidity": SensorEntityDescription(
|
||||
key="humidity",
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
"pressure": SensorEntityDescription(
|
||||
key="pressure",
|
||||
device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
|
||||
native_unit_of_measurement=UnitOfPressure.MBAR,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
"battery": SensorEntityDescription(
|
||||
key="battery",
|
||||
@@ -86,24 +89,28 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
"co2": SensorEntityDescription(
|
||||
key="co2",
|
||||
device_class=SensorDeviceClass.CO2,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
"voc": SensorEntityDescription(
|
||||
key="voc",
|
||||
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
"illuminance": SensorEntityDescription(
|
||||
key="illuminance",
|
||||
translation_key="illuminance",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/aussie_broadband",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aussiebb"],
|
||||
"requirements": ["pyaussiebb==0.1.4"]
|
||||
"requirements": ["pyaussiebb==0.1.5"]
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ from collections.abc import Callable
|
||||
from dataclasses import dataclass, field, replace
|
||||
from datetime import datetime, timedelta
|
||||
from enum import StrEnum
|
||||
import random
|
||||
from typing import TYPE_CHECKING, Self, TypedDict
|
||||
|
||||
from cronsim import CronSim
|
||||
@@ -28,6 +29,10 @@ if TYPE_CHECKING:
|
||||
CRON_PATTERN_DAILY = "45 4 * * *"
|
||||
CRON_PATTERN_WEEKLY = "45 4 * * {}"
|
||||
|
||||
# Randomize the start time of the backup by up to 60 minutes to avoid
|
||||
# all backups running at the same time.
|
||||
BACKUP_START_TIME_JITTER = 60 * 60
|
||||
|
||||
|
||||
class StoredBackupConfig(TypedDict):
|
||||
"""Represent the stored backup config."""
|
||||
@@ -329,6 +334,8 @@ class BackupSchedule:
|
||||
except Exception: # noqa: BLE001
|
||||
LOGGER.exception("Unexpected error creating automatic backup")
|
||||
|
||||
next_time += timedelta(seconds=random.randint(0, BACKUP_START_TIME_JITTER))
|
||||
LOGGER.debug("Scheduling next automatic backup at %s", next_time)
|
||||
manager.remove_next_backup_event = async_track_point_in_time(
|
||||
manager.hass, _create_backup, next_time
|
||||
)
|
||||
|
||||
@@ -14,10 +14,13 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import BluesoundCoordinator
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
PLATFORMS = [Platform.MEDIA_PLAYER]
|
||||
PLATFORMS = [
|
||||
Platform.MEDIA_PLAYER,
|
||||
]
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -26,6 +29,7 @@ class BluesoundRuntimeData:
|
||||
|
||||
player: Player
|
||||
sync_status: SyncStatus
|
||||
coordinator: BluesoundCoordinator
|
||||
|
||||
|
||||
type BluesoundConfigEntry = ConfigEntry[BluesoundRuntimeData]
|
||||
@@ -33,9 +37,6 @@ type BluesoundConfigEntry = ConfigEntry[BluesoundRuntimeData]
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Bluesound."""
|
||||
if DOMAIN not in hass.data:
|
||||
hass.data[DOMAIN] = []
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -46,13 +47,16 @@ async def async_setup_entry(
|
||||
host = config_entry.data[CONF_HOST]
|
||||
port = config_entry.data[CONF_PORT]
|
||||
session = async_get_clientsession(hass)
|
||||
async with Player(host, port, session=session, default_timeout=10) as player:
|
||||
try:
|
||||
sync_status = await player.sync_status(timeout=1)
|
||||
except PlayerUnreachableError as ex:
|
||||
raise ConfigEntryNotReady(f"Error connecting to {host}:{port}") from ex
|
||||
player = Player(host, port, session=session, default_timeout=10)
|
||||
try:
|
||||
sync_status = await player.sync_status(timeout=1)
|
||||
except PlayerUnreachableError as ex:
|
||||
raise ConfigEntryNotReady(f"Error connecting to {host}:{port}") from ex
|
||||
|
||||
config_entry.runtime_data = BluesoundRuntimeData(player, sync_status)
|
||||
coordinator = BluesoundCoordinator(hass, player, sync_status)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
config_entry.runtime_data = BluesoundRuntimeData(player, sync_status, coordinator)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
|
||||
|
||||
|
||||
@@ -0,0 +1,160 @@
|
||||
"""Define a base coordinator for Bluesound entities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Callable, Coroutine
|
||||
import contextlib
|
||||
from dataclasses import dataclass, replace
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from pyblu import Input, Player, Preset, Status, SyncStatus
|
||||
from pyblu.errors import PlayerUnreachableError
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
NODE_OFFLINE_CHECK_TIMEOUT = timedelta(minutes=3)
|
||||
PRESET_AND_INPUTS_INTERVAL = timedelta(minutes=15)
|
||||
|
||||
|
||||
@dataclass
|
||||
class BluesoundData:
|
||||
"""Define a class to hold Bluesound data."""
|
||||
|
||||
sync_status: SyncStatus
|
||||
status: Status
|
||||
presets: list[Preset]
|
||||
inputs: list[Input]
|
||||
|
||||
|
||||
def cancel_task(task: asyncio.Task) -> Callable[[], Coroutine[None, None, None]]:
|
||||
"""Cancel a task."""
|
||||
|
||||
async def _cancel_task() -> None:
|
||||
task.cancel()
|
||||
with contextlib.suppress(asyncio.CancelledError):
|
||||
await task
|
||||
|
||||
return _cancel_task
|
||||
|
||||
|
||||
class BluesoundCoordinator(DataUpdateCoordinator[BluesoundData]):
|
||||
"""Define an object to hold Bluesound data."""
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, player: Player, sync_status: SyncStatus
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
self.player = player
|
||||
self._inital_sync_status = sync_status
|
||||
|
||||
super().__init__(
|
||||
hass,
|
||||
logger=_LOGGER,
|
||||
name=sync_status.name,
|
||||
)
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
assert self.config_entry is not None
|
||||
|
||||
preset = await self.player.presets()
|
||||
inputs = await self.player.inputs()
|
||||
status = await self.player.status()
|
||||
|
||||
self.async_set_updated_data(
|
||||
BluesoundData(
|
||||
sync_status=self._inital_sync_status,
|
||||
status=status,
|
||||
presets=preset,
|
||||
inputs=inputs,
|
||||
)
|
||||
)
|
||||
|
||||
status_loop_task = self.hass.async_create_background_task(
|
||||
self._poll_status_loop(),
|
||||
name=f"bluesound.poll_status_loop_{self.data.sync_status.id}",
|
||||
)
|
||||
self.config_entry.async_on_unload(cancel_task(status_loop_task))
|
||||
|
||||
sync_status_loop_task = self.hass.async_create_background_task(
|
||||
self._poll_sync_status_loop(),
|
||||
name=f"bluesound.poll_sync_status_loop_{self.data.sync_status.id}",
|
||||
)
|
||||
self.config_entry.async_on_unload(cancel_task(sync_status_loop_task))
|
||||
|
||||
presets_and_inputs_loop_task = self.hass.async_create_background_task(
|
||||
self._poll_presets_and_inputs_loop(),
|
||||
name=f"bluesound.poll_presets_and_inputs_loop_{self.data.sync_status.id}",
|
||||
)
|
||||
self.config_entry.async_on_unload(cancel_task(presets_and_inputs_loop_task))
|
||||
|
||||
async def _async_update_data(self) -> BluesoundData:
|
||||
return self.data
|
||||
|
||||
async def _poll_presets_and_inputs_loop(self) -> None:
|
||||
while True:
|
||||
await asyncio.sleep(PRESET_AND_INPUTS_INTERVAL.total_seconds())
|
||||
try:
|
||||
preset = await self.player.presets()
|
||||
inputs = await self.player.inputs()
|
||||
self.async_set_updated_data(
|
||||
replace(
|
||||
self.data,
|
||||
presets=preset,
|
||||
inputs=inputs,
|
||||
)
|
||||
)
|
||||
except PlayerUnreachableError as ex:
|
||||
self.async_set_update_error(ex)
|
||||
except asyncio.CancelledError:
|
||||
return
|
||||
except Exception as ex: # noqa: BLE001 - this loop should never stop
|
||||
self.async_set_update_error(ex)
|
||||
|
||||
async def _poll_status_loop(self) -> None:
|
||||
"""Loop which polls the status of the player."""
|
||||
while True:
|
||||
try:
|
||||
status = await self.player.status(
|
||||
etag=self.data.status.etag, poll_timeout=120, timeout=125
|
||||
)
|
||||
self.async_set_updated_data(
|
||||
replace(
|
||||
self.data,
|
||||
status=status,
|
||||
)
|
||||
)
|
||||
except PlayerUnreachableError as ex:
|
||||
self.async_set_update_error(ex)
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT.total_seconds())
|
||||
except asyncio.CancelledError:
|
||||
return
|
||||
except Exception as ex: # noqa: BLE001 - this loop should never stop
|
||||
self.async_set_update_error(ex)
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT.total_seconds())
|
||||
|
||||
async def _poll_sync_status_loop(self) -> None:
|
||||
"""Loop which polls the sync status of the player."""
|
||||
while True:
|
||||
try:
|
||||
sync_status = await self.player.sync_status(
|
||||
etag=self.data.sync_status.etag, poll_timeout=120, timeout=125
|
||||
)
|
||||
self.async_set_updated_data(
|
||||
replace(
|
||||
self.data,
|
||||
sync_status=sync_status,
|
||||
)
|
||||
)
|
||||
except PlayerUnreachableError as ex:
|
||||
self.async_set_update_error(ex)
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT.total_seconds())
|
||||
except asyncio.CancelledError:
|
||||
raise
|
||||
except Exception as ex: # noqa: BLE001 - this loop should never stop
|
||||
self.async_set_update_error(ex)
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT.total_seconds())
|
||||
@@ -2,15 +2,12 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from asyncio import CancelledError, Task
|
||||
from contextlib import suppress
|
||||
from asyncio import Task
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from pyblu import Input, Player, Preset, Status, SyncStatus
|
||||
from pyblu.errors import PlayerUnreachableError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import media_source
|
||||
@@ -23,7 +20,7 @@ from homeassistant.components.media_player import (
|
||||
async_process_play_media_url,
|
||||
)
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.device_registry import (
|
||||
@@ -36,9 +33,11 @@ from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_send,
|
||||
)
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN
|
||||
from .coordinator import BluesoundCoordinator
|
||||
from .utils import dispatcher_join_signal, dispatcher_unjoin_signal, format_unique_id
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -56,11 +55,6 @@ SERVICE_JOIN = "join"
|
||||
SERVICE_SET_TIMER = "set_sleep_timer"
|
||||
SERVICE_UNJOIN = "unjoin"
|
||||
|
||||
NODE_OFFLINE_CHECK_TIMEOUT = 180
|
||||
NODE_RETRY_INITIATION = timedelta(minutes=3)
|
||||
|
||||
SYNC_STATUS_INTERVAL = timedelta(minutes=5)
|
||||
|
||||
POLL_TIMEOUT = 120
|
||||
|
||||
|
||||
@@ -71,10 +65,10 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up the Bluesound entry."""
|
||||
bluesound_player = BluesoundPlayer(
|
||||
config_entry.runtime_data.coordinator,
|
||||
config_entry.data[CONF_HOST],
|
||||
config_entry.data[CONF_PORT],
|
||||
config_entry.runtime_data.player,
|
||||
config_entry.runtime_data.sync_status,
|
||||
)
|
||||
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
@@ -89,11 +83,10 @@ async def async_setup_entry(
|
||||
)
|
||||
platform.async_register_entity_service(SERVICE_UNJOIN, None, "async_unjoin")
|
||||
|
||||
hass.data[DATA_BLUESOUND].append(bluesound_player)
|
||||
async_add_entities([bluesound_player], update_before_add=True)
|
||||
|
||||
|
||||
class BluesoundPlayer(MediaPlayerEntity):
|
||||
class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity):
|
||||
"""Representation of a Bluesound Player."""
|
||||
|
||||
_attr_media_content_type = MediaType.MUSIC
|
||||
@@ -102,12 +95,15 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: BluesoundCoordinator,
|
||||
host: str,
|
||||
port: int,
|
||||
player: Player,
|
||||
sync_status: SyncStatus,
|
||||
) -> None:
|
||||
"""Initialize the media player."""
|
||||
super().__init__(coordinator)
|
||||
sync_status = coordinator.data.sync_status
|
||||
|
||||
self.host = host
|
||||
self.port = port
|
||||
self._poll_status_loop_task: Task[None] | None = None
|
||||
@@ -115,15 +111,14 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
self._id = sync_status.id
|
||||
self._last_status_update: datetime | None = None
|
||||
self._sync_status = sync_status
|
||||
self._status: Status | None = None
|
||||
self._inputs: list[Input] = []
|
||||
self._presets: list[Preset] = []
|
||||
self._status: Status = coordinator.data.status
|
||||
self._inputs: list[Input] = coordinator.data.inputs
|
||||
self._presets: list[Preset] = coordinator.data.presets
|
||||
self._group_name: str | None = None
|
||||
self._group_list: list[str] = []
|
||||
self._bluesound_device_name = sync_status.name
|
||||
self._player = player
|
||||
self._is_leader = False
|
||||
self._leader: BluesoundPlayer | None = None
|
||||
self._last_status_update = dt_util.utcnow()
|
||||
|
||||
self._attr_unique_id = format_unique_id(sync_status.mac, port)
|
||||
# there should always be one player with the default port per mac
|
||||
@@ -146,52 +141,10 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
via_device=(DOMAIN, format_mac(sync_status.mac)),
|
||||
)
|
||||
|
||||
async def _poll_status_loop(self) -> None:
|
||||
"""Loop which polls the status of the player."""
|
||||
while True:
|
||||
try:
|
||||
await self.async_update_status()
|
||||
except PlayerUnreachableError:
|
||||
_LOGGER.error(
|
||||
"Node %s:%s is offline, retrying later", self.host, self.port
|
||||
)
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
|
||||
except CancelledError:
|
||||
_LOGGER.debug(
|
||||
"Stopping the polling of node %s:%s", self.host, self.port
|
||||
)
|
||||
return
|
||||
except: # noqa: E722 - this loop should never stop
|
||||
_LOGGER.exception(
|
||||
"Unexpected error for %s:%s, retrying later", self.host, self.port
|
||||
)
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
|
||||
|
||||
async def _poll_sync_status_loop(self) -> None:
|
||||
"""Loop which polls the sync status of the player."""
|
||||
while True:
|
||||
try:
|
||||
await self.update_sync_status()
|
||||
except PlayerUnreachableError:
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
|
||||
except CancelledError:
|
||||
raise
|
||||
except: # noqa: E722 - all errors must be caught for this loop
|
||||
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Start the polling task."""
|
||||
await super().async_added_to_hass()
|
||||
|
||||
self._poll_status_loop_task = self.hass.async_create_background_task(
|
||||
self._poll_status_loop(),
|
||||
name=f"bluesound.poll_status_loop_{self.host}:{self.port}",
|
||||
)
|
||||
self._poll_sync_status_loop_task = self.hass.async_create_background_task(
|
||||
self._poll_sync_status_loop(),
|
||||
name=f"bluesound.poll_sync_status_loop_{self.host}:{self.port}",
|
||||
)
|
||||
|
||||
assert self._sync_status.id is not None
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
@@ -212,105 +165,24 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
"""Stop the polling task."""
|
||||
await super().async_will_remove_from_hass()
|
||||
|
||||
assert self._poll_status_loop_task is not None
|
||||
if self._poll_status_loop_task.cancel():
|
||||
# the sleeps in _poll_loop will raise CancelledError
|
||||
with suppress(CancelledError):
|
||||
await self._poll_status_loop_task
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self._sync_status = self.coordinator.data.sync_status
|
||||
self._status = self.coordinator.data.status
|
||||
self._inputs = self.coordinator.data.inputs
|
||||
self._presets = self.coordinator.data.presets
|
||||
|
||||
assert self._poll_sync_status_loop_task is not None
|
||||
if self._poll_sync_status_loop_task.cancel():
|
||||
# the sleeps in _poll_sync_status_loop will raise CancelledError
|
||||
with suppress(CancelledError):
|
||||
await self._poll_sync_status_loop_task
|
||||
|
||||
self.hass.data[DATA_BLUESOUND].remove(self)
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update internal status of the entity."""
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
with suppress(PlayerUnreachableError):
|
||||
await self.async_update_presets()
|
||||
await self.async_update_captures()
|
||||
|
||||
async def async_update_status(self) -> None:
|
||||
"""Use the poll session to always get the status of the player."""
|
||||
etag = None
|
||||
if self._status is not None:
|
||||
etag = self._status.etag
|
||||
|
||||
try:
|
||||
status = await self._player.status(
|
||||
etag=etag, poll_timeout=POLL_TIMEOUT, timeout=POLL_TIMEOUT + 5
|
||||
)
|
||||
|
||||
self._attr_available = True
|
||||
self._last_status_update = dt_util.utcnow()
|
||||
self._status = status
|
||||
|
||||
self.async_write_ha_state()
|
||||
except PlayerUnreachableError:
|
||||
self._attr_available = False
|
||||
self._last_status_update = None
|
||||
self._status = None
|
||||
self.async_write_ha_state()
|
||||
_LOGGER.error(
|
||||
"Client connection error, marking %s as offline",
|
||||
self._bluesound_device_name,
|
||||
)
|
||||
raise
|
||||
|
||||
async def update_sync_status(self) -> None:
|
||||
"""Update the internal status."""
|
||||
etag = None
|
||||
if self._sync_status:
|
||||
etag = self._sync_status.etag
|
||||
sync_status = await self._player.sync_status(
|
||||
etag=etag, poll_timeout=POLL_TIMEOUT, timeout=POLL_TIMEOUT + 5
|
||||
)
|
||||
|
||||
self._sync_status = sync_status
|
||||
self._last_status_update = dt_util.utcnow()
|
||||
|
||||
self._group_list = self.rebuild_bluesound_group()
|
||||
|
||||
if sync_status.leader is not None:
|
||||
self._is_leader = False
|
||||
leader_id = f"{sync_status.leader.ip}:{sync_status.leader.port}"
|
||||
leader_device = [
|
||||
device
|
||||
for device in self.hass.data[DATA_BLUESOUND]
|
||||
if device.id == leader_id
|
||||
]
|
||||
|
||||
if leader_device and leader_id != self.id:
|
||||
self._leader = leader_device[0]
|
||||
else:
|
||||
self._leader = None
|
||||
_LOGGER.error("Leader not found %s", leader_id)
|
||||
else:
|
||||
if self._leader is not None:
|
||||
self._leader = None
|
||||
followers = self._sync_status.followers
|
||||
self._is_leader = followers is not None
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_update_captures(self) -> None:
|
||||
"""Update Capture sources."""
|
||||
inputs = await self._player.inputs()
|
||||
self._inputs = inputs
|
||||
|
||||
async def async_update_presets(self) -> None:
|
||||
"""Update Presets."""
|
||||
presets = await self._player.presets()
|
||||
self._presets = presets
|
||||
|
||||
@property
|
||||
def state(self) -> MediaPlayerState:
|
||||
"""Return the state of the device."""
|
||||
if self._status is None:
|
||||
if self.available is False:
|
||||
return MediaPlayerState.OFF
|
||||
|
||||
if self.is_grouped and not self.is_leader:
|
||||
@@ -327,7 +199,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def media_title(self) -> str | None:
|
||||
"""Title of current playing media."""
|
||||
if self._status is None or (self.is_grouped and not self.is_leader):
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
return self._status.name
|
||||
@@ -335,7 +207,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def media_artist(self) -> str | None:
|
||||
"""Artist of current playing media (Music track only)."""
|
||||
if self._status is None:
|
||||
if self.available is False:
|
||||
return None
|
||||
|
||||
if self.is_grouped and not self.is_leader:
|
||||
@@ -346,7 +218,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def media_album_name(self) -> str | None:
|
||||
"""Artist of current playing media (Music track only)."""
|
||||
if self._status is None or (self.is_grouped and not self.is_leader):
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
return self._status.album
|
||||
@@ -354,7 +226,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def media_image_url(self) -> str | None:
|
||||
"""Image url of current playing media."""
|
||||
if self._status is None or (self.is_grouped and not self.is_leader):
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
url = self._status.image
|
||||
@@ -369,7 +241,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def media_position(self) -> int | None:
|
||||
"""Position of current playing media in seconds."""
|
||||
if self._status is None or (self.is_grouped and not self.is_leader):
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
mediastate = self.state
|
||||
@@ -388,7 +260,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def media_duration(self) -> int | None:
|
||||
"""Duration of current playing media in seconds."""
|
||||
if self._status is None or (self.is_grouped and not self.is_leader):
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
duration = self._status.total_seconds
|
||||
@@ -405,16 +277,11 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def volume_level(self) -> float | None:
|
||||
"""Volume level of the media player (0..1)."""
|
||||
volume = None
|
||||
volume = self._status.volume
|
||||
|
||||
if self._status is not None:
|
||||
volume = self._status.volume
|
||||
if self.is_grouped:
|
||||
volume = self._sync_status.volume
|
||||
|
||||
if volume is None:
|
||||
return None
|
||||
|
||||
return volume / 100
|
||||
|
||||
@property
|
||||
@@ -447,7 +314,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def source_list(self) -> list[str] | None:
|
||||
"""List of available input sources."""
|
||||
if self._status is None or (self.is_grouped and not self.is_leader):
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
sources = [x.text for x in self._inputs]
|
||||
@@ -458,7 +325,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def source(self) -> str | None:
|
||||
"""Name of the current input source."""
|
||||
if self._status is None or (self.is_grouped and not self.is_leader):
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
if self._status.input_id is not None:
|
||||
@@ -475,7 +342,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
@property
|
||||
def supported_features(self) -> MediaPlayerEntityFeature:
|
||||
"""Flag of media commands that are supported."""
|
||||
if self._status is None:
|
||||
if self.available is False:
|
||||
return MediaPlayerEntityFeature(0)
|
||||
|
||||
if self.is_grouped and not self.is_leader:
|
||||
@@ -577,16 +444,21 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
if self.sync_status.leader is None and self.sync_status.followers is None:
|
||||
return []
|
||||
|
||||
player_entities: list[BluesoundPlayer] = self.hass.data[DATA_BLUESOUND]
|
||||
config_entries: list[BluesoundConfigEntry] = (
|
||||
self.hass.config_entries.async_entries(DOMAIN)
|
||||
)
|
||||
sync_status_list = [
|
||||
x.runtime_data.coordinator.data.sync_status for x in config_entries
|
||||
]
|
||||
|
||||
leader_sync_status: SyncStatus | None = None
|
||||
if self.sync_status.leader is None:
|
||||
leader_sync_status = self.sync_status
|
||||
else:
|
||||
required_id = f"{self.sync_status.leader.ip}:{self.sync_status.leader.port}"
|
||||
for x in player_entities:
|
||||
if x.sync_status.id == required_id:
|
||||
leader_sync_status = x.sync_status
|
||||
for sync_status in sync_status_list:
|
||||
if sync_status.id == required_id:
|
||||
leader_sync_status = sync_status
|
||||
break
|
||||
|
||||
if leader_sync_status is None or leader_sync_status.followers is None:
|
||||
@@ -594,9 +466,9 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
|
||||
follower_ids = [f"{x.ip}:{x.port}" for x in leader_sync_status.followers]
|
||||
follower_names = [
|
||||
x.sync_status.name
|
||||
for x in player_entities
|
||||
if x.sync_status.id in follower_ids
|
||||
sync_status.name
|
||||
for sync_status in sync_status_list
|
||||
if sync_status.id in follower_ids
|
||||
]
|
||||
follower_names.insert(0, leader_sync_status.name)
|
||||
return follower_names
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
"bluetooth-adapters==0.20.2",
|
||||
"bluetooth-auto-recovery==1.4.2",
|
||||
"bluetooth-data-tools==1.20.0",
|
||||
"dbus-fast==2.24.3",
|
||||
"dbus-fast==2.28.0",
|
||||
"habluetooth==3.7.0"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -2,10 +2,12 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import base64
|
||||
from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
|
||||
import hashlib
|
||||
import logging
|
||||
import random
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientError, ClientTimeout
|
||||
@@ -27,6 +29,9 @@ from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
_STORAGE_BACKUP = "backup"
|
||||
_RETRY_LIMIT = 5
|
||||
_RETRY_SECONDS_MIN = 60
|
||||
_RETRY_SECONDS_MAX = 600
|
||||
|
||||
|
||||
async def _b64md5(stream: AsyncIterator[bytes]) -> str:
|
||||
@@ -114,13 +119,55 @@ class CloudBackupAgent(BackupAgent):
|
||||
raise BackupAgentError("Failed to get download details") from err
|
||||
|
||||
try:
|
||||
resp = await self._cloud.websession.get(details["url"])
|
||||
resp = await self._cloud.websession.get(
|
||||
details["url"],
|
||||
timeout=ClientTimeout(connect=10.0, total=43200.0), # 43200s == 12h
|
||||
)
|
||||
|
||||
resp.raise_for_status()
|
||||
except ClientError as err:
|
||||
raise BackupAgentError("Failed to download backup") from err
|
||||
|
||||
return ChunkAsyncStreamIterator(resp.content)
|
||||
|
||||
async def _async_do_upload_backup(
|
||||
self,
|
||||
*,
|
||||
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
|
||||
filename: str,
|
||||
base64md5hash: str,
|
||||
metadata: dict[str, Any],
|
||||
size: int,
|
||||
) -> None:
|
||||
"""Upload a backup."""
|
||||
try:
|
||||
details = await async_files_upload_details(
|
||||
self._cloud,
|
||||
storage_type=_STORAGE_BACKUP,
|
||||
filename=filename,
|
||||
metadata=metadata,
|
||||
size=size,
|
||||
base64md5hash=base64md5hash,
|
||||
)
|
||||
except (ClientError, CloudError) as err:
|
||||
raise BackupAgentError("Failed to get upload details") from err
|
||||
|
||||
try:
|
||||
upload_status = await self._cloud.websession.put(
|
||||
details["url"],
|
||||
data=await open_stream(),
|
||||
headers=details["headers"] | {"content-length": str(size)},
|
||||
timeout=ClientTimeout(connect=10.0, total=43200.0), # 43200s == 12h
|
||||
)
|
||||
_LOGGER.log(
|
||||
logging.DEBUG if upload_status.status < 400 else logging.WARNING,
|
||||
"Backup upload status: %s",
|
||||
upload_status.status,
|
||||
)
|
||||
upload_status.raise_for_status()
|
||||
except (TimeoutError, ClientError) as err:
|
||||
raise BackupAgentError("Failed to upload backup") from err
|
||||
|
||||
async def async_upload_backup(
|
||||
self,
|
||||
*,
|
||||
@@ -137,34 +184,34 @@ class CloudBackupAgent(BackupAgent):
|
||||
raise BackupAgentError("Cloud backups must be protected")
|
||||
|
||||
base64md5hash = await _b64md5(await open_stream())
|
||||
filename = self._get_backup_filename()
|
||||
metadata = backup.as_dict()
|
||||
size = backup.size
|
||||
|
||||
try:
|
||||
details = await async_files_upload_details(
|
||||
self._cloud,
|
||||
storage_type=_STORAGE_BACKUP,
|
||||
filename=self._get_backup_filename(),
|
||||
metadata=backup.as_dict(),
|
||||
size=backup.size,
|
||||
base64md5hash=base64md5hash,
|
||||
)
|
||||
except (ClientError, CloudError) as err:
|
||||
raise BackupAgentError("Failed to get upload details") from err
|
||||
|
||||
try:
|
||||
upload_status = await self._cloud.websession.put(
|
||||
details["url"],
|
||||
data=await open_stream(),
|
||||
headers=details["headers"] | {"content-length": str(backup.size)},
|
||||
timeout=ClientTimeout(connect=10.0, total=43200.0), # 43200s == 12h
|
||||
)
|
||||
_LOGGER.log(
|
||||
logging.DEBUG if upload_status.status < 400 else logging.WARNING,
|
||||
"Backup upload status: %s",
|
||||
upload_status.status,
|
||||
)
|
||||
upload_status.raise_for_status()
|
||||
except (TimeoutError, ClientError) as err:
|
||||
raise BackupAgentError("Failed to upload backup") from err
|
||||
tries = 1
|
||||
while tries <= _RETRY_LIMIT:
|
||||
try:
|
||||
await self._async_do_upload_backup(
|
||||
open_stream=open_stream,
|
||||
filename=filename,
|
||||
base64md5hash=base64md5hash,
|
||||
metadata=metadata,
|
||||
size=size,
|
||||
)
|
||||
break
|
||||
except BackupAgentError as err:
|
||||
if tries == _RETRY_LIMIT:
|
||||
raise
|
||||
tries += 1
|
||||
retry_timer = random.randint(_RETRY_SECONDS_MIN, _RETRY_SECONDS_MAX)
|
||||
_LOGGER.info(
|
||||
"Failed to upload backup, retrying (%s/%s) in %ss: %s",
|
||||
tries,
|
||||
_RETRY_LIMIT,
|
||||
retry_timer,
|
||||
err,
|
||||
)
|
||||
await asyncio.sleep(retry_timer)
|
||||
|
||||
async def async_delete_backup(
|
||||
self,
|
||||
|
||||
@@ -2,41 +2,29 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from cookidoo_api import Cookidoo, CookidooConfig, get_localization_options
|
||||
import logging
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_COUNTRY,
|
||||
CONF_EMAIL,
|
||||
CONF_LANGUAGE,
|
||||
CONF_PASSWORD,
|
||||
Platform,
|
||||
)
|
||||
from cookidoo_api import CookidooAuthException, CookidooRequestException
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator
|
||||
from .helpers import cookidoo_from_config_entry
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.BUTTON, Platform.TODO]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool:
|
||||
"""Set up Cookidoo from a config entry."""
|
||||
|
||||
localizations = await get_localization_options(
|
||||
country=entry.data[CONF_COUNTRY].lower(),
|
||||
language=entry.data[CONF_LANGUAGE],
|
||||
coordinator = CookidooDataUpdateCoordinator(
|
||||
hass, await cookidoo_from_config_entry(hass, entry), entry
|
||||
)
|
||||
|
||||
cookidoo = Cookidoo(
|
||||
async_get_clientsession(hass),
|
||||
CookidooConfig(
|
||||
email=entry.data[CONF_EMAIL],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
localization=localizations[0],
|
||||
),
|
||||
)
|
||||
|
||||
coordinator = CookidooDataUpdateCoordinator(hass, cookidoo, entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
@@ -49,3 +37,56 @@ async def async_setup_entry(hass: HomeAssistant, entry: CookidooConfigEntry) ->
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def async_migrate_entry(
|
||||
hass: HomeAssistant, config_entry: CookidooConfigEntry
|
||||
) -> bool:
|
||||
"""Migrate config entry."""
|
||||
_LOGGER.debug("Migrating from version %s", config_entry.version)
|
||||
|
||||
if config_entry.version == 1 and config_entry.minor_version == 1:
|
||||
# Add the unique uuid
|
||||
cookidoo = await cookidoo_from_config_entry(hass, config_entry)
|
||||
|
||||
try:
|
||||
auth_data = await cookidoo.login()
|
||||
except (CookidooRequestException, CookidooAuthException) as e:
|
||||
_LOGGER.error(
|
||||
"Could not migrate config config_entry: %s",
|
||||
str(e),
|
||||
)
|
||||
return False
|
||||
|
||||
unique_id = auth_data.sub
|
||||
|
||||
device_registry = dr.async_get(hass)
|
||||
entity_registry = er.async_get(hass)
|
||||
device_entries = dr.async_entries_for_config_entry(
|
||||
device_registry, config_entry_id=config_entry.entry_id
|
||||
)
|
||||
entity_entries = er.async_entries_for_config_entry(
|
||||
entity_registry, config_entry_id=config_entry.entry_id
|
||||
)
|
||||
for dev in device_entries:
|
||||
device_registry.async_update_device(
|
||||
dev.id, new_identifiers={(DOMAIN, unique_id)}
|
||||
)
|
||||
for ent in entity_entries:
|
||||
assert ent.config_entry_id
|
||||
entity_registry.async_update_entity(
|
||||
ent.entity_id,
|
||||
new_unique_id=ent.unique_id.replace(ent.config_entry_id, unique_id),
|
||||
)
|
||||
|
||||
hass.config_entries.async_update_entry(
|
||||
config_entry, unique_id=auth_data.sub, minor_version=2
|
||||
)
|
||||
|
||||
_LOGGER.debug(
|
||||
"Migration to version %s.%s successful",
|
||||
config_entry.version,
|
||||
config_entry.minor_version,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
@@ -56,7 +56,8 @@ class CookidooButton(CookidooBaseEntity, ButtonEntity):
|
||||
"""Initialize cookidoo button."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{description.key}"
|
||||
assert coordinator.config_entry.unique_id
|
||||
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{description.key}"
|
||||
|
||||
async def async_press(self) -> None:
|
||||
"""Press the button."""
|
||||
|
||||
@@ -7,9 +7,7 @@ import logging
|
||||
from typing import Any
|
||||
|
||||
from cookidoo_api import (
|
||||
Cookidoo,
|
||||
CookidooAuthException,
|
||||
CookidooConfig,
|
||||
CookidooRequestException,
|
||||
get_country_options,
|
||||
get_localization_options,
|
||||
@@ -23,7 +21,6 @@ from homeassistant.config_entries import (
|
||||
ConfigFlowResult,
|
||||
)
|
||||
from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import (
|
||||
CountrySelector,
|
||||
CountrySelectorConfig,
|
||||
@@ -35,6 +32,7 @@ from homeassistant.helpers.selector import (
|
||||
)
|
||||
|
||||
from .const import DOMAIN
|
||||
from .helpers import cookidoo_from_config_data
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -57,10 +55,14 @@ AUTH_DATA_SCHEMA = {
|
||||
class CookidooConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Cookidoo."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 2
|
||||
|
||||
COUNTRY_DATA_SCHEMA: dict
|
||||
LANGUAGE_DATA_SCHEMA: dict
|
||||
|
||||
user_input: dict[str, Any]
|
||||
user_uuid: str
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any]
|
||||
@@ -78,8 +80,11 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if user_input is not None and not (
|
||||
errors := await self.validate_input(user_input)
|
||||
):
|
||||
await self.async_set_unique_id(self.user_uuid)
|
||||
if self.source == SOURCE_USER:
|
||||
self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]})
|
||||
self._abort_if_unique_id_configured()
|
||||
if self.source == SOURCE_RECONFIGURE:
|
||||
self._abort_if_unique_id_mismatch()
|
||||
self.user_input = user_input
|
||||
return await self.async_step_language()
|
||||
await self.generate_country_schema()
|
||||
@@ -153,10 +158,8 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if not (
|
||||
errors := await self.validate_input({**reauth_entry.data, **user_input})
|
||||
):
|
||||
if user_input[CONF_EMAIL] != reauth_entry.data[CONF_EMAIL]:
|
||||
self._async_abort_entries_match(
|
||||
{CONF_EMAIL: user_input[CONF_EMAIL]}
|
||||
)
|
||||
await self.async_set_unique_id(self.user_uuid)
|
||||
self._abort_if_unique_id_mismatch()
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry, data_updates=user_input
|
||||
)
|
||||
@@ -220,21 +223,10 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
await get_localization_options(country=data_input[CONF_COUNTRY].lower())
|
||||
)[0].language # Pick any language to test login
|
||||
|
||||
localizations = await get_localization_options(
|
||||
country=data_input[CONF_COUNTRY].lower(),
|
||||
language=data_input[CONF_LANGUAGE],
|
||||
)
|
||||
|
||||
cookidoo = Cookidoo(
|
||||
async_get_clientsession(self.hass),
|
||||
CookidooConfig(
|
||||
email=data_input[CONF_EMAIL],
|
||||
password=data_input[CONF_PASSWORD],
|
||||
localization=localizations[0],
|
||||
),
|
||||
)
|
||||
cookidoo = await cookidoo_from_config_data(self.hass, data_input)
|
||||
try:
|
||||
await cookidoo.login()
|
||||
auth_data = await cookidoo.login()
|
||||
self.user_uuid = auth_data.sub
|
||||
if language_input:
|
||||
await cookidoo.get_additional_items()
|
||||
except CookidooRequestException:
|
||||
|
||||
@@ -21,10 +21,12 @@ class CookidooBaseEntity(CoordinatorEntity[CookidooDataUpdateCoordinator]):
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
|
||||
assert coordinator.config_entry.unique_id
|
||||
|
||||
self.device_info = DeviceInfo(
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
name="Cookidoo",
|
||||
identifiers={(DOMAIN, coordinator.config_entry.entry_id)},
|
||||
identifiers={(DOMAIN, coordinator.config_entry.unique_id)},
|
||||
manufacturer="Vorwerk International & Co. KmG",
|
||||
model="Cookidoo - Thermomix® recipe portal",
|
||||
)
|
||||
|
||||
@@ -0,0 +1,37 @@
|
||||
"""Helpers for cookidoo."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from cookidoo_api import Cookidoo, CookidooConfig, get_localization_options
|
||||
|
||||
from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .coordinator import CookidooConfigEntry
|
||||
|
||||
|
||||
async def cookidoo_from_config_data(
|
||||
hass: HomeAssistant, data: dict[str, Any]
|
||||
) -> Cookidoo:
|
||||
"""Build cookidoo from config data."""
|
||||
localizations = await get_localization_options(
|
||||
country=data[CONF_COUNTRY].lower(),
|
||||
language=data[CONF_LANGUAGE],
|
||||
)
|
||||
|
||||
return Cookidoo(
|
||||
async_get_clientsession(hass),
|
||||
CookidooConfig(
|
||||
email=data[CONF_EMAIL],
|
||||
password=data[CONF_PASSWORD],
|
||||
localization=localizations[0],
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def cookidoo_from_config_entry(
|
||||
hass: HomeAssistant, entry: CookidooConfigEntry
|
||||
) -> Cookidoo:
|
||||
"""Build cookidoo from config entry."""
|
||||
return await cookidoo_from_config_data(hass, dict(entry.data))
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["cookidoo_api"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["cookidoo-api==0.11.2"]
|
||||
"requirements": ["cookidoo-api==0.12.2"]
|
||||
}
|
||||
|
||||
@@ -44,7 +44,8 @@
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"unique_id_mismatch": "The user identifier does not match the previous identifier"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
|
||||
@@ -52,7 +52,8 @@ class CookidooIngredientsTodoListEntity(CookidooBaseEntity, TodoListEntity):
|
||||
def __init__(self, coordinator: CookidooDataUpdateCoordinator) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_ingredients"
|
||||
assert coordinator.config_entry.unique_id
|
||||
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_ingredients"
|
||||
|
||||
@property
|
||||
def todo_items(self) -> list[TodoItem]:
|
||||
@@ -112,7 +113,8 @@ class CookidooAdditionalItemTodoListEntity(CookidooBaseEntity, TodoListEntity):
|
||||
def __init__(self, coordinator: CookidooDataUpdateCoordinator) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_additional_items"
|
||||
assert coordinator.config_entry.unique_id
|
||||
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_additional_items"
|
||||
|
||||
@property
|
||||
def todo_items(self) -> list[TodoItem]:
|
||||
|
||||
@@ -19,7 +19,7 @@ rules:
|
||||
The integration does not provide any additional actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: todo
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
@@ -41,7 +41,7 @@ rules:
|
||||
status: exempt
|
||||
comment: |
|
||||
The integration does not provide any additional options.
|
||||
docs-installation-parameters: todo
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
|
||||
@@ -20,6 +20,7 @@ from dsmr_parser.objects import DSMRObject, MbusDevice, Telegram
|
||||
import serial
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
DOMAIN as SENSOR_DOMAIN,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
@@ -456,24 +457,29 @@ def rename_old_gas_to_mbus(
|
||||
if entity.unique_id.endswith(
|
||||
"belgium_5min_gas_meter_reading"
|
||||
) or entity.unique_id.endswith("hourly_gas_meter_reading"):
|
||||
try:
|
||||
ent_reg.async_update_entity(
|
||||
entity.entity_id,
|
||||
new_unique_id=mbus_device_id,
|
||||
device_id=mbus_device_id,
|
||||
)
|
||||
except ValueError:
|
||||
if ent_reg.async_get_entity_id(
|
||||
SENSOR_DOMAIN, DOMAIN, mbus_device_id
|
||||
):
|
||||
LOGGER.debug(
|
||||
"Skip migration of %s because it already exists",
|
||||
entity.entity_id,
|
||||
)
|
||||
else:
|
||||
LOGGER.debug(
|
||||
"Migrated entity %s from unique id %s to %s",
|
||||
entity.entity_id,
|
||||
entity.unique_id,
|
||||
mbus_device_id,
|
||||
)
|
||||
continue
|
||||
new_device = dev_reg.async_get_or_create(
|
||||
config_entry_id=entry.entry_id,
|
||||
identifiers={(DOMAIN, mbus_device_id)},
|
||||
)
|
||||
ent_reg.async_update_entity(
|
||||
entity.entity_id,
|
||||
new_unique_id=mbus_device_id,
|
||||
device_id=new_device.id,
|
||||
)
|
||||
LOGGER.debug(
|
||||
"Migrated entity %s from unique id %s to %s",
|
||||
entity.entity_id,
|
||||
entity.unique_id,
|
||||
mbus_device_id,
|
||||
)
|
||||
# Cleanup old device
|
||||
dev_entities = er.async_entries_for_device(
|
||||
ent_reg, device_id, include_disabled_entities=True
|
||||
|
||||
@@ -8,11 +8,7 @@ rules:
|
||||
comment: fixed 1 minute cycle based on Enphase Envoy device characteristics
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage:
|
||||
status: todo
|
||||
comment: |
|
||||
- test_zero_conf_malformed_serial_property - with pytest.raises(KeyError) as ex::
|
||||
I don't believe this should be able to raise a KeyError Shouldn't we abort the flow?
|
||||
config-flow-test-coverage: done
|
||||
config-flow:
|
||||
status: todo
|
||||
comment: |
|
||||
@@ -60,11 +56,7 @@ rules:
|
||||
status: done
|
||||
comment: pending https://github.com/home-assistant/core/pull/132373
|
||||
reauthentication-flow: done
|
||||
test-coverage:
|
||||
status: todo
|
||||
comment: |
|
||||
- test_config_different_unique_id -> unique_id set to the mock config entry is an int, not a str
|
||||
- Apart from the coverage, test_option_change_reload does not verify that the config entry is reloaded
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pyflick"],
|
||||
"requirements": ["PyFlick==1.1.2"]
|
||||
"requirements": ["PyFlick==1.1.3"]
|
||||
}
|
||||
|
||||
@@ -51,19 +51,19 @@ class FlickPricingSensor(CoordinatorEntity[FlickElectricDataCoordinator], Sensor
|
||||
_LOGGER.warning(
|
||||
"Unexpected quantity for unit price: %s", self.coordinator.data
|
||||
)
|
||||
return self.coordinator.data.cost
|
||||
return self.coordinator.data.cost * 100
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any] | None:
|
||||
"""Return the state attributes."""
|
||||
components: dict[str, Decimal] = {}
|
||||
components: dict[str, float] = {}
|
||||
|
||||
for component in self.coordinator.data.components:
|
||||
if component.charge_setter not in ATTR_COMPONENTS:
|
||||
_LOGGER.warning("Found unknown component: %s", component.charge_setter)
|
||||
continue
|
||||
|
||||
components[component.charge_setter] = component.value
|
||||
components[component.charge_setter] = float(component.value * 100)
|
||||
|
||||
return {
|
||||
ATTR_START_AT: self.coordinator.data.start_at,
|
||||
|
||||
@@ -15,7 +15,7 @@ from homeassistant.helpers import aiohttp_client
|
||||
from .const import API_TIMEOUT, CONF_EUROPE, CONF_REGION, REGION_DEFAULT, REGION_EU
|
||||
from .coordinator import FGLairCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.CLIMATE]
|
||||
PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.SENSOR]
|
||||
|
||||
type FGLairConfigEntry = ConfigEntry[FGLairCoordinator]
|
||||
|
||||
|
||||
@@ -25,13 +25,11 @@ from homeassistant.components.climate import (
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_HALVES, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import FGLairConfigEntry
|
||||
from .const import DOMAIN
|
||||
from .coordinator import FGLairCoordinator
|
||||
from .entity import FGLairEntity
|
||||
|
||||
HA_TO_FUJI_FAN = {
|
||||
FAN_LOW: FanSpeed.LOW,
|
||||
@@ -72,28 +70,19 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
|
||||
class FGLairDevice(CoordinatorEntity[FGLairCoordinator], ClimateEntity):
|
||||
class FGLairDevice(FGLairEntity, ClimateEntity):
|
||||
"""Represent a Fujitsu HVAC device."""
|
||||
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_precision = PRECISION_HALVES
|
||||
_attr_target_temperature_step = 0.5
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
|
||||
def __init__(self, coordinator: FGLairCoordinator, device: FujitsuHVAC) -> None:
|
||||
"""Store the representation of the device and set the static attributes."""
|
||||
super().__init__(coordinator, context=device.device_serial_number)
|
||||
super().__init__(coordinator, device)
|
||||
|
||||
self._attr_unique_id = device.device_serial_number
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, device.device_serial_number)},
|
||||
name=device.device_name,
|
||||
manufacturer="Fujitsu",
|
||||
model=device.property_values["model_name"],
|
||||
serial_number=device.device_serial_number,
|
||||
sw_version=device.property_values["mcu_firmware_version"],
|
||||
)
|
||||
|
||||
self._attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
@@ -109,11 +98,6 @@ class FGLairDevice(CoordinatorEntity[FGLairCoordinator], ClimateEntity):
|
||||
self._attr_supported_features |= ClimateEntityFeature.SWING_MODE
|
||||
self._set_attr()
|
||||
|
||||
@property
|
||||
def device(self) -> FujitsuHVAC:
|
||||
"""Return the device object from the coordinator data."""
|
||||
return self.coordinator.data[self.coordinator_context]
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if the device is available."""
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
"""Fujitsu FGlair base entity."""
|
||||
|
||||
from ayla_iot_unofficial.fujitsu_hvac import FujitsuHVAC
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import FGLairCoordinator
|
||||
|
||||
|
||||
class FGLairEntity(CoordinatorEntity[FGLairCoordinator]):
|
||||
"""Generic Fglair entity (base class)."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, coordinator: FGLairCoordinator, device: FujitsuHVAC) -> None:
|
||||
"""Store the representation of the device."""
|
||||
super().__init__(coordinator, context=device.device_serial_number)
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, device.device_serial_number)},
|
||||
name=device.device_name,
|
||||
manufacturer="Fujitsu",
|
||||
model=device.property_values["model_name"],
|
||||
serial_number=device.device_serial_number,
|
||||
sw_version=device.property_values["mcu_firmware_version"],
|
||||
)
|
||||
|
||||
@property
|
||||
def device(self) -> FujitsuHVAC:
|
||||
"""Return the device object from the coordinator data."""
|
||||
return self.coordinator.data[self.coordinator_context]
|
||||
@@ -0,0 +1,47 @@
|
||||
"""Outside temperature sensor for Fujitsu FGlair HVAC systems."""
|
||||
|
||||
from ayla_iot_unofficial.fujitsu_hvac import FujitsuHVAC
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .climate import FGLairConfigEntry
|
||||
from .coordinator import FGLairCoordinator
|
||||
from .entity import FGLairEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: FGLairConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up one Fujitsu HVAC device."""
|
||||
async_add_entities(
|
||||
FGLairOutsideTemperature(entry.runtime_data, device)
|
||||
for device in entry.runtime_data.data.values()
|
||||
)
|
||||
|
||||
|
||||
class FGLairOutsideTemperature(FGLairEntity, SensorEntity):
|
||||
"""Entity representing outside temperature sensed by the outside unit of a Fujitsu Heatpump."""
|
||||
|
||||
_attr_device_class = SensorDeviceClass.TEMPERATURE
|
||||
_attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS
|
||||
_attr_state_class = SensorStateClass.MEASUREMENT
|
||||
_attr_translation_key = "fglair_outside_temp"
|
||||
|
||||
def __init__(self, coordinator: FGLairCoordinator, device: FujitsuHVAC) -> None:
|
||||
"""Store the representation of the device."""
|
||||
super().__init__(coordinator, device)
|
||||
self._attr_unique_id = f"{device.device_serial_number}_outside_temperature"
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the sensed outdoor temperature un celsius."""
|
||||
return self.device.outdoor_temperature # type: ignore[no-any-return]
|
||||
@@ -35,5 +35,12 @@
|
||||
"cn": "China"
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"fglair_outside_temp": {
|
||||
"name": "Outside temperature"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,6 +24,7 @@ from .coordinator import FytaCoordinator
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.SENSOR,
|
||||
]
|
||||
type FytaConfigEntry = ConfigEntry[FytaCoordinator]
|
||||
|
||||
@@ -0,0 +1,117 @@
|
||||
"""Binary sensors for Fyta."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Final
|
||||
|
||||
from fyta_cli.fyta_models import Plant
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
BinarySensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import FytaConfigEntry
|
||||
from .entity import FytaPlantEntity
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class FytaBinarySensorEntityDescription(BinarySensorEntityDescription):
|
||||
"""Describes Fyta binary sensor entity."""
|
||||
|
||||
value_fn: Callable[[Plant], bool]
|
||||
|
||||
|
||||
BINARY_SENSORS: Final[list[FytaBinarySensorEntityDescription]] = [
|
||||
FytaBinarySensorEntityDescription(
|
||||
key="low_battery",
|
||||
device_class=BinarySensorDeviceClass.BATTERY,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda plant: plant.low_battery,
|
||||
),
|
||||
FytaBinarySensorEntityDescription(
|
||||
key="notification_light",
|
||||
translation_key="notification_light",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda plant: plant.notification_light,
|
||||
),
|
||||
FytaBinarySensorEntityDescription(
|
||||
key="notification_nutrition",
|
||||
translation_key="notification_nutrition",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda plant: plant.notification_nutrition,
|
||||
),
|
||||
FytaBinarySensorEntityDescription(
|
||||
key="notification_temperature",
|
||||
translation_key="notification_temperature",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda plant: plant.notification_temperature,
|
||||
),
|
||||
FytaBinarySensorEntityDescription(
|
||||
key="notification_water",
|
||||
translation_key="notification_water",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda plant: plant.notification_water,
|
||||
),
|
||||
FytaBinarySensorEntityDescription(
|
||||
key="sensor_update_available",
|
||||
device_class=BinarySensorDeviceClass.UPDATE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda plant: plant.sensor_update_available,
|
||||
),
|
||||
FytaBinarySensorEntityDescription(
|
||||
key="productive_plant",
|
||||
translation_key="productive_plant",
|
||||
value_fn=lambda plant: plant.productive_plant,
|
||||
),
|
||||
FytaBinarySensorEntityDescription(
|
||||
key="repotted",
|
||||
translation_key="repotted",
|
||||
value_fn=lambda plant: plant.repotted,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: FytaConfigEntry, async_add_entities: AddEntitiesCallback
|
||||
) -> None:
|
||||
"""Set up the FYTA binary sensors."""
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
FytaPlantBinarySensor(coordinator, entry, sensor, plant_id)
|
||||
for plant_id in coordinator.fyta.plant_list
|
||||
for sensor in BINARY_SENSORS
|
||||
if sensor.key in dir(coordinator.data.get(plant_id))
|
||||
)
|
||||
|
||||
def _async_add_new_device(plant_id: int) -> None:
|
||||
async_add_entities(
|
||||
FytaPlantBinarySensor(coordinator, entry, sensor, plant_id)
|
||||
for sensor in BINARY_SENSORS
|
||||
if sensor.key in dir(coordinator.data.get(plant_id))
|
||||
)
|
||||
|
||||
coordinator.new_device_callbacks.append(_async_add_new_device)
|
||||
|
||||
|
||||
class FytaPlantBinarySensor(FytaPlantEntity, BinarySensorEntity):
|
||||
"""Represents a Fyta binary sensor."""
|
||||
|
||||
entity_description: FytaBinarySensorEntityDescription
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return value of the binary sensor."""
|
||||
|
||||
return self.entity_description.value_fn(self.plant)
|
||||
@@ -2,8 +2,8 @@
|
||||
|
||||
from fyta_cli.fyta_models import Plant
|
||||
|
||||
from homeassistant.components.sensor import SensorEntityDescription
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import FytaConfigEntry
|
||||
@@ -20,7 +20,7 @@ class FytaPlantEntity(CoordinatorEntity[FytaCoordinator]):
|
||||
self,
|
||||
coordinator: FytaCoordinator,
|
||||
entry: FytaConfigEntry,
|
||||
description: SensorEntityDescription,
|
||||
description: EntityDescription,
|
||||
plant_id: int,
|
||||
) -> None:
|
||||
"""Initialize the Fyta sensor."""
|
||||
|
||||
@@ -1,5 +1,25 @@
|
||||
{
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"notification_light": {
|
||||
"default": "mdi:lightbulb-alert-outline"
|
||||
},
|
||||
"notification_nutrition": {
|
||||
"default": "mdi:beaker-alert-outline"
|
||||
},
|
||||
"notification_temperature": {
|
||||
"default": "mdi:thermometer-alert"
|
||||
},
|
||||
"notification_water": {
|
||||
"default": "mdi:watering-can-outline"
|
||||
},
|
||||
"productive_plant": {
|
||||
"default": "mdi:fruit-grapes"
|
||||
},
|
||||
"repotted": {
|
||||
"default": "mdi:shovel"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"status": {
|
||||
"default": "mdi:flower"
|
||||
@@ -13,6 +33,9 @@
|
||||
"moisture_status": {
|
||||
"default": "mdi:water-percent-alert"
|
||||
},
|
||||
"nutrients_status": {
|
||||
"default": "mdi:emoticon-poop"
|
||||
},
|
||||
"salinity_status": {
|
||||
"default": "mdi:sprout-outline"
|
||||
},
|
||||
@@ -21,6 +44,12 @@
|
||||
},
|
||||
"salinity": {
|
||||
"default": "mdi:sprout-outline"
|
||||
},
|
||||
"last_fertilised": {
|
||||
"default": "mdi:calendar-check"
|
||||
},
|
||||
"next_fertilisation": {
|
||||
"default": "mdi:calendar-end"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -82,6 +82,13 @@ SENSORS: Final[list[FytaSensorEntityDescription]] = [
|
||||
options=PLANT_MEASUREMENT_STATUS_LIST,
|
||||
value_fn=lambda plant: plant.moisture_status.name.lower(),
|
||||
),
|
||||
FytaSensorEntityDescription(
|
||||
key="nutrients_status",
|
||||
translation_key="nutrients_status",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=PLANT_MEASUREMENT_STATUS_LIST,
|
||||
value_fn=lambda plant: plant.nutrients_status.name.lower(),
|
||||
),
|
||||
FytaSensorEntityDescription(
|
||||
key="salinity_status",
|
||||
translation_key="salinity_status",
|
||||
@@ -124,6 +131,18 @@ SENSORS: Final[list[FytaSensorEntityDescription]] = [
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda plant: plant.ph,
|
||||
),
|
||||
FytaSensorEntityDescription(
|
||||
key="fertilise_last",
|
||||
translation_key="last_fertilised",
|
||||
device_class=SensorDeviceClass.DATE,
|
||||
value_fn=lambda plant: plant.fertilise_last,
|
||||
),
|
||||
FytaSensorEntityDescription(
|
||||
key="fertilise_next",
|
||||
translation_key="next_fertilisation",
|
||||
device_class=SensorDeviceClass.DATE,
|
||||
value_fn=lambda plant: plant.fertilise_next,
|
||||
),
|
||||
FytaSensorEntityDescription(
|
||||
key="battery_level",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
|
||||
@@ -38,6 +38,29 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"notification_light": {
|
||||
"name": "Light notification"
|
||||
},
|
||||
"notification_nutrition": {
|
||||
"name": "Nutrition notification"
|
||||
},
|
||||
"notification_temperature": {
|
||||
"name": "Temperature notification"
|
||||
},
|
||||
"notification_water": {
|
||||
"name": "Water notification"
|
||||
},
|
||||
"productive_plant": {
|
||||
"name": "Productive plant"
|
||||
},
|
||||
"repotted": {
|
||||
"name": "Repotted"
|
||||
},
|
||||
"sensor_update_available": {
|
||||
"name": "Sensor update available"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"scientific_name": {
|
||||
"name": "Scientific name"
|
||||
@@ -84,6 +107,17 @@
|
||||
"too_high": "[%key:component::fyta::entity::sensor::temperature_status::state::too_high%]"
|
||||
}
|
||||
},
|
||||
"nutrients_status": {
|
||||
"name": "Nutrients state",
|
||||
"state": {
|
||||
"no_data": "[%key:component::fyta::entity::sensor::temperature_status::state::no_data%]",
|
||||
"too_low": "[%key:component::fyta::entity::sensor::temperature_status::state::too_low%]",
|
||||
"low": "[%key:component::fyta::entity::sensor::temperature_status::state::low%]",
|
||||
"perfect": "[%key:component::fyta::entity::sensor::temperature_status::state::perfect%]",
|
||||
"high": "[%key:component::fyta::entity::sensor::temperature_status::state::high%]",
|
||||
"too_high": "[%key:component::fyta::entity::sensor::temperature_status::state::too_high%]"
|
||||
}
|
||||
},
|
||||
"salinity_status": {
|
||||
"name": "Salinity state",
|
||||
"state": {
|
||||
@@ -100,6 +134,12 @@
|
||||
},
|
||||
"salinity": {
|
||||
"name": "Salinity"
|
||||
},
|
||||
"last_fertilised": {
|
||||
"name": "Last fertilized"
|
||||
},
|
||||
"next_fertilisation": {
|
||||
"name": "Next fertilization"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -88,6 +88,7 @@ SUPPORT_LANGUAGES = [
|
||||
"uk",
|
||||
"ur",
|
||||
"vi",
|
||||
"yue",
|
||||
# dialects
|
||||
"zh-CN",
|
||||
"zh-cn",
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/google_translate",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["gtts"],
|
||||
"requirements": ["gTTS==2.2.4"]
|
||||
"requirements": ["gTTS==2.5.3"]
|
||||
}
|
||||
|
||||
@@ -275,11 +275,11 @@ class GroupManager:
|
||||
|
||||
player_id_to_entity_id_map = self.entity_id_map
|
||||
for group in groups.values():
|
||||
leader_entity_id = player_id_to_entity_id_map.get(group.leader.player_id)
|
||||
leader_entity_id = player_id_to_entity_id_map.get(group.lead_player_id)
|
||||
member_entity_ids = [
|
||||
player_id_to_entity_id_map[member.player_id]
|
||||
for member in group.members
|
||||
if member.player_id in player_id_to_entity_id_map
|
||||
player_id_to_entity_id_map[member]
|
||||
for member in group.member_player_ids
|
||||
if member in player_id_to_entity_id_map
|
||||
]
|
||||
# Make sure the group leader is always the first element
|
||||
group_info = [leader_entity_id, *member_entity_ids]
|
||||
@@ -422,7 +422,7 @@ class SourceManager:
|
||||
None,
|
||||
)
|
||||
if index is not None:
|
||||
await player.play_favorite(index)
|
||||
await player.play_preset_station(index)
|
||||
return
|
||||
|
||||
input_source = next(
|
||||
@@ -434,7 +434,7 @@ class SourceManager:
|
||||
None,
|
||||
)
|
||||
if input_source is not None:
|
||||
await player.play_input_source(input_source)
|
||||
await player.play_input_source(input_source.media_id)
|
||||
return
|
||||
|
||||
_LOGGER.error("Unknown source: %s", source)
|
||||
@@ -447,7 +447,7 @@ class SourceManager:
|
||||
(
|
||||
input_source.name
|
||||
for input_source in self.inputs
|
||||
if input_source.input_name == now_playing_media.media_id
|
||||
if input_source.media_id == now_playing_media.media_id
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/heos",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pyheos"],
|
||||
"requirements": ["pyheos==0.8.0"],
|
||||
"requirements": ["pyheos==0.9.0"],
|
||||
"single_config_entry": true,
|
||||
"ssdp": [
|
||||
{
|
||||
|
||||
@@ -47,9 +47,9 @@ BASE_SUPPORTED_FEATURES = (
|
||||
)
|
||||
|
||||
PLAY_STATE_TO_STATE = {
|
||||
heos_const.PLAY_STATE_PLAY: MediaPlayerState.PLAYING,
|
||||
heos_const.PLAY_STATE_STOP: MediaPlayerState.IDLE,
|
||||
heos_const.PLAY_STATE_PAUSE: MediaPlayerState.PAUSED,
|
||||
heos_const.PlayState.PLAY: MediaPlayerState.PLAYING,
|
||||
heos_const.PlayState.STOP: MediaPlayerState.IDLE,
|
||||
heos_const.PlayState.PAUSE: MediaPlayerState.PAUSED,
|
||||
}
|
||||
|
||||
CONTROL_TO_SUPPORT = {
|
||||
@@ -61,11 +61,11 @@ CONTROL_TO_SUPPORT = {
|
||||
}
|
||||
|
||||
HA_HEOS_ENQUEUE_MAP = {
|
||||
None: heos_const.ADD_QUEUE_REPLACE_AND_PLAY,
|
||||
MediaPlayerEnqueue.ADD: heos_const.ADD_QUEUE_ADD_TO_END,
|
||||
MediaPlayerEnqueue.REPLACE: heos_const.ADD_QUEUE_REPLACE_AND_PLAY,
|
||||
MediaPlayerEnqueue.NEXT: heos_const.ADD_QUEUE_PLAY_NEXT,
|
||||
MediaPlayerEnqueue.PLAY: heos_const.ADD_QUEUE_PLAY_NOW,
|
||||
None: heos_const.AddCriteriaType.REPLACE_AND_PLAY,
|
||||
MediaPlayerEnqueue.ADD: heos_const.AddCriteriaType.ADD_TO_END,
|
||||
MediaPlayerEnqueue.REPLACE: heos_const.AddCriteriaType.REPLACE_AND_PLAY,
|
||||
MediaPlayerEnqueue.NEXT: heos_const.AddCriteriaType.PLAY_NEXT,
|
||||
MediaPlayerEnqueue.PLAY: heos_const.AddCriteriaType.PLAY_NOW,
|
||||
}
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -268,7 +268,7 @@ class HeosMediaPlayer(MediaPlayerEntity):
|
||||
)
|
||||
if index is None:
|
||||
raise ValueError(f"Invalid favorite '{media_id}'")
|
||||
await self._player.play_favorite(index)
|
||||
await self._player.play_preset_station(index)
|
||||
return
|
||||
|
||||
raise ValueError(f"Unsupported media type '{media_type}'")
|
||||
|
||||
@@ -14,7 +14,7 @@ rules:
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: todo
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
@@ -60,14 +60,12 @@ rules:
|
||||
status: todo
|
||||
comment: Explore if this is possible.
|
||||
discovery: done
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting:
|
||||
status: todo
|
||||
comment: Has some troublehsooting setps, but needs to be improved
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices: todo
|
||||
entity-category: done
|
||||
|
||||
@@ -19,6 +19,7 @@ from homeassistant.core import callback
|
||||
from homeassistant.helpers.selector import (
|
||||
CountrySelector,
|
||||
CountrySelectorConfig,
|
||||
SelectOptionDict,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
@@ -30,6 +31,30 @@ from .const import CONF_CATEGORIES, CONF_PROVINCE, DOMAIN
|
||||
SUPPORTED_COUNTRIES = list_supported_countries(include_aliases=False)
|
||||
|
||||
|
||||
def get_optional_provinces(country: str) -> list[Any]:
|
||||
"""Return the country provinces (territories).
|
||||
|
||||
Some territories can have extra or different holidays
|
||||
from another within the same country.
|
||||
Some territories can have different names (aliases).
|
||||
"""
|
||||
province_options: list[Any] = []
|
||||
|
||||
if provinces := SUPPORTED_COUNTRIES[country]:
|
||||
country_data = country_holidays(country, years=dt_util.utcnow().year)
|
||||
if country_data.subdivisions_aliases and (
|
||||
subdiv_aliases := country_data.get_subdivision_aliases()
|
||||
):
|
||||
province_options = [
|
||||
SelectOptionDict(value=k, label=", ".join(v))
|
||||
for k, v in subdiv_aliases.items()
|
||||
]
|
||||
else:
|
||||
province_options = provinces
|
||||
|
||||
return province_options
|
||||
|
||||
|
||||
def get_optional_categories(country: str) -> list[str]:
|
||||
"""Return the country categories.
|
||||
|
||||
@@ -45,7 +70,7 @@ def get_optional_categories(country: str) -> list[str]:
|
||||
def get_options_schema(country: str) -> vol.Schema:
|
||||
"""Return the options schema."""
|
||||
schema = {}
|
||||
if provinces := SUPPORTED_COUNTRIES[country]:
|
||||
if provinces := get_optional_provinces(country):
|
||||
schema[vol.Optional(CONF_PROVINCE)] = SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=provinces,
|
||||
|
||||
@@ -361,7 +361,7 @@
|
||||
},
|
||||
"suspend_integration": {
|
||||
"name": "Suspend integration",
|
||||
"description": "Suspends integration. Suspending logs the integration out from the router, and stops accessing it. Useful e.g. if accessing the router web interface from another source such as a web browser is temporarily required. Invoke the resume_integration action to resume.\n.",
|
||||
"description": "Suspends integration. Suspending logs the integration out from the router, and stops accessing it. Useful e.g. if accessing the router web interface from another source such as a web browser is temporarily required. Invoke the 'Resume integration' action to resume.",
|
||||
"fields": {
|
||||
"url": {
|
||||
"name": "[%key:common::config_flow::data::url%]",
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aioautomower"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aioautomower==2024.12.0"]
|
||||
"requirements": ["aioautomower==2025.1.0"]
|
||||
}
|
||||
|
||||
@@ -4,8 +4,9 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from dataclasses import asdict, dataclass
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyipma.api import IPMA_API
|
||||
from pyipma.location import Location
|
||||
@@ -28,23 +29,41 @@ _LOGGER = logging.getLogger(__name__)
|
||||
class IPMASensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes a IPMA sensor entity."""
|
||||
|
||||
value_fn: Callable[[Location, IPMA_API], Coroutine[Location, IPMA_API, int | None]]
|
||||
value_fn: Callable[
|
||||
[Location, IPMA_API], Coroutine[Location, IPMA_API, tuple[Any, dict[str, Any]]]
|
||||
]
|
||||
|
||||
|
||||
async def async_retrieve_rcm(location: Location, api: IPMA_API) -> int | None:
|
||||
async def async_retrieve_rcm(
|
||||
location: Location, api: IPMA_API
|
||||
) -> tuple[int, dict[str, Any]] | tuple[None, dict[str, Any]]:
|
||||
"""Retrieve RCM."""
|
||||
fire_risk: RCM = await location.fire_risk(api)
|
||||
if fire_risk:
|
||||
return fire_risk.rcm
|
||||
return None
|
||||
return fire_risk.rcm, {}
|
||||
return None, {}
|
||||
|
||||
|
||||
async def async_retrieve_uvi(location: Location, api: IPMA_API) -> int | None:
|
||||
async def async_retrieve_uvi(
|
||||
location: Location, api: IPMA_API
|
||||
) -> tuple[int, dict[str, Any]] | tuple[None, dict[str, Any]]:
|
||||
"""Retrieve UV."""
|
||||
uv_risk: UV = await location.uv_risk(api)
|
||||
if uv_risk:
|
||||
return round(uv_risk.iUv)
|
||||
return None
|
||||
return round(uv_risk.iUv), {}
|
||||
return None, {}
|
||||
|
||||
|
||||
async def async_retrieve_warning(
|
||||
location: Location, api: IPMA_API
|
||||
) -> tuple[Any, dict[str, str]]:
|
||||
"""Retrieve Warning."""
|
||||
warnings = await location.warnings(api)
|
||||
if len(warnings):
|
||||
return warnings[0].awarenessLevelID, {
|
||||
k: str(v) for k, v in asdict(warnings[0]).items()
|
||||
}
|
||||
return "green", {}
|
||||
|
||||
|
||||
SENSOR_TYPES: tuple[IPMASensorEntityDescription, ...] = (
|
||||
@@ -58,6 +77,11 @@ SENSOR_TYPES: tuple[IPMASensorEntityDescription, ...] = (
|
||||
translation_key="uv_index",
|
||||
value_fn=async_retrieve_uvi,
|
||||
),
|
||||
IPMASensorEntityDescription(
|
||||
key="alert",
|
||||
translation_key="weather_alert",
|
||||
value_fn=async_retrieve_warning,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -94,6 +118,8 @@ class IPMASensor(SensorEntity, IPMADevice):
|
||||
async def async_update(self) -> None:
|
||||
"""Update sensors."""
|
||||
async with asyncio.timeout(10):
|
||||
self._attr_native_value = await self.entity_description.value_fn(
|
||||
state, attrs = await self.entity_description.value_fn(
|
||||
self._location, self._api
|
||||
)
|
||||
self._attr_native_value = state
|
||||
self._attr_extra_state_attributes = attrs
|
||||
|
||||
@@ -31,6 +31,15 @@
|
||||
},
|
||||
"uv_index": {
|
||||
"name": "UV index"
|
||||
},
|
||||
"weather_alert": {
|
||||
"name": "Weather Alert",
|
||||
"state": {
|
||||
"red": "Red",
|
||||
"yellow": "Yellow",
|
||||
"orange": "Orange",
|
||||
"green": "Green"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -91,7 +91,7 @@ from .schema import (
|
||||
WeatherSchema,
|
||||
)
|
||||
from .services import register_knx_services
|
||||
from .storage.config_store import KNXConfigStore
|
||||
from .storage.config_store import STORAGE_KEY as CONFIG_STORAGE_KEY, KNXConfigStore
|
||||
from .telegrams import STORAGE_KEY as TELEGRAMS_STORAGE_KEY, Telegrams
|
||||
from .websocket import register_panel
|
||||
|
||||
@@ -226,6 +226,8 @@ async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
if knxkeys_filename is not None:
|
||||
with contextlib.suppress(FileNotFoundError):
|
||||
(storage_dir / knxkeys_filename).unlink()
|
||||
with contextlib.suppress(FileNotFoundError):
|
||||
(storage_dir / CONFIG_STORAGE_KEY).unlink()
|
||||
with contextlib.suppress(FileNotFoundError):
|
||||
(storage_dir / PROJECT_STORAGE_KEY).unlink()
|
||||
with contextlib.suppress(FileNotFoundError):
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
"step": {
|
||||
"connection_type": {
|
||||
"title": "KNX connection",
|
||||
"description": "'Automatic' performs a gateway scan on start, to find a KNX IP interface. It will connect via a tunnel. (Not available if a gateway scan was not successful.) \n\n 'Tunneling' will connect to a specific KNX IP interface over a tunnel. \n\n 'Routing' will use Multicast to communicate with KNX IP routers.",
|
||||
"description": "'Automatic' performs a gateway scan on start, to find a KNX IP interface. It will connect via a tunnel. (Not available if a gateway scan was not successful.)\n\n'Tunneling' will connect to a specific KNX IP interface over a tunnel.\n\n'Routing' will use Multicast to communicate with KNX IP routers.",
|
||||
"data": {
|
||||
"connection_type": "KNX Connection Type"
|
||||
"connection_type": "KNX connection type"
|
||||
},
|
||||
"data_description": {
|
||||
"connection_type": "Please select the connection type you want to use for your KNX connection."
|
||||
@@ -33,7 +33,7 @@
|
||||
"title": "Tunnel settings",
|
||||
"description": "Please enter the connection information of your tunneling device.",
|
||||
"data": {
|
||||
"tunneling_type": "KNX Tunneling Type",
|
||||
"tunneling_type": "KNX tunneling type",
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"port": "[%key:common::config_flow::data::port%]",
|
||||
"route_back": "Route back / NAT mode",
|
||||
@@ -48,11 +48,11 @@
|
||||
}
|
||||
},
|
||||
"secure_key_source_menu_tunnel": {
|
||||
"title": "KNX IP-Secure",
|
||||
"title": "KNX IP Secure",
|
||||
"description": "How do you want to configure KNX/IP Secure?",
|
||||
"menu_options": {
|
||||
"secure_knxkeys": "Use a `.knxkeys` file providing IP secure keys",
|
||||
"secure_tunnel_manual": "Configure IP secure credentials manually"
|
||||
"secure_knxkeys": "Use a `.knxkeys` file providing IP Secure keys",
|
||||
"secure_tunnel_manual": "Configure IP Secure credentials manually"
|
||||
}
|
||||
},
|
||||
"secure_key_source_menu_routing": {
|
||||
@@ -60,7 +60,7 @@
|
||||
"description": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::description%]",
|
||||
"menu_options": {
|
||||
"secure_knxkeys": "[%key:component::knx::config::step::secure_key_source_menu_tunnel::menu_options::secure_knxkeys%]",
|
||||
"secure_routing_manual": "Configure IP secure backbone key manually"
|
||||
"secure_routing_manual": "Configure IP Secure backbone key manually"
|
||||
}
|
||||
},
|
||||
"secure_knxkeys": {
|
||||
@@ -86,7 +86,7 @@
|
||||
},
|
||||
"secure_tunnel_manual": {
|
||||
"title": "Secure tunnelling",
|
||||
"description": "Please enter your IP secure information.",
|
||||
"description": "Please enter your IP Secure information.",
|
||||
"data": {
|
||||
"user_id": "User ID",
|
||||
"user_password": "User password",
|
||||
@@ -443,7 +443,7 @@
|
||||
},
|
||||
"entity_id": {
|
||||
"name": "Entity",
|
||||
"description": "Entity id whose state or attribute shall be exposed."
|
||||
"description": "Entity ID whose state or attribute shall be exposed."
|
||||
},
|
||||
"attribute": {
|
||||
"name": "Entity attribute",
|
||||
|
||||
@@ -0,0 +1,94 @@
|
||||
"""The LetPot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
||||
from letpot.client import LetPotClient
|
||||
from letpot.converters import CONVERTERS
|
||||
from letpot.exceptions import LetPotAuthenticationException, LetPotException
|
||||
from letpot.models import AuthenticationInfo
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_EMAIL, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import (
|
||||
CONF_ACCESS_TOKEN_EXPIRES,
|
||||
CONF_REFRESH_TOKEN,
|
||||
CONF_REFRESH_TOKEN_EXPIRES,
|
||||
CONF_USER_ID,
|
||||
)
|
||||
from .coordinator import LetPotDeviceCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.TIME]
|
||||
|
||||
type LetPotConfigEntry = ConfigEntry[list[LetPotDeviceCoordinator]]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: LetPotConfigEntry) -> bool:
|
||||
"""Set up LetPot from a config entry."""
|
||||
|
||||
auth = AuthenticationInfo(
|
||||
access_token=entry.data[CONF_ACCESS_TOKEN],
|
||||
access_token_expires=entry.data[CONF_ACCESS_TOKEN_EXPIRES],
|
||||
refresh_token=entry.data[CONF_REFRESH_TOKEN],
|
||||
refresh_token_expires=entry.data[CONF_REFRESH_TOKEN_EXPIRES],
|
||||
user_id=entry.data[CONF_USER_ID],
|
||||
email=entry.data[CONF_EMAIL],
|
||||
)
|
||||
websession = async_get_clientsession(hass)
|
||||
client = LetPotClient(websession, auth)
|
||||
|
||||
if not auth.is_valid:
|
||||
try:
|
||||
auth = await client.refresh_token()
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
data={
|
||||
CONF_ACCESS_TOKEN: auth.access_token,
|
||||
CONF_ACCESS_TOKEN_EXPIRES: auth.access_token_expires,
|
||||
CONF_REFRESH_TOKEN: auth.refresh_token,
|
||||
CONF_REFRESH_TOKEN_EXPIRES: auth.refresh_token_expires,
|
||||
CONF_USER_ID: auth.user_id,
|
||||
CONF_EMAIL: auth.email,
|
||||
},
|
||||
)
|
||||
except LetPotAuthenticationException as exc:
|
||||
raise ConfigEntryError from exc
|
||||
|
||||
try:
|
||||
devices = await client.get_devices()
|
||||
except LetPotAuthenticationException as exc:
|
||||
raise ConfigEntryError from exc
|
||||
except LetPotException as exc:
|
||||
raise ConfigEntryNotReady from exc
|
||||
|
||||
coordinators: list[LetPotDeviceCoordinator] = [
|
||||
LetPotDeviceCoordinator(hass, auth, device)
|
||||
for device in devices
|
||||
if any(converter.supports_type(device.device_type) for converter in CONVERTERS)
|
||||
]
|
||||
|
||||
await asyncio.gather(
|
||||
*[
|
||||
coordinator.async_config_entry_first_refresh()
|
||||
for coordinator in coordinators
|
||||
]
|
||||
)
|
||||
|
||||
entry.runtime_data = coordinators
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: LetPotConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
|
||||
for coordinator in entry.runtime_data:
|
||||
coordinator.device_client.disconnect()
|
||||
return unload_ok
|
||||
@@ -0,0 +1,92 @@
|
||||
"""Config flow for the LetPot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from letpot.client import LetPotClient
|
||||
from letpot.exceptions import LetPotAuthenticationException, LetPotConnectionException
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_EMAIL, CONF_PASSWORD
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import (
|
||||
TextSelector,
|
||||
TextSelectorConfig,
|
||||
TextSelectorType,
|
||||
)
|
||||
|
||||
from .const import (
|
||||
CONF_ACCESS_TOKEN_EXPIRES,
|
||||
CONF_REFRESH_TOKEN,
|
||||
CONF_REFRESH_TOKEN_EXPIRES,
|
||||
CONF_USER_ID,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_EMAIL): TextSelector(
|
||||
TextSelectorConfig(
|
||||
type=TextSelectorType.EMAIL,
|
||||
),
|
||||
),
|
||||
vol.Required(CONF_PASSWORD): TextSelector(
|
||||
TextSelectorConfig(
|
||||
type=TextSelectorType.PASSWORD,
|
||||
),
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class LetPotConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for LetPot."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
async def _async_validate_credentials(
|
||||
self, email: str, password: str
|
||||
) -> dict[str, Any]:
|
||||
websession = async_get_clientsession(self.hass)
|
||||
client = LetPotClient(websession)
|
||||
auth = await client.login(email, password)
|
||||
return {
|
||||
CONF_ACCESS_TOKEN: auth.access_token,
|
||||
CONF_ACCESS_TOKEN_EXPIRES: auth.access_token_expires,
|
||||
CONF_REFRESH_TOKEN: auth.refresh_token,
|
||||
CONF_REFRESH_TOKEN_EXPIRES: auth.refresh_token_expires,
|
||||
CONF_USER_ID: auth.user_id,
|
||||
CONF_EMAIL: auth.email,
|
||||
}
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
try:
|
||||
data_dict = await self._async_validate_credentials(
|
||||
user_input[CONF_EMAIL], user_input[CONF_PASSWORD]
|
||||
)
|
||||
except LetPotConnectionException:
|
||||
errors["base"] = "cannot_connect"
|
||||
except LetPotAuthenticationException:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception: # noqa: BLE001
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
await self.async_set_unique_id(data_dict[CONF_USER_ID])
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=data_dict[CONF_EMAIL], data=data_dict
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
@@ -0,0 +1,10 @@
|
||||
"""Constants for the LetPot integration."""
|
||||
|
||||
DOMAIN = "letpot"
|
||||
|
||||
CONF_ACCESS_TOKEN_EXPIRES = "access_token_expires"
|
||||
CONF_REFRESH_TOKEN = "refresh_token"
|
||||
CONF_REFRESH_TOKEN_EXPIRES = "refresh_token_expires"
|
||||
CONF_USER_ID = "user_id"
|
||||
|
||||
REQUEST_UPDATE_TIMEOUT = 10
|
||||
@@ -0,0 +1,67 @@
|
||||
"""Coordinator for the LetPot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from letpot.deviceclient import LetPotDeviceClient
|
||||
from letpot.exceptions import LetPotAuthenticationException, LetPotException
|
||||
from letpot.models import AuthenticationInfo, LetPotDevice, LetPotDeviceStatus
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import REQUEST_UPDATE_TIMEOUT
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import LetPotConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class LetPotDeviceCoordinator(DataUpdateCoordinator[LetPotDeviceStatus]):
|
||||
"""Class to handle data updates for a specific garden."""
|
||||
|
||||
config_entry: LetPotConfigEntry
|
||||
|
||||
device: LetPotDevice
|
||||
device_client: LetPotDeviceClient
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, info: AuthenticationInfo, device: LetPotDevice
|
||||
) -> None:
|
||||
"""Initialize coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name=f"LetPot {device.serial_number}",
|
||||
)
|
||||
self._info = info
|
||||
self.device = device
|
||||
self.device_client = LetPotDeviceClient(info, device.serial_number)
|
||||
|
||||
def _handle_status_update(self, status: LetPotDeviceStatus) -> None:
|
||||
"""Distribute status update to entities."""
|
||||
self.async_set_updated_data(data=status)
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
"""Set up subscription for coordinator."""
|
||||
try:
|
||||
await self.device_client.subscribe(self._handle_status_update)
|
||||
except LetPotAuthenticationException as exc:
|
||||
raise ConfigEntryError from exc
|
||||
|
||||
async def _async_update_data(self) -> LetPotDeviceStatus:
|
||||
"""Request an update from the device and wait for a status update or timeout."""
|
||||
try:
|
||||
async with asyncio.timeout(REQUEST_UPDATE_TIMEOUT):
|
||||
await self.device_client.get_current_status()
|
||||
except LetPotException as exc:
|
||||
raise UpdateFailed(exc) from exc
|
||||
|
||||
# The subscription task will have updated coordinator.data, so return that data.
|
||||
# If we don't return anything here, coordinator.data will be set to None.
|
||||
return self.data
|
||||
@@ -0,0 +1,25 @@
|
||||
"""Base class for LetPot entities."""
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import LetPotDeviceCoordinator
|
||||
|
||||
|
||||
class LetPotEntity(CoordinatorEntity[LetPotDeviceCoordinator]):
|
||||
"""Defines a base LetPot entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, coordinator: LetPotDeviceCoordinator) -> None:
|
||||
"""Initialize a LetPot entity."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, coordinator.device.serial_number)},
|
||||
name=coordinator.device.name,
|
||||
manufacturer="LetPot",
|
||||
model=coordinator.device_client.device_model_name,
|
||||
model_id=coordinator.device_client.device_model_code,
|
||||
serial_number=coordinator.device.serial_number,
|
||||
)
|
||||
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"domain": "letpot",
|
||||
"name": "LetPot",
|
||||
"codeowners": ["@jpelgrom"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/letpot",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["letpot==0.2.0"]
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
appropriate-polling:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration only receives push-based updates.
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
config-entry-unloading:
|
||||
status: done
|
||||
comment: |
|
||||
Push connection connects in coordinator _async_setup, disconnects in init async_unload_entry.
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: |
|
||||
The integration does not have configuration options.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: todo
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: todo
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: todo
|
||||
discovery: todo
|
||||
docs-data-update: done
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: todo
|
||||
entity-device-class: todo
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: done
|
||||
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"email": "[%key:common::config_flow::data::email%]",
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"email": "The email address of your LetPot account.",
|
||||
"password": "The password of your LetPot account."
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"time": {
|
||||
"light_schedule_end": {
|
||||
"name": "Light off"
|
||||
},
|
||||
"light_schedule_start": {
|
||||
"name": "Light on"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,93 @@
|
||||
"""Support for LetPot time entities."""
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from datetime import time
|
||||
from typing import Any
|
||||
|
||||
from letpot.deviceclient import LetPotDeviceClient
|
||||
from letpot.models import LetPotDeviceStatus
|
||||
|
||||
from homeassistant.components.time import TimeEntity, TimeEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import LetPotConfigEntry
|
||||
from .coordinator import LetPotDeviceCoordinator
|
||||
from .entity import LetPotEntity
|
||||
|
||||
# Each change pushes a 'full' device status with the change. The library will cache
|
||||
# pending changes to avoid overwriting, but try to avoid a lot of parallelism.
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
class LetPotTimeEntityDescription(TimeEntityDescription):
    """Describes a LetPot time entity."""

    # Extracts this entity's time value from a device status snapshot
    # (None when the value is not available).
    value_fn: Callable[[LetPotDeviceStatus], time | None]
    # Coroutine that pushes a new time value to the device via the client.
    set_value_fn: Callable[[LetPotDeviceClient, time], Coroutine[Any, Any, None]]
|
||||
|
||||
|
||||
# Entity descriptions for the light schedule start/end configuration times.
TIME_SENSORS: tuple[LetPotTimeEntityDescription, ...] = (
    LetPotTimeEntityDescription(
        key="light_schedule_end",
        translation_key="light_schedule_end",
        # Status may be None before the first update; report no value then.
        value_fn=lambda status: None if status is None else status.light_schedule_end,
        # Only the end time is updated here; start=None presumably leaves the
        # start time unchanged — confirm against the letpot library.
        set_value_fn=lambda deviceclient, value: deviceclient.set_light_schedule(
            start=None, end=value
        ),
        entity_category=EntityCategory.CONFIG,
    ),
    LetPotTimeEntityDescription(
        key="light_schedule_start",
        translation_key="light_schedule_start",
        value_fn=lambda status: None if status is None else status.light_schedule_start,
        # Only the start time is updated here; end=None presumably leaves the
        # end time unchanged — confirm against the letpot library.
        set_value_fn=lambda deviceclient, value: deviceclient.set_light_schedule(
            start=value, end=None
        ),
        entity_category=EntityCategory.CONFIG,
    ),
)
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: LetPotConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up LetPot time entities based on a config entry."""
    # One entity per (description, device coordinator) combination.
    entities = [
        LetPotTimeEntity(device_coordinator, entity_description)
        for entity_description in TIME_SENSORS
        for device_coordinator in entry.runtime_data
    ]
    async_add_entities(entities)
|
||||
|
||||
|
||||
class LetPotTimeEntity(LetPotEntity, TimeEntity):
    """Defines a LetPot time entity."""

    entity_description: LetPotTimeEntityDescription

    def __init__(
        self,
        coordinator: LetPotDeviceCoordinator,
        description: LetPotTimeEntityDescription,
    ) -> None:
        """Initialize LetPot time entity."""
        super().__init__(coordinator)
        self.entity_description = description
        serial = coordinator.device.serial_number
        self._attr_unique_id = (
            f"{coordinator.config_entry.unique_id}_{serial}_{description.key}"
        )

    @property
    def native_value(self) -> time | None:
        """Return the time."""
        status = self.coordinator.data
        return self.entity_description.value_fn(status)

    async def async_set_value(self, value: time) -> None:
        """Set the time."""
        client = self.coordinator.device_client
        await self.entity_description.set_value_fn(client, value)
|
||||
@@ -99,6 +99,7 @@ class LutronCasetaTiltOnlyBlind(LutronCasetaUpdatableEntity, CoverEntity):
|
||||
|
||||
PYLUTRON_TYPE_TO_CLASSES = {
|
||||
"SerenaTiltOnlyWoodBlind": LutronCasetaTiltOnlyBlind,
|
||||
"Tilt": LutronCasetaTiltOnlyBlind,
|
||||
"SerenaHoneycombShade": LutronCasetaShade,
|
||||
"SerenaRollerShade": LutronCasetaShade,
|
||||
"TriathlonHoneycombShade": LutronCasetaShade,
|
||||
|
||||
@@ -6,6 +6,7 @@ import logging
|
||||
from meteofrance_api.client import MeteoFranceClient
|
||||
from meteofrance_api.helpers import is_valid_warning_department
|
||||
from meteofrance_api.model import CurrentPhenomenons, Forecast, Rain
|
||||
from requests import RequestException
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -83,7 +84,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
update_method=_async_update_data_rain,
|
||||
update_interval=SCAN_INTERVAL_RAIN,
|
||||
)
|
||||
await coordinator_rain.async_config_entry_first_refresh()
|
||||
try:
|
||||
await coordinator_rain._async_refresh(log_failures=False) # noqa: SLF001
|
||||
except RequestException:
|
||||
_LOGGER.warning(
|
||||
"1 hour rain forecast not available: %s is not in covered zone",
|
||||
entry.title,
|
||||
)
|
||||
|
||||
department = coordinator_forecast.data.position.get("dept")
|
||||
_LOGGER.debug(
|
||||
@@ -128,8 +135,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
hass.data[DOMAIN][entry.entry_id] = {
|
||||
UNDO_UPDATE_LISTENER: undo_listener,
|
||||
COORDINATOR_FORECAST: coordinator_forecast,
|
||||
COORDINATOR_RAIN: coordinator_rain,
|
||||
}
|
||||
if coordinator_rain and coordinator_rain.last_update_success:
|
||||
hass.data[DOMAIN][entry.entry_id][COORDINATOR_RAIN] = coordinator_rain
|
||||
if coordinator_alert and coordinator_alert.last_update_success:
|
||||
hass.data[DOMAIN][entry.entry_id][COORDINATOR_ALERT] = coordinator_alert
|
||||
|
||||
|
||||
@@ -187,7 +187,7 @@ async def async_setup_entry(
|
||||
"""Set up the Meteo-France sensor platform."""
|
||||
data = hass.data[DOMAIN][entry.entry_id]
|
||||
coordinator_forecast: DataUpdateCoordinator[Forecast] = data[COORDINATOR_FORECAST]
|
||||
coordinator_rain: DataUpdateCoordinator[Rain] | None = data[COORDINATOR_RAIN]
|
||||
coordinator_rain: DataUpdateCoordinator[Rain] | None = data.get(COORDINATOR_RAIN)
|
||||
coordinator_alert: DataUpdateCoordinator[CurrentPhenomenons] | None = data.get(
|
||||
COORDINATOR_ALERT
|
||||
)
|
||||
|
||||
@@ -0,0 +1,114 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: Integration doesn't provide any service actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow:
|
||||
status: todo
|
||||
comment: Check removal and replacement of name in config flow with the title (server address).
|
||||
config-flow-test-coverage:
|
||||
status: todo
|
||||
comment: |
|
||||
Merge test_show_config_form with full flow test.
|
||||
Move full flow test to the top of all tests.
|
||||
All test cases should end in either CREATE_ENTRY or ABORT.
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: Integration doesn't provide any service actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: todo
|
||||
entity-event-setup:
|
||||
status: done
|
||||
comment: Handled by coordinator.
|
||||
entity-unique-id:
|
||||
status: done
|
||||
comment: Using config entry ID, as the dependency mcstatus doesn't provide unique information.
|
||||
has-entity-name: done
|
||||
runtime-data: todo
|
||||
test-before-configure: done
|
||||
test-before-setup:
|
||||
status: done
|
||||
comment: |
|
||||
Raising ConfigEntryNotReady, if either the initialization or
|
||||
refresh of coordinator isn't successful.
|
||||
unique-config-entry:
|
||||
status: done
|
||||
comment: |
|
||||
As there is no unique information available from the dependency mcstatus,
|
||||
the server address is used to identify that the same service is already configured.
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: Integration doesn't provide any service actions.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: Integration doesn't support any configuration parameters.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable:
|
||||
status: done
|
||||
comment: Handled by coordinator.
|
||||
integration-owner: done
|
||||
log-when-unavailable:
|
||||
status: done
|
||||
comment: Handled by coordinator.
|
||||
parallel-updates:
|
||||
status: todo
|
||||
comment: |
|
||||
Although this is handled by the coordinator and no service actions are provided,
|
||||
PARALLEL_UPDATES should still be set to 0 in binary_sensor and sensor according to the rule.
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: No authentication is required for the integration.
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: No discovery possible.
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: |
|
||||
No discovery possible. Users can use the (local or public) hostname instead of an IP address,
|
||||
if static IP addresses cannot be configured.
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: A Minecraft server can only have one device.
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: No repair use-cases for this integration.
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum
|
||||
async-dependency:
|
||||
status: done
|
||||
comment: |
|
||||
Lookup API of the dependency mcstatus for Bedrock Edition servers is not async,
|
||||
but is non-blocking and therefore OK to be called. Refer to mcstatus FAQ
|
||||
https://mcstatus.readthedocs.io/en/stable/pages/faq/#why-doesn-t-bedrockserver-have-an-async-lookup-method
|
||||
inject-websession:
|
||||
status: exempt
|
||||
comment: Integration isn't making any HTTP requests.
|
||||
strict-typing: done
|
||||
@@ -35,7 +35,7 @@ async def async_setup_entry(
|
||||
@callback
|
||||
def _create_entity(netatmo_device: NetatmoDevice) -> None:
|
||||
entity = NetatmoFan(netatmo_device)
|
||||
_LOGGER.debug("Adding cover %s", entity)
|
||||
_LOGGER.debug("Adding fan %s", entity)
|
||||
async_add_entities([entity])
|
||||
|
||||
entry.async_on_unload(
|
||||
|
||||
@@ -4,16 +4,22 @@ import logging
|
||||
|
||||
from pyownet import protocol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
|
||||
from .const import DOMAIN, PLATFORMS
|
||||
from .onewirehub import CannotConnect, OneWireHub
|
||||
from .const import DOMAIN
|
||||
from .onewirehub import CannotConnect, OneWireConfigEntry, OneWireHub
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
type OneWireConfigEntry = ConfigEntry[OneWireHub]
|
||||
|
||||
_PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.SELECT,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: OneWireConfigEntry) -> bool:
|
||||
@@ -29,7 +35,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: OneWireConfigEntry) -> b
|
||||
|
||||
entry.runtime_data = onewire_hub
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
|
||||
|
||||
entry.async_on_unload(entry.add_update_listener(options_update_listener))
|
||||
|
||||
@@ -50,7 +56,7 @@ async def async_unload_entry(
|
||||
hass: HomeAssistant, config_entry: OneWireConfigEntry
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
|
||||
return await hass.config_entries.async_unload_platforms(config_entry, _PLATFORMS)
|
||||
|
||||
|
||||
async def options_update_listener(
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import os
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
@@ -14,10 +15,14 @@ from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import OneWireConfigEntry
|
||||
from .const import DEVICE_KEYS_0_3, DEVICE_KEYS_0_7, DEVICE_KEYS_A_B, READ_MODE_BOOL
|
||||
from .entity import OneWireEntity, OneWireEntityDescription
|
||||
from .onewirehub import OneWireHub
|
||||
from .onewirehub import OneWireConfigEntry, OneWireHub
|
||||
|
||||
# the library uses non-persistent connections
|
||||
# and concurrent access to the bus is managed by the server
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
|
||||
@@ -7,12 +7,7 @@ from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
@@ -29,7 +24,7 @@ from .const import (
|
||||
OPTION_ENTRY_SENSOR_PRECISION,
|
||||
PRECISION_MAPPING_FAMILY_28,
|
||||
)
|
||||
from .onewirehub import CannotConnect, OneWireHub
|
||||
from .onewirehub import CannotConnect, OneWireConfigEntry, OneWireHub
|
||||
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
@@ -39,21 +34,16 @@ DATA_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
|
||||
"""Validate the user input allows us to connect.
|
||||
|
||||
Data has the keys from DATA_SCHEMA with values provided by the user.
|
||||
"""
|
||||
async def validate_input(
|
||||
hass: HomeAssistant, data: dict[str, Any], errors: dict[str, str]
|
||||
) -> None:
|
||||
"""Validate the user input allows us to connect."""
|
||||
|
||||
hub = OneWireHub(hass)
|
||||
|
||||
host = data[CONF_HOST]
|
||||
port = data[CONF_PORT]
|
||||
# Raises CannotConnect exception on failure
|
||||
await hub.connect(host, port)
|
||||
|
||||
# Return info that you want to store in the config entry.
|
||||
return {"title": host}
|
||||
try:
|
||||
await hub.connect(data[CONF_HOST], data[CONF_PORT])
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
|
||||
|
||||
class OneWireFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
@@ -61,48 +51,58 @@ class OneWireFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize 1-Wire config flow."""
|
||||
self.onewire_config: dict[str, Any] = {}
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle 1-Wire config flow start.
|
||||
|
||||
Let user manually input configuration.
|
||||
"""
|
||||
"""Handle 1-Wire config flow start."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input:
|
||||
# Prevent duplicate entries
|
||||
self._async_abort_entries_match(
|
||||
{
|
||||
CONF_HOST: user_input[CONF_HOST],
|
||||
CONF_PORT: user_input[CONF_PORT],
|
||||
}
|
||||
{CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT]}
|
||||
)
|
||||
|
||||
self.onewire_config.update(user_input)
|
||||
|
||||
try:
|
||||
info = await validate_input(self.hass, user_input)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
await validate_input(self.hass, user_input, errors)
|
||||
if not errors:
|
||||
return self.async_create_entry(
|
||||
title=info["title"], data=self.onewire_config
|
||||
title=user_input[CONF_HOST], data=user_input
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=DATA_SCHEMA,
|
||||
data_schema=self.add_suggested_values_to_schema(DATA_SCHEMA, user_input),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle 1-Wire reconfiguration."""
|
||||
errors: dict[str, str] = {}
|
||||
reconfigure_entry = self._get_reconfigure_entry()
|
||||
if user_input:
|
||||
self._async_abort_entries_match(
|
||||
{CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT]}
|
||||
)
|
||||
|
||||
await validate_input(self.hass, user_input, errors)
|
||||
if not errors:
|
||||
return self.async_update_reload_and_abort(
|
||||
reconfigure_entry, data_updates=user_input
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
DATA_SCHEMA, reconfigure_entry.data | (user_input or {})
|
||||
),
|
||||
description_placeholders={"name": reconfigure_entry.title},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: OneWireConfigEntry,
|
||||
) -> OnewireOptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return OnewireOptionsFlowHandler(config_entry)
|
||||
@@ -126,7 +126,7 @@ class OnewireOptionsFlowHandler(OptionsFlow):
|
||||
current_device: str
|
||||
"""Friendly name of the currently selected device."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
def __init__(self, config_entry: OneWireConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.options = deepcopy(dict(config_entry.options))
|
||||
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.const import Platform
|
||||
|
||||
DEFAULT_HOST = "localhost"
|
||||
DEFAULT_PORT = 4304
|
||||
|
||||
@@ -54,9 +52,3 @@ MANUFACTURER_EDS = "Embedded Data Systems"
|
||||
READ_MODE_BOOL = "bool"
|
||||
READ_MODE_FLOAT = "float"
|
||||
READ_MODE_INT = "int"
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
]
|
||||
|
||||
@@ -9,7 +9,7 @@ from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.const import CONF_HOST
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import OneWireConfigEntry
|
||||
from .onewirehub import OneWireConfigEntry
|
||||
|
||||
TO_REDACT = {CONF_HOST}
|
||||
|
||||
|
||||
@@ -54,6 +54,7 @@ class OneWireEntity(Entity):
|
||||
"""Return the state attributes of the entity."""
|
||||
return {
|
||||
"device_file": self._device_file,
|
||||
# raw_value attribute is deprecated and can be removed in 2025.8
|
||||
"raw_value": self._value_raw,
|
||||
}
|
||||
|
||||
@@ -84,4 +85,4 @@ class OneWireEntity(Entity):
|
||||
elif self.entity_description.read_mode == READ_MODE_BOOL:
|
||||
self._state = int(self._value_raw) == 1
|
||||
else:
|
||||
self._state = round(self._value_raw, 1)
|
||||
self._state = self._value_raw
|
||||
|
||||
@@ -44,6 +44,8 @@ DEVICE_MANUFACTURER = {
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type OneWireConfigEntry = ConfigEntry[OneWireHub]
|
||||
|
||||
|
||||
def _is_known_device(device_family: str, device_type: str | None) -> bool:
|
||||
"""Check if device family/type is known to the library."""
|
||||
@@ -70,7 +72,7 @@ class OneWireHub:
|
||||
except protocol.ConnError as exc:
|
||||
raise CannotConnect from exc
|
||||
|
||||
async def initialize(self, config_entry: ConfigEntry) -> None:
|
||||
async def initialize(self, config_entry: OneWireConfigEntry) -> None:
|
||||
"""Initialize a config entry."""
|
||||
host = config_entry.data[CONF_HOST]
|
||||
port = config_entry.data[CONF_PORT]
|
||||
|
||||
@@ -0,0 +1,126 @@
|
||||
rules:
|
||||
## Bronze
|
||||
config-flow:
|
||||
status: todo
|
||||
comment: missing data_description on options flow
|
||||
test-before-configure: done
|
||||
unique-config-entry:
|
||||
status: done
|
||||
comment: unique ID is not available, but duplicates are prevented based on host/port
|
||||
config-flow-test-coverage: done
|
||||
runtime-data: done
|
||||
test-before-setup: done
|
||||
appropriate-polling: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: entities do not subscribe to events
|
||||
dependency-transparency:
|
||||
status: todo
|
||||
comment: The package is not built and published inside a CI pipeline
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: No service actions currently available
|
||||
common-modules:
|
||||
status: done
|
||||
comment: base entity available, but no coordinator
|
||||
docs-high-level-description:
|
||||
status: todo
|
||||
comment: Under review
|
||||
docs-installation-instructions:
|
||||
status: todo
|
||||
comment: Under review
|
||||
docs-removal-instructions:
|
||||
status: todo
|
||||
comment: Under review
|
||||
docs-actions:
|
||||
status: todo
|
||||
comment: Under review
|
||||
brands: done
|
||||
|
||||
## Silver
|
||||
config-entry-unloading: done
|
||||
log-when-unavailable: done
|
||||
entity-unavailable: done
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: No service actions currently available
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: Local polling without authentication
|
||||
parallel-updates: done
|
||||
test-coverage: done
|
||||
integration-owner: done
|
||||
docs-installation-parameters:
|
||||
status: todo
|
||||
comment: Under review
|
||||
docs-configuration-parameters:
|
||||
status: todo
|
||||
comment: Under review
|
||||
|
||||
## Gold
|
||||
entity-translations: done
|
||||
entity-device-class: done
|
||||
devices: done
|
||||
entity-category: done
|
||||
entity-disabled-by-default: done
|
||||
discovery:
|
||||
status: todo
|
||||
comment: mDNS should be possible - https://owfs.org/index_php_page_avahi-discovery.html
|
||||
stale-devices:
|
||||
status: done
|
||||
comment: >
|
||||
Manual removal, as it is not possible to distinguish
|
||||
between a flaky device and a device that has been removed
|
||||
diagnostics:
|
||||
status: todo
|
||||
comment: config-entry diagnostics level available, might be nice to have device-level diagnostics
|
||||
exception-translations:
|
||||
status: todo
|
||||
comment: Under review
|
||||
icon-translations:
|
||||
status: exempt
|
||||
comment: It doesn't make sense to override defaults
|
||||
reconfiguration-flow: done
|
||||
dynamic-devices:
|
||||
status: todo
|
||||
comment: Not yet implemented
|
||||
discovery-update-info:
|
||||
status: todo
|
||||
comment: Under review
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: No repairs available
|
||||
docs-use-cases:
|
||||
status: todo
|
||||
comment: Under review
|
||||
docs-supported-devices:
|
||||
status: todo
|
||||
comment: Under review
|
||||
docs-supported-functions:
|
||||
status: todo
|
||||
comment: Under review
|
||||
docs-data-update:
|
||||
status: todo
|
||||
comment: Under review
|
||||
docs-known-limitations:
|
||||
status: todo
|
||||
comment: Under review
|
||||
docs-troubleshooting:
|
||||
status: todo
|
||||
comment: Under review
|
||||
docs-examples:
|
||||
status: todo
|
||||
comment: Under review
|
||||
|
||||
## Platinum
|
||||
async-dependency:
|
||||
status: todo
|
||||
comment: The dependency is not async
|
||||
inject-websession:
|
||||
status: exempt
|
||||
comment: No websession
|
||||
strict-typing:
|
||||
status: todo
|
||||
comment: The dependency is not typed
|
||||
@@ -0,0 +1,95 @@
|
||||
"""Support for 1-Wire environment select entities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import os
|
||||
|
||||
from homeassistant.components.select import SelectEntity, SelectEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .const import READ_MODE_INT
|
||||
from .entity import OneWireEntity, OneWireEntityDescription
|
||||
from .onewirehub import OneWireConfigEntry, OneWireHub
|
||||
|
||||
# the library uses non-persistent connections
|
||||
# and concurrent access to the bus is managed by the server
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
# Combines the 1-Wire entity fields (device_file/read_mode) with the
# standard select entity description fields (options, translation_key, ...).
class OneWireSelectEntityDescription(OneWireEntityDescription, SelectEntityDescription):
    """Class describing OneWire select entities."""
|
||||
|
||||
|
||||
# Select entity descriptions, keyed by 1-Wire device family code.
ENTITY_DESCRIPTIONS: dict[str, tuple[OneWireEntityDescription, ...]] = {
    "28": (
        OneWireSelectEntityDescription(
            key="tempres",
            entity_category=EntityCategory.CONFIG,
            # The raw owserver value is an integer (resolution in bits).
            read_mode=READ_MODE_INT,
            # Supported temperature resolutions, in bits.
            options=["9", "10", "11", "12"],
            translation_key="tempres",
        ),
    ),
}
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: OneWireConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up 1-Wire platform."""
    # get_entities does blocking (non-async) work, so run it in the executor.
    onewire_hub = config_entry.runtime_data
    select_entities = await hass.async_add_executor_job(get_entities, onewire_hub)
    # True requests an initial state update for the new entities.
    async_add_entities(select_entities, True)
|
||||
|
||||
|
||||
def get_entities(onewire_hub: OneWireHub) -> list[OneWireSelectEntity]:
    """Get a list of entities."""
    if not onewire_hub.devices:
        return []

    entities: list[OneWireSelectEntity] = []
    for device in onewire_hub.devices:
        descriptions = ENTITY_DESCRIPTIONS.get(device.family)
        if descriptions is None:
            # No select entities defined for this device family.
            continue
        # Entity files live next to the device node on the owserver tree.
        device_dir = os.path.split(device.path)[0]
        for description in descriptions:
            entities.append(
                OneWireSelectEntity(
                    description=description,
                    device_id=device.id,
                    device_file=os.path.join(device_dir, description.key),
                    device_info=device.device_info,
                    owproxy=onewire_hub.owproxy,
                )
            )

    return entities
|
||||
|
||||
|
||||
class OneWireSelectEntity(OneWireEntity, SelectEntity):
    """Implementation of a 1-Wire select entity."""

    entity_description: OneWireSelectEntityDescription

    @property
    def current_option(self) -> str | None:
        """Return the selected entity option to represent the entity state."""
        # Guard against a missing reading: str(None) would produce the bogus
        # option "None", which is not in the entity's options list.
        if self._state is None:
            return None
        return str(self._state)

    def select_option(self, option: str) -> None:
        """Change the selected option."""
        # owserver expects the value as raw ASCII bytes.
        self._write_value(option.encode("ascii"))
|
||||
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Mapping
|
||||
import dataclasses
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
import os
|
||||
from types import MappingProxyType
|
||||
@@ -28,7 +29,6 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from . import OneWireConfigEntry
|
||||
from .const import (
|
||||
DEVICE_KEYS_0_3,
|
||||
DEVICE_KEYS_A_B,
|
||||
@@ -39,7 +39,12 @@ from .const import (
|
||||
READ_MODE_INT,
|
||||
)
|
||||
from .entity import OneWireEntity, OneWireEntityDescription
|
||||
from .onewirehub import OneWireHub
|
||||
from .onewirehub import OneWireConfigEntry, OneWireHub
|
||||
|
||||
# the library uses non-persistent connections
|
||||
# and concurrent access to the bus is managed by the server
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
|
||||
@@ -1,21 +1,34 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
|
||||
},
|
||||
"step": {
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"port": "[%key:common::config_flow::data::port%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "[%key:component::onewire::config::step::user::data_description::host%]",
|
||||
"port": "[%key:component::onewire::config::step::user::data_description::port%]"
|
||||
},
|
||||
"description": "Update OWServer configuration for {name}"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"port": "[%key:common::config_flow::data::port%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your 1-Wire device."
|
||||
"host": "The hostname or IP address of your OWServer instance.",
|
||||
"port": "The port of your OWServer instance (default is 4304)."
|
||||
},
|
||||
"title": "Set server details"
|
||||
"title": "Set OWServer instance details"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -28,6 +41,17 @@
|
||||
"name": "Hub short on branch {id}"
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"tempres": {
|
||||
"name": "Temperature resolution",
|
||||
"state": {
|
||||
"9": "9 bits (0.5°C, fastest, up to 93.75ms)",
|
||||
"10": "10 bits (0.25°C, up to 187.5ms)",
|
||||
"11": "11 bits (0.125°C, up to 375ms)",
|
||||
"12": "12 bits (0.0625°C, slowest, up to 750ms)"
|
||||
}
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"counter_id": {
|
||||
"name": "Counter {id}"
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
@@ -11,10 +12,14 @@ from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import OneWireConfigEntry
|
||||
from .const import DEVICE_KEYS_0_3, DEVICE_KEYS_0_7, DEVICE_KEYS_A_B, READ_MODE_BOOL
|
||||
from .entity import OneWireEntity, OneWireEntityDescription
|
||||
from .onewirehub import OneWireHub
|
||||
from .onewirehub import OneWireConfigEntry, OneWireHub
|
||||
|
||||
# the library uses non-persistent connections
|
||||
# and concurrent access to the bus is managed by the server
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
|
||||
@@ -41,6 +41,7 @@ from .const import (
|
||||
PLATFORMS,
|
||||
UPDATE_INTERVAL,
|
||||
UPDATE_INTERVAL_ALL_ASSUMED_STATE,
|
||||
UPDATE_INTERVAL_LOCAL,
|
||||
)
|
||||
from .coordinator import OverkizDataUpdateCoordinator
|
||||
|
||||
@@ -116,13 +117,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: OverkizDataConfigEntry)
|
||||
|
||||
if coordinator.is_stateless:
|
||||
LOGGER.debug(
|
||||
(
|
||||
"All devices have an assumed state. Update interval has been reduced"
|
||||
" to: %s"
|
||||
),
|
||||
"All devices have an assumed state. Update interval has been reduced to: %s",
|
||||
UPDATE_INTERVAL_ALL_ASSUMED_STATE,
|
||||
)
|
||||
coordinator.update_interval = UPDATE_INTERVAL_ALL_ASSUMED_STATE
|
||||
coordinator.set_update_interval(UPDATE_INTERVAL_ALL_ASSUMED_STATE)
|
||||
|
||||
if api_type == APIType.LOCAL:
|
||||
LOGGER.debug(
|
||||
"Devices connect via Local API. Update interval has been reduced to: %s",
|
||||
UPDATE_INTERVAL_LOCAL,
|
||||
)
|
||||
coordinator.set_update_interval(UPDATE_INTERVAL_LOCAL)
|
||||
|
||||
platforms: defaultdict[Platform, list[Device]] = defaultdict(list)
|
||||
|
||||
|
||||
@@ -44,6 +44,7 @@ DEFAULT_SERVER: Final = Server.SOMFY_EUROPE
|
||||
DEFAULT_HOST: Final = "gateway-xxxx-xxxx-xxxx.local:8443"
|
||||
|
||||
UPDATE_INTERVAL: Final = timedelta(seconds=30)
|
||||
UPDATE_INTERVAL_LOCAL: Final = timedelta(seconds=5)
|
||||
UPDATE_INTERVAL_ALL_ASSUMED_STATE: Final = timedelta(minutes=60)
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
|
||||
@@ -26,7 +26,7 @@ from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util.decorator import Registry
|
||||
|
||||
from .const import DOMAIN, LOGGER, UPDATE_INTERVAL
|
||||
from .const import DOMAIN, IGNORED_OVERKIZ_DEVICES, LOGGER
|
||||
|
||||
EVENT_HANDLERS: Registry[
|
||||
str, Callable[[OverkizDataUpdateCoordinator, Event], Coroutine[Any, Any, None]]
|
||||
@@ -36,6 +36,8 @@ EVENT_HANDLERS: Registry[
|
||||
class OverkizDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Device]]):
|
||||
"""Class to manage fetching data from Overkiz platform."""
|
||||
|
||||
_default_update_interval: timedelta
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
@@ -45,7 +47,7 @@ class OverkizDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Device]]):
|
||||
client: OverkizClient,
|
||||
devices: list[Device],
|
||||
places: Place | None,
|
||||
update_interval: timedelta | None = None,
|
||||
update_interval: timedelta,
|
||||
config_entry_id: str,
|
||||
) -> None:
|
||||
"""Initialize global data updater."""
|
||||
@@ -59,12 +61,17 @@ class OverkizDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Device]]):
|
||||
self.data = {}
|
||||
self.client = client
|
||||
self.devices: dict[str, Device] = {d.device_url: d for d in devices}
|
||||
self.is_stateless = all(
|
||||
device.protocol in (Protocol.RTS, Protocol.INTERNAL) for device in devices
|
||||
)
|
||||
self.executions: dict[str, dict[str, str]] = {}
|
||||
self.areas = self._places_to_area(places) if places else None
|
||||
self.config_entry_id = config_entry_id
|
||||
self._default_update_interval = update_interval
|
||||
|
||||
self.is_stateless = all(
|
||||
device.protocol in (Protocol.RTS, Protocol.INTERNAL)
|
||||
for device in devices
|
||||
if device.widget not in IGNORED_OVERKIZ_DEVICES
|
||||
and device.ui_class not in IGNORED_OVERKIZ_DEVICES
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> dict[str, Device]:
|
||||
"""Fetch Overkiz data via event listener."""
|
||||
@@ -102,8 +109,9 @@ class OverkizDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Device]]):
|
||||
if event_handler := EVENT_HANDLERS.get(event.name):
|
||||
await event_handler(self, event)
|
||||
|
||||
# Restore the default update interval if no executions are pending
|
||||
if not self.executions:
|
||||
self.update_interval = UPDATE_INTERVAL
|
||||
self.update_interval = self._default_update_interval
|
||||
|
||||
return self.devices
|
||||
|
||||
@@ -124,6 +132,11 @@ class OverkizDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Device]]):
|
||||
|
||||
return areas
|
||||
|
||||
def set_update_interval(self, update_interval: timedelta) -> None:
|
||||
"""Set the update interval and store this value."""
|
||||
self.update_interval = update_interval
|
||||
self._default_update_interval = update_interval
|
||||
|
||||
|
||||
@EVENT_HANDLERS.register(EventName.DEVICE_AVAILABLE)
|
||||
async def on_device_available(
|
||||
|
||||
@@ -6,7 +6,7 @@ from typing import Any, cast
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from pyoverkiz.enums import OverkizCommand, Protocol
|
||||
from pyoverkiz.exceptions import OverkizException
|
||||
from pyoverkiz.exceptions import BaseOverkizException
|
||||
from pyoverkiz.models import Command, Device, StateDefinition
|
||||
from pyoverkiz.types import StateType as OverkizStateType
|
||||
|
||||
@@ -105,7 +105,7 @@ class OverkizExecutor:
|
||||
"Home Assistant",
|
||||
)
|
||||
# Catch Overkiz exceptions to support `continue_on_error` functionality
|
||||
except OverkizException as exception:
|
||||
except BaseOverkizException as exception:
|
||||
raise HomeAssistantError(exception) from exception
|
||||
|
||||
# ExecutionRegisteredEvent doesn't contain the device_url, thus we need to register it here
|
||||
|
||||
@@ -17,14 +17,15 @@ from homeassistant.components.webhook import (
|
||||
from homeassistant.const import CONF_WEBHOOK_ID, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.http import HomeAssistantView
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN, JSON_PAYLOAD, LOGGER, REGISTERED_NOTIFICATIONS
|
||||
from .const import DOMAIN, EVENT_KEY, JSON_PAYLOAD, LOGGER, REGISTERED_NOTIFICATIONS
|
||||
from .coordinator import OverseerrConfigEntry, OverseerrCoordinator
|
||||
from .services import setup_services
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
PLATFORMS: list[Platform] = [Platform.EVENT, Platform.SENSOR]
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
@@ -129,6 +130,7 @@ class OverseerrWebhookManager:
|
||||
LOGGER.debug("Received webhook payload: %s", data)
|
||||
if data["notification_type"].startswith("MEDIA"):
|
||||
await self.entry.runtime_data.async_refresh()
|
||||
async_dispatcher_send(hass, EVENT_KEY, data)
|
||||
return HomeAssistantView.json({"message": "ok"})
|
||||
|
||||
async def unregister_webhook(self) -> None:
|
||||
|
||||
@@ -14,6 +14,8 @@ ATTR_STATUS = "status"
|
||||
ATTR_SORT_ORDER = "sort_order"
|
||||
ATTR_REQUESTED_BY = "requested_by"
|
||||
|
||||
EVENT_KEY = f"{DOMAIN}_event"
|
||||
|
||||
REGISTERED_NOTIFICATIONS = (
|
||||
NotificationType.REQUEST_PENDING_APPROVAL
|
||||
| NotificationType.REQUEST_APPROVED
|
||||
@@ -23,28 +25,24 @@ REGISTERED_NOTIFICATIONS = (
|
||||
| NotificationType.REQUEST_AUTOMATICALLY_APPROVED
|
||||
)
|
||||
JSON_PAYLOAD = (
|
||||
'"{\\"notification_type\\":\\"{{notification_type}}\\",\\"event\\":\\"'
|
||||
'{{event}}\\",\\"subject\\":\\"{{subject}}\\",\\"message\\":\\"{{messa'
|
||||
'ge}}\\",\\"image\\":\\"{{image}}\\",\\"{{media}}\\":{\\"media_type\\"'
|
||||
':\\"{{media_type}}\\",\\"tmdbId\\":\\"{{media_tmdbid}}\\",\\"tvdbId\\'
|
||||
'":\\"{{media_tvdbid}}\\",\\"status\\":\\"{{media_status}}\\",\\"statu'
|
||||
's4k\\":\\"{{media_status4k}}\\"},\\"{{request}}\\":{\\"request_id\\":'
|
||||
'\\"{{request_id}}\\",\\"requestedBy_email\\":\\"{{requestedBy_email}}'
|
||||
'\\",\\"requestedBy_username\\":\\"{{requestedBy_username}}\\",\\"requ'
|
||||
'estedBy_avatar\\":\\"{{requestedBy_avatar}}\\",\\"requestedBy_setting'
|
||||
's_discordId\\":\\"{{requestedBy_settings_discordId}}\\",\\"requestedB'
|
||||
'y_settings_telegramChatId\\":\\"{{requestedBy_settings_telegramChatId'
|
||||
'}}\\"},\\"{{issue}}\\":{\\"issue_id\\":\\"{{issue_id}}\\",\\"issue_ty'
|
||||
'pe\\":\\"{{issue_type}}\\",\\"issue_status\\":\\"{{issue_status}}\\",'
|
||||
'\\"reportedBy_email\\":\\"{{reportedBy_email}}\\",\\"reportedBy_usern'
|
||||
'ame\\":\\"{{reportedBy_username}}\\",\\"reportedBy_avatar\\":\\"{{rep'
|
||||
'ortedBy_avatar}}\\",\\"reportedBy_settings_discordId\\":\\"{{reported'
|
||||
'By_settings_discordId}}\\",\\"reportedBy_settings_telegramChatId\\":'
|
||||
'\\"{{reportedBy_settings_telegramChatId}}\\"},\\"{{comment}}\\":{\\"c'
|
||||
'omment_message\\":\\"{{comment_message}}\\",\\"commentedBy_email\\":'
|
||||
'\\"{{commentedBy_email}}\\",\\"commentedBy_username\\":\\"{{commented'
|
||||
'By_username}}\\",\\"commentedBy_avatar\\":\\"{{commentedBy_avatar}}'
|
||||
'\\",\\"commentedBy_settings_discordId\\":\\"{{commentedBy_settings_di'
|
||||
'scordId}}\\",\\"commentedBy_settings_telegramChatId\\":\\"{{commented'
|
||||
'By_settings_telegramChatId}}\\"},\\"{{extra}}\\":[]\\n}"'
|
||||
'"{\\"notification_type\\":\\"{{notification_type}}\\",\\"subject\\":\\"{{subject}'
|
||||
'}\\",\\"message\\":\\"{{message}}\\",\\"image\\":\\"{{image}}\\",\\"{{media}}\\":'
|
||||
'{\\"media_type\\":\\"{{media_type}}\\",\\"tmdb_idd\\":\\"{{media_tmdbid}}\\",\\"t'
|
||||
'vdb_id\\":\\"{{media_tvdbid}}\\",\\"status\\":\\"{{media_status}}\\",\\"status4k'
|
||||
'\\":\\"{{media_status4k}}\\"},\\"{{request}}\\":{\\"request_id\\":\\"{{request_id'
|
||||
'}}\\",\\"requested_by_email\\":\\"{{requestedBy_email}}\\",\\"requested_by_userna'
|
||||
'me\\":\\"{{requestedBy_username}}\\",\\"requested_by_avatar\\":\\"{{requestedBy_a'
|
||||
'vatar}}\\",\\"requested_by_settings_discord_id\\":\\"{{requestedBy_settings_disco'
|
||||
'rdId}}\\",\\"requested_by_settings_telegram_chat_id\\":\\"{{requestedBy_settings_'
|
||||
'telegramChatId}}\\"},\\"{{issue}}\\":{\\"issue_id\\":\\"{{issue_id}}\\",\\"issue_'
|
||||
'type\\":\\"{{issue_type}}\\",\\"issue_status\\":\\"{{issue_status}}\\",\\"reporte'
|
||||
'd_by_email\\":\\"{{reportedBy_email}}\\",\\"reported_by_username\\":\\"{{reported'
|
||||
'By_username}}\\",\\"reported_by_avatar\\":\\"{{reportedBy_avatar}}\\",\\"reported'
|
||||
'_by_settings_discord_id\\":\\"{{reportedBy_settings_discordId}}\\",\\"reported_by'
|
||||
'_settings_telegram_chat_id\\":\\"{{reportedBy_settings_telegramChatId}}\\"},\\"{{'
|
||||
'comment}}\\":{\\"comment_message\\":\\"{{comment_message}}\\",\\"commented_by_ema'
|
||||
'il\\":\\"{{commentedBy_email}}\\",\\"commented_by_username\\":\\"{{commentedBy_us'
|
||||
'ername}}\\",\\"commented_by_avatar\\":\\"{{commentedBy_avatar}}\\",\\"commented_b'
|
||||
'y_settings_discord_id\\":\\"{{commentedBy_settings_discordId}}\\",\\"commented_by'
|
||||
'_settings_telegram_chat_id\\":\\"{{commentedBy_settings_telegramChatId}}\\"}}"'
|
||||
)
|
||||
|
||||
@@ -0,0 +1,99 @@
|
||||
"""Support for Overseerr events."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.event import EventEntity, EventEntityDescription
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import EVENT_KEY
|
||||
from .coordinator import OverseerrConfigEntry, OverseerrCoordinator
|
||||
from .entity import OverseerrEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class OverseerrEventEntityDescription(EventEntityDescription):
|
||||
"""Describes Overseerr config event entity."""
|
||||
|
||||
nullable_fields: list[str]
|
||||
|
||||
|
||||
EVENTS: tuple[OverseerrEventEntityDescription, ...] = (
|
||||
OverseerrEventEntityDescription(
|
||||
key="media",
|
||||
translation_key="last_media_event",
|
||||
event_types=[
|
||||
"pending",
|
||||
"approved",
|
||||
"available",
|
||||
"failed",
|
||||
"declined",
|
||||
"auto_approved",
|
||||
],
|
||||
nullable_fields=["comment", "issue"],
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: OverseerrConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Overseerr sensor entities based on a config entry."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities(
|
||||
OverseerrEvent(coordinator, description) for description in EVENTS
|
||||
)
|
||||
|
||||
|
||||
class OverseerrEvent(OverseerrEntity, EventEntity):
|
||||
"""Defines a Overseerr event entity."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: OverseerrCoordinator,
|
||||
description: OverseerrEventEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize Overseerr event entity."""
|
||||
super().__init__(coordinator, description.key)
|
||||
self.entity_description = description
|
||||
self._attr_available = True
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Subscribe to updates."""
|
||||
await super().async_added_to_hass()
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(self.hass, EVENT_KEY, self._handle_update)
|
||||
)
|
||||
|
||||
async def _handle_update(self, event: dict[str, Any]) -> None:
|
||||
"""Handle incoming event."""
|
||||
event_type = event["notification_type"].lower()
|
||||
if event_type.split("_")[0] == self.entity_description.key:
|
||||
self._trigger_event(event_type[6:], event)
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
if super().available != self._attr_available:
|
||||
self._attr_available = super().available
|
||||
super()._handle_coordinator_update()
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return self._attr_available
|
||||
|
||||
|
||||
def parse_event(event: dict[str, Any], nullable_fields: list[str]) -> dict[str, Any]:
|
||||
"""Parse event."""
|
||||
event.pop("notification_type")
|
||||
for field in nullable_fields:
|
||||
event.pop(field)
|
||||
return event
|
||||
@@ -21,6 +21,19 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"event": {
|
||||
"last_media_event": {
|
||||
"name": "Last media event",
|
||||
"state": {
|
||||
"pending": "Pending",
|
||||
"approved": "Approved",
|
||||
"available": "Available",
|
||||
"failed": "Failed",
|
||||
"declined": "Declined",
|
||||
"auto_approved": "Auto-approved"
|
||||
}
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"total_requests": {
|
||||
"name": "Total requests"
|
||||
|
||||
@@ -16,10 +16,6 @@
|
||||
"backup_failed_out_of_resources": {
|
||||
"title": "Database backup failed due to lack of resources",
|
||||
"description": "The database backup stated at {start_time} failed due to lack of resources. The backup cannot be trusted and must be restarted. This can happen if the database is too large or if the system is under heavy load. Consider upgrading the system hardware or reducing the size of the database by decreasing the number of history days to keep or creating a filter."
|
||||
},
|
||||
"sqlite_too_old": {
|
||||
"title": "Update SQLite to {min_version} or later to continue using the recorder",
|
||||
"description": "Support for version {server_version} of SQLite is ending; the minimum supported version is {min_version}. Please upgrade your database software."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
|
||||
@@ -95,9 +95,8 @@ RECOMMENDED_MIN_VERSION_MARIA_DB_108 = _simple_version("10.8.4")
|
||||
MARIADB_WITH_FIXED_IN_QUERIES_108 = _simple_version("10.8.4")
|
||||
MIN_VERSION_MYSQL = _simple_version("8.0.0")
|
||||
MIN_VERSION_PGSQL = _simple_version("12.0")
|
||||
MIN_VERSION_SQLITE = _simple_version("3.31.0")
|
||||
UPCOMING_MIN_VERSION_SQLITE = _simple_version("3.40.1")
|
||||
MIN_VERSION_SQLITE_MODERN_BIND_VARS = _simple_version("3.32.0")
|
||||
MIN_VERSION_SQLITE = _simple_version("3.40.1")
|
||||
MIN_VERSION_SQLITE_MODERN_BIND_VARS = _simple_version("3.40.1")
|
||||
|
||||
|
||||
# This is the maximum time after the recorder ends the session
|
||||
@@ -376,37 +375,6 @@ def _raise_if_version_unsupported(
|
||||
raise UnsupportedDialect
|
||||
|
||||
|
||||
@callback
|
||||
def _async_delete_issue_deprecated_version(
|
||||
hass: HomeAssistant, dialect_name: str
|
||||
) -> None:
|
||||
"""Delete the issue about upcoming unsupported database version."""
|
||||
ir.async_delete_issue(hass, DOMAIN, f"{dialect_name}_too_old")
|
||||
|
||||
|
||||
@callback
|
||||
def _async_create_issue_deprecated_version(
|
||||
hass: HomeAssistant,
|
||||
server_version: AwesomeVersion,
|
||||
dialect_name: str,
|
||||
min_version: AwesomeVersion,
|
||||
) -> None:
|
||||
"""Warn about upcoming unsupported database version."""
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"{dialect_name}_too_old",
|
||||
is_fixable=False,
|
||||
severity=ir.IssueSeverity.CRITICAL,
|
||||
translation_key=f"{dialect_name}_too_old",
|
||||
translation_placeholders={
|
||||
"server_version": str(server_version),
|
||||
"min_version": str(min_version),
|
||||
},
|
||||
breaks_in_ha_version="2025.2.0",
|
||||
)
|
||||
|
||||
|
||||
def _extract_version_from_server_response_or_raise(
|
||||
server_response: str,
|
||||
) -> AwesomeVersion:
|
||||
@@ -523,20 +491,6 @@ def setup_connection_for_dialect(
|
||||
version or version_string, "SQLite", MIN_VERSION_SQLITE
|
||||
)
|
||||
|
||||
# No elif here since _raise_if_version_unsupported raises
|
||||
if version < UPCOMING_MIN_VERSION_SQLITE:
|
||||
instance.hass.add_job(
|
||||
_async_create_issue_deprecated_version,
|
||||
instance.hass,
|
||||
version or version_string,
|
||||
dialect_name,
|
||||
UPCOMING_MIN_VERSION_SQLITE,
|
||||
)
|
||||
else:
|
||||
instance.hass.add_job(
|
||||
_async_delete_issue_deprecated_version, instance.hass, dialect_name
|
||||
)
|
||||
|
||||
if version and version > MIN_VERSION_SQLITE_MODERN_BIND_VARS:
|
||||
max_bind_vars = SQLITE_MODERN_MAX_BIND_VARS
|
||||
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["renault_api"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["renault-api==0.2.8"]
|
||||
"requirements": ["renault-api==0.2.9"]
|
||||
}
|
||||
|
||||
@@ -89,6 +89,9 @@
|
||||
"timeout": {
|
||||
"message": "Timeout waiting on a response: {err}"
|
||||
},
|
||||
"unexpected": {
|
||||
"message": "Unexpected Reolink error: {err}"
|
||||
},
|
||||
"firmware_install_error": {
|
||||
"message": "Error trying to update Reolink firmware: {err}"
|
||||
},
|
||||
|
||||
@@ -82,7 +82,8 @@ def get_device_uid_and_ch(
|
||||
ch = int(device_uid[1][5:])
|
||||
is_chime = True
|
||||
else:
|
||||
ch = host.api.channel_for_uid(device_uid[1])
|
||||
device_uid_part = "_".join(device_uid[1:])
|
||||
ch = host.api.channel_for_uid(device_uid_part)
|
||||
return (device_uid, ch, is_chime)
|
||||
|
||||
|
||||
@@ -167,6 +168,10 @@ def raise_translated_error(
|
||||
translation_placeholders={"err": str(err)},
|
||||
) from err
|
||||
except ReolinkError as err:
|
||||
raise HomeAssistantError(err) from err
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="unexpected",
|
||||
translation_placeholders={"err": str(err)},
|
||||
) from err
|
||||
|
||||
return decorator_raise_translated_error
|
||||
|
||||
@@ -205,14 +205,6 @@ async def setup_device_v1(
|
||||
coordinator = RoborockDataUpdateCoordinator(
|
||||
hass, device, networking, product_info, mqtt_client, home_data_rooms
|
||||
)
|
||||
# Verify we can communicate locally - if we can't, switch to cloud api
|
||||
await coordinator.verify_api()
|
||||
coordinator.api.is_available = True
|
||||
try:
|
||||
await coordinator.get_maps()
|
||||
except RoborockException as err:
|
||||
_LOGGER.warning("Failed to get map data")
|
||||
_LOGGER.debug(err)
|
||||
try:
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
except ConfigEntryNotReady as ex:
|
||||
|
||||
@@ -73,7 +73,27 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
|
||||
self.maps: dict[int, RoborockMapInfo] = {}
|
||||
self._home_data_rooms = {str(room.id): room.name for room in home_data_rooms}
|
||||
|
||||
async def verify_api(self) -> None:
|
||||
async def _async_setup(self) -> None:
|
||||
"""Set up the coordinator."""
|
||||
# Verify we can communicate locally - if we can't, switch to cloud api
|
||||
await self._verify_api()
|
||||
self.api.is_available = True
|
||||
|
||||
try:
|
||||
maps = await self.api.get_multi_maps_list()
|
||||
except RoborockException as err:
|
||||
raise UpdateFailed("Failed to get map data: {err}") from err
|
||||
# Rooms names populated later with calls to `set_current_map_rooms` for each map
|
||||
self.maps = {
|
||||
roborock_map.mapFlag: RoborockMapInfo(
|
||||
flag=roborock_map.mapFlag,
|
||||
name=roborock_map.name or f"Map {roborock_map.mapFlag}",
|
||||
rooms={},
|
||||
)
|
||||
for roborock_map in (maps.map_info if (maps and maps.map_info) else ())
|
||||
}
|
||||
|
||||
async def _verify_api(self) -> None:
|
||||
"""Verify that the api is reachable. If it is not, switch clients."""
|
||||
if isinstance(self.api, RoborockLocalClientV1):
|
||||
try:
|
||||
@@ -96,12 +116,8 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
|
||||
|
||||
async def _update_device_prop(self) -> None:
|
||||
"""Update device properties."""
|
||||
device_prop = await self.api.get_prop()
|
||||
if device_prop:
|
||||
if self.roborock_device_info.props:
|
||||
self.roborock_device_info.props.update(device_prop)
|
||||
else:
|
||||
self.roborock_device_info.props = device_prop
|
||||
if (device_prop := await self.api.get_prop()) is not None:
|
||||
self.roborock_device_info.props.update(device_prop)
|
||||
|
||||
async def _async_update_data(self) -> DeviceProp:
|
||||
"""Update data via library."""
|
||||
@@ -111,7 +127,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
|
||||
# Set the new map id from the updated device props
|
||||
self._set_current_map()
|
||||
# Get the rooms for that map id.
|
||||
await self.get_rooms()
|
||||
await self.set_current_map_rooms()
|
||||
except RoborockException as ex:
|
||||
raise UpdateFailed(ex) from ex
|
||||
return self.roborock_device_info.props
|
||||
@@ -127,29 +143,18 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
|
||||
self.roborock_device_info.props.status.map_status - 3
|
||||
) // 4
|
||||
|
||||
async def get_maps(self) -> None:
|
||||
"""Add a map to the coordinators mapping."""
|
||||
maps = await self.api.get_multi_maps_list()
|
||||
if maps and maps.map_info:
|
||||
for roborock_map in maps.map_info:
|
||||
self.maps[roborock_map.mapFlag] = RoborockMapInfo(
|
||||
flag=roborock_map.mapFlag,
|
||||
name=roborock_map.name or f"Map {roborock_map.mapFlag}",
|
||||
rooms={},
|
||||
)
|
||||
|
||||
async def get_rooms(self) -> None:
|
||||
"""Get all of the rooms for the current map."""
|
||||
async def set_current_map_rooms(self) -> None:
|
||||
"""Fetch all of the rooms for the current map and set on RoborockMapInfo."""
|
||||
# The api is only able to access rooms for the currently selected map
|
||||
# So it is important this is only called when you have the map you care
|
||||
# about selected.
|
||||
if self.current_map in self.maps:
|
||||
iot_rooms = await self.api.get_room_mapping()
|
||||
if iot_rooms is not None:
|
||||
for room in iot_rooms:
|
||||
self.maps[self.current_map].rooms[room.segment_id] = (
|
||||
self._home_data_rooms.get(room.iot_id, "Unknown")
|
||||
)
|
||||
if self.current_map is None or self.current_map not in self.maps:
|
||||
return
|
||||
room_mapping = await self.api.get_room_mapping()
|
||||
self.maps[self.current_map].rooms = {
|
||||
room.segment_id: self._home_data_rooms.get(room.iot_id, "Unknown")
|
||||
for room in room_mapping or ()
|
||||
}
|
||||
|
||||
@cached_property
|
||||
def duid(self) -> str:
|
||||
|
||||
@@ -61,6 +61,9 @@
|
||||
"total_cleaning_area": {
|
||||
"default": "mdi:texture-box"
|
||||
},
|
||||
"total_cleaning_count": {
|
||||
"default": "mdi:counter"
|
||||
},
|
||||
"vacuum_error": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
|
||||
@@ -121,7 +121,10 @@ class RoborockMap(RoborockCoordinatedEntityV1, ImageEntity):
|
||||
"""Update the image if it is not cached."""
|
||||
if self.is_map_valid():
|
||||
response = await asyncio.gather(
|
||||
*(self.cloud_api.get_map_v1(), self.coordinator.get_rooms()),
|
||||
*(
|
||||
self.cloud_api.get_map_v1(),
|
||||
self.coordinator.set_current_map_rooms(),
|
||||
),
|
||||
return_exceptions=True,
|
||||
)
|
||||
if not isinstance(response[0], bytes):
|
||||
@@ -174,7 +177,8 @@ async def create_coordinator_maps(
|
||||
await asyncio.sleep(MAP_SLEEP)
|
||||
# Get the map data
|
||||
map_update = await asyncio.gather(
|
||||
*[coord.cloud_api.get_map_v1(), coord.get_rooms()], return_exceptions=True
|
||||
*[coord.cloud_api.get_map_v1(), coord.set_current_map_rooms()],
|
||||
return_exceptions=True,
|
||||
)
|
||||
# If we fail to get the map, we should set it to empty byte,
|
||||
# still create it, and set it as unavailable.
|
||||
|
||||
@@ -24,6 +24,7 @@ from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfArea, UnitOfTime
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -112,6 +113,13 @@ SENSOR_DESCRIPTIONS = [
|
||||
value_fn=lambda data: data.clean_summary.clean_time,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
RoborockSensorDescription(
|
||||
key="total_cleaning_count",
|
||||
translation_key="total_cleaning_count",
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
value_fn=lambda data: data.clean_summary.clean_count,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
RoborockSensorDescription(
|
||||
key="status",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
|
||||
@@ -228,6 +228,9 @@
|
||||
"total_cleaning_area": {
|
||||
"name": "Total cleaning area"
|
||||
},
|
||||
"total_cleaning_count": {
|
||||
"name": "Total cleaning count"
|
||||
},
|
||||
"vacuum_error": {
|
||||
"name": "Vacuum error",
|
||||
"state": {
|
||||
|
||||
@@ -69,6 +69,7 @@ class SatelIntegraAlarmPanel(AlarmControlPanelEntity):
|
||||
def __init__(self, controller, name, arm_home_mode, partition_id):
|
||||
"""Initialize the alarm panel."""
|
||||
self._attr_name = name
|
||||
self._attr_unique_id = f"satel_alarm_panel_{partition_id}"
|
||||
self._arm_home_mode = arm_home_mode
|
||||
self._partition_id = partition_id
|
||||
self._satel = controller
|
||||
|
||||
@@ -58,6 +58,7 @@ class SatelIntegraSwitch(SwitchEntity):
|
||||
def __init__(self, controller, device_number, device_name, code):
|
||||
"""Initialize the binary_sensor."""
|
||||
self._device_number = device_number
|
||||
self._attr_unique_id = f"satel_switch_{device_number}"
|
||||
self._name = device_name
|
||||
self._state = False
|
||||
self._code = code
|
||||
|
||||
@@ -9,7 +9,7 @@ from screenlogicpy.const.data import ATTR, DEVICE, GROUP, VALUE
|
||||
from screenlogicpy.const.msg import CODE
|
||||
from screenlogicpy.device_const.chemistry import DOSE_STATE
|
||||
from screenlogicpy.device_const.pump import PUMP_TYPE
|
||||
from screenlogicpy.device_const.system import EQUIPMENT_FLAG
|
||||
from screenlogicpy.device_const.system import CONTROLLER_STATE, EQUIPMENT_FLAG
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
DOMAIN as SENSOR_DOMAIN,
|
||||
@@ -41,7 +41,7 @@ class ScreenLogicSensorDescription(
|
||||
):
|
||||
"""Describes a ScreenLogic sensor."""
|
||||
|
||||
value_mod: Callable[[int | str], int | str] | None = None
|
||||
value_mod: Callable[[int | str], int | str | None] | None = None
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True, kw_only=True)
|
||||
@@ -60,6 +60,18 @@ SUPPORTED_CORE_SENSORS = [
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="air_temperature",
|
||||
),
|
||||
ScreenLogicPushSensorDescription(
|
||||
subscription_code=CODE.STATUS_CHANGED,
|
||||
data_root=(DEVICE.CONTROLLER, GROUP.SENSOR),
|
||||
key=VALUE.STATE,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=["ready", "sync", "service"],
|
||||
value_mod=lambda val: (
|
||||
CONTROLLER_STATE(val).name.lower() if val in [1, 2, 3] else None
|
||||
),
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
translation_key="controller_state",
|
||||
),
|
||||
]
|
||||
|
||||
SUPPORTED_PUMP_SENSORS = [
|
||||
@@ -344,7 +356,7 @@ class ScreenLogicSensor(ScreenLogicEntity, SensorEntity):
|
||||
)
|
||||
|
||||
@property
|
||||
def native_value(self) -> str | int | float:
|
||||
def native_value(self) -> str | int | float | None:
|
||||
"""State of the sensor."""
|
||||
val = self.entity_data[ATTR.VALUE]
|
||||
value_mod = self.entity_description.value_mod
|
||||
|
||||
@@ -184,6 +184,14 @@
|
||||
"air_temperature": {
|
||||
"name": "Air temperature"
|
||||
},
|
||||
"controller_state": {
|
||||
"name": "Controller state",
|
||||
"state": {
|
||||
"ready": "Ready",
|
||||
"sync": "Sync",
|
||||
"service": "Service"
|
||||
}
|
||||
},
|
||||
"chem_now": {
|
||||
"name": "{chem} now"
|
||||
},
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from pysensibo.model import MotionSensor, SensiboDevice
|
||||
@@ -18,6 +19,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import SensiboConfigEntry
|
||||
from .const import LOGGER
|
||||
from .coordinator import SensiboDataUpdateCoordinator
|
||||
from .entity import SensiboDeviceBaseEntity, SensiboMotionBaseEntity
|
||||
|
||||
@@ -122,32 +124,55 @@ async def async_setup_entry(
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
entities: list[SensiboMotionSensor | SensiboDeviceSensor] = []
|
||||
added_devices: set[str] = set()
|
||||
|
||||
for device_id, device_data in coordinator.data.parsed.items():
|
||||
if device_data.motion_sensors:
|
||||
def _add_remove_devices() -> None:
|
||||
"""Handle additions of devices and sensors."""
|
||||
entities: list[SensiboMotionSensor | SensiboDeviceSensor] = []
|
||||
nonlocal added_devices
|
||||
new_devices, remove_devices, added_devices = coordinator.get_devices(
|
||||
added_devices
|
||||
)
|
||||
|
||||
if LOGGER.isEnabledFor(logging.DEBUG):
|
||||
LOGGER.debug(
|
||||
"New devices: %s, Removed devices: %s, Existing devices: %s",
|
||||
new_devices,
|
||||
remove_devices,
|
||||
added_devices,
|
||||
)
|
||||
|
||||
if new_devices:
|
||||
entities.extend(
|
||||
SensiboMotionSensor(
|
||||
coordinator, device_id, sensor_id, sensor_data, description
|
||||
)
|
||||
for device_id, device_data in coordinator.data.parsed.items()
|
||||
if device_data.motion_sensors
|
||||
for sensor_id, sensor_data in device_data.motion_sensors.items()
|
||||
if sensor_id in new_devices
|
||||
for description in MOTION_SENSOR_TYPES
|
||||
)
|
||||
entities.extend(
|
||||
SensiboDeviceSensor(coordinator, device_id, description)
|
||||
for description in MOTION_DEVICE_SENSOR_TYPES
|
||||
for device_id, device_data in coordinator.data.parsed.items()
|
||||
if device_data.motion_sensors
|
||||
)
|
||||
entities.extend(
|
||||
SensiboDeviceSensor(coordinator, device_id, description)
|
||||
for device_id, device_data in coordinator.data.parsed.items()
|
||||
for description in DESCRIPTION_BY_MODELS.get(
|
||||
device_data.model, DEVICE_SENSOR_TYPES
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
entities.extend(
|
||||
SensiboDeviceSensor(coordinator, device_id, description)
|
||||
for device_id, device_data in coordinator.data.parsed.items()
|
||||
if device_data.motion_sensors and device_id in new_devices
|
||||
for description in MOTION_DEVICE_SENSOR_TYPES
|
||||
)
|
||||
entities.extend(
|
||||
SensiboDeviceSensor(coordinator, device_id, description)
|
||||
for device_id, device_data in coordinator.data.parsed.items()
|
||||
if device_id in new_devices
|
||||
for description in DESCRIPTION_BY_MODELS.get(
|
||||
device_data.model, DEVICE_SENSOR_TYPES
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
entry.async_on_unload(coordinator.async_add_listener(_add_remove_devices))
|
||||
_add_remove_devices()
|
||||
|
||||
|
||||
class SensiboMotionSensor(SensiboMotionBaseEntity, BinarySensorEntity):
|
||||
|
||||
@@ -41,10 +41,22 @@ async def async_setup_entry(
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
SensiboDeviceButton(coordinator, device_id, DEVICE_BUTTON_TYPES)
|
||||
for device_id, device_data in coordinator.data.parsed.items()
|
||||
)
|
||||
added_devices: set[str] = set()
|
||||
|
||||
def _add_remove_devices() -> None:
|
||||
"""Handle additions of devices and sensors."""
|
||||
nonlocal added_devices
|
||||
new_devices, _, added_devices = coordinator.get_devices(added_devices)
|
||||
|
||||
if new_devices:
|
||||
async_add_entities(
|
||||
SensiboDeviceButton(coordinator, device_id, DEVICE_BUTTON_TYPES)
|
||||
for device_id in coordinator.data.parsed
|
||||
if device_id in new_devices
|
||||
)
|
||||
|
||||
entry.async_on_unload(coordinator.async_add_listener(_add_remove_devices))
|
||||
_add_remove_devices()
|
||||
|
||||
|
||||
class SensiboDeviceButton(SensiboDeviceBaseEntity, ButtonEntity):
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from bisect import bisect_left
|
||||
from typing import Any
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -144,12 +144,22 @@ async def async_setup_entry(
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
entities = [
|
||||
SensiboClimate(coordinator, device_id)
|
||||
for device_id, device_data in coordinator.data.parsed.items()
|
||||
]
|
||||
added_devices: set[str] = set()
|
||||
|
||||
async_add_entities(entities)
|
||||
def _add_remove_devices() -> None:
|
||||
"""Handle additions of devices and sensors."""
|
||||
nonlocal added_devices
|
||||
new_devices, _, added_devices = coordinator.get_devices(added_devices)
|
||||
|
||||
if new_devices:
|
||||
async_add_entities(
|
||||
SensiboClimate(coordinator, device_id)
|
||||
for device_id in coordinator.data.parsed
|
||||
if device_id in new_devices
|
||||
)
|
||||
|
||||
entry.async_on_unload(coordinator.async_add_listener(_add_remove_devices))
|
||||
_add_remove_devices()
|
||||
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
platform.async_register_entity_service(
|
||||
@@ -199,7 +209,7 @@ async def async_setup_entry(
|
||||
vol.Required(ATTR_LOW_TEMPERATURE_THRESHOLD): vol.Coerce(float),
|
||||
vol.Required(ATTR_LOW_TEMPERATURE_STATE): dict,
|
||||
vol.Required(ATTR_SMART_TYPE): vol.In(
|
||||
["temperature", "feelsLike", "humidity"]
|
||||
["temperature", "feelslike", "humidity"]
|
||||
),
|
||||
},
|
||||
"async_enable_climate_react",
|
||||
@@ -255,8 +265,8 @@ class SensiboClimate(SensiboDeviceBaseEntity, ClimateEntity):
|
||||
@property
|
||||
def hvac_modes(self) -> list[HVACMode]:
|
||||
"""Return the list of available hvac operation modes."""
|
||||
if not self.device_data.hvac_modes:
|
||||
return [HVACMode.OFF]
|
||||
if TYPE_CHECKING:
|
||||
assert self.device_data.hvac_modes
|
||||
return [SENSIBO_TO_HA[mode] for mode in self.device_data.hvac_modes]
|
||||
|
||||
@property
|
||||
|
||||
@@ -12,6 +12,7 @@ from pysensibo.model import SensiboData
|
||||
from homeassistant.const import CONF_API_KEY
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.debounce import Debouncer
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
@@ -48,6 +49,25 @@ class SensiboDataUpdateCoordinator(DataUpdateCoordinator[SensiboData]):
|
||||
session=async_get_clientsession(hass),
|
||||
timeout=TIMEOUT,
|
||||
)
|
||||
self.previous_devices: set[str] = set()
|
||||
|
||||
def get_devices(
|
||||
self, added_devices: set[str]
|
||||
) -> tuple[set[str], set[str], set[str]]:
|
||||
"""Addition and removal of devices."""
|
||||
data = self.data
|
||||
motion_sensors = {
|
||||
sensor_id
|
||||
for device_data in data.parsed.values()
|
||||
if device_data.motion_sensors
|
||||
for sensor_id in device_data.motion_sensors
|
||||
}
|
||||
devices: set[str] = set(data.parsed)
|
||||
new_devices: set[str] = motion_sensors | devices - added_devices
|
||||
remove_devices = added_devices - devices - motion_sensors
|
||||
added_devices = (added_devices - remove_devices) | new_devices
|
||||
|
||||
return (new_devices, remove_devices, added_devices)
|
||||
|
||||
async def _async_update_data(self) -> SensiboData:
|
||||
"""Fetch data from Sensibo."""
|
||||
@@ -67,4 +87,23 @@ class SensiboDataUpdateCoordinator(DataUpdateCoordinator[SensiboData]):
|
||||
|
||||
if not data.raw:
|
||||
raise UpdateFailed(translation_domain=DOMAIN, translation_key="no_data")
|
||||
|
||||
current_devices = set(data.parsed)
|
||||
for device_data in data.parsed.values():
|
||||
if device_data.motion_sensors:
|
||||
for motion_sensor_id in device_data.motion_sensors:
|
||||
current_devices.add(motion_sensor_id)
|
||||
|
||||
if stale_devices := self.previous_devices - current_devices:
|
||||
LOGGER.debug("Removing stale devices: %s", stale_devices)
|
||||
device_registry = dr.async_get(self.hass)
|
||||
for _id in stale_devices:
|
||||
device = device_registry.async_get_device(identifiers={(DOMAIN, _id)})
|
||||
if device:
|
||||
device_registry.async_update_device(
|
||||
device_id=device.id,
|
||||
remove_config_entry_id=self.config_entry.entry_id,
|
||||
)
|
||||
self.previous_devices = current_devices
|
||||
|
||||
return data
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user