forked from home-assistant/core
Compare commits
80 Commits
Comparing 2025.1.0b2...2025.1.0b7
| SHA1 |
|---|
| 47190e4ac1 |
| 7fa1983da0 |
| 9b906e94c7 |
| 5ac4d5bef7 |
| 995e222959 |
| 61ac8e7e8c |
| 67ec71031d |
| 59f866bcf7 |
| d75d970fc7 |
| 0a13516ddd |
| 21aca3c146 |
| faf9c2ee40 |
| e89a1da462 |
| 8ace126d9f |
| ca6bae6b15 |
| c9ba267fec |
| 0e79c17cb8 |
| 4cb413521d |
| f97439eaab |
| 568b637dc5 |
| 3a8f71a64a |
| fea3dfda94 |
| 554cdd1784 |
| ce7a0650e4 |
| 5895aa4cde |
| bd5477729a |
| 2e21ac7001 |
| ab6394b26c |
| 0ae4a9a911 |
| f709989717 |
| 952363eca3 |
| a7995e0093 |
| 1064ef9dc6 |
| c2f06fbd47 |
| a36fd09644 |
| b89995a79f |
| c908f823c5 |
| 229c32b0da |
| e303a9a2b5 |
| 54fa30c2b8 |
| fbd6cf7244 |
| c10175e25c |
| 82f0e8cc19 |
| 623e1b08b8 |
| 0c73251004 |
| d9057fc43e |
| 077c9e62b4 |
| 7456ce1c01 |
| a627fa70a7 |
| c402eaec3f |
| ea51ecd384 |
| 0873d27d7b |
| 45fd7fb6d5 |
| e22685640c |
| 5756166545 |
| 2f8a92c725 |
| cf9ccc6fb4 |
| b05b9b9a33 |
| 352d5d14a3 |
| 52e47f55c8 |
| 0470bff9a2 |
| a38839b420 |
| 394b2be40a |
| 291dd6dc66 |
| ef87366346 |
| bd243f68a4 |
| 951baa3972 |
| 1874eec8b3 |
| 3120a90f26 |
| 7032361bf5 |
| bd786b53ee |
| f6a9cd38c0 |
| 1a909d3a8a |
| b84ae2abc3 |
| 15b80c59fc |
| c11bdcc949 |
| 1957ab1ccf |
| ef2af44795 |
| f0e8360401 |
| 03fb136218 |
@@ -89,7 +89,7 @@ from .helpers import (
 )
 from .helpers.dispatcher import async_dispatcher_send_internal
 from .helpers.storage import get_internal_store_manager
-from .helpers.system_info import async_get_system_info, is_official_image
+from .helpers.system_info import async_get_system_info
 from .helpers.typing import ConfigType
 from .setup import (
     # _setup_started is marked as protected to make it clear
@@ -106,6 +106,7 @@ from .util.async_ import create_eager_task
 from .util.hass_dict import HassKey
 from .util.logging import async_activate_log_queue_handler
 from .util.package import async_get_user_site, is_docker_env, is_virtual_env
+from .util.system_info import is_official_image

 with contextlib.suppress(ImportError):
     # Ensure anyio backend is imported to avoid it being imported in the event loop
@@ -26,5 +26,5 @@
   "iot_class": "local_push",
   "loggers": ["aioacaia"],
   "quality_scale": "platinum",
-  "requirements": ["aioacaia==0.1.11"]
+  "requirements": ["aioacaia==0.1.12"]
 }
@@ -7,6 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["python_homeassistant_analytics"],
-  "requirements": ["python-homeassistant-analytics==0.8.0"],
+  "requirements": ["python-homeassistant-analytics==0.8.1"],
   "single_config_entry": true
 }
@@ -156,7 +156,12 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
         # and one of them, which could end up being in discovery_info.host, is from a
         # different device. If any of the discovery_info.ip_addresses matches the
         # existing host, don't update the host.
-        if existing_config_entry and len(discovery_info.ip_addresses) > 1:
+        if (
+            existing_config_entry
+            # Ignored entries don't have host
+            and CONF_HOST in existing_config_entry.data
+            and len(discovery_info.ip_addresses) > 1
+        ):
             existing_host = existing_config_entry.data[CONF_HOST]
             if existing_host != self.host:
                 if existing_host in [
@@ -98,7 +98,6 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
     VERSION = 1

     scan_filter: str | None = None
-    all_identifiers: set[str]
     atv: BaseConfig | None = None
     atv_identifiers: list[str] | None = None
     _host: str  # host in zeroconf discovery info, should not be accessed by other flows

@@ -118,6 +117,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
     def __init__(self) -> None:
         """Initialize a new AppleTVConfigFlow."""
        self.credentials: dict[int, str | None] = {}  # Protocol -> credentials
+        self.all_identifiers: set[str] = set()

     @property
     def device_identifier(self) -> str | None:
@@ -120,6 +120,8 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
         """Wait for the client to be ready."""

         if not self.data or Attribute.MAC_ADDRESS not in self.data:
+            await self.client.read_mac_address()
+
             data = await self.client.wait_for_response(
                 FunctionalDomain.IDENTIFICATION, 2, WAIT_TIMEOUT
             )

@@ -130,12 +132,9 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):

             return False

-        if not self.data or Attribute.NAME not in self.data:
-            await self.client.wait_for_response(
-                FunctionalDomain.IDENTIFICATION, 4, WAIT_TIMEOUT
-            )
-
         if not self.data or Attribute.THERMOSTAT_MODES not in self.data:
+            await self.client.read_thermostat_iaq_available()
+
             await self.client.wait_for_response(
                 FunctionalDomain.CONTROL, 7, WAIT_TIMEOUT
             )

@@ -144,10 +143,16 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
             not self.data
             or Attribute.INDOOR_TEMPERATURE_CONTROLLING_SENSOR_STATUS not in self.data
         ):
+            await self.client.read_sensors()
+
             await self.client.wait_for_response(
                 FunctionalDomain.SENSORS, 2, WAIT_TIMEOUT
             )

+        await self.client.read_thermostat_status()
+
+        await self.client.read_iaq_status()
+
         await ready_callback(True)

         return True
@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["pyaprilaire"],
-  "requirements": ["pyaprilaire==0.7.4"]
+  "requirements": ["pyaprilaire==0.7.7"]
 }
@@ -75,7 +75,7 @@ class AudioBuffer:
 class VoiceCommandSegmenter:
     """Segments an audio stream into voice commands."""

-    speech_seconds: float = 0.1
+    speech_seconds: float = 0.3
     """Seconds of speech before voice command has started."""

     command_seconds: float = 1.0
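
The bump from 0.1 to 0.3 means roughly 300 ms of continuous speech must be heard before a voice command is considered started, trading a little latency for fewer false triggers. A minimal sketch of how such a speech-seconds gate can work (illustrative only — `chunk_seconds` and the per-chunk boolean input are assumptions, not the integration's real API):

```python
# Illustrative sketch of a speech-seconds gate, not the actual
# assist_pipeline implementation. Assumes fixed-size audio chunks and
# an external VAD that labels each chunk as speech or silence.
from dataclasses import dataclass


@dataclass
class SpeechGate:
    speech_seconds: float = 0.3  # seconds of speech before a command "starts"
    chunk_seconds: float = 0.03  # assumed duration of one audio chunk

    _speech_run: float = 0.0
    in_command: bool = False

    def process(self, chunk_is_speech: bool) -> bool:
        """Feed one VAD decision; return True once a command has started."""
        if self.in_command:
            return True
        if chunk_is_speech:
            self._speech_run += self.chunk_seconds
            if self._speech_run >= self.speech_seconds:
                self.in_command = True
        else:
            self._speech_run = 0.0  # a silent chunk resets the run
        return self.in_command
```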
@@ -21,8 +21,10 @@ from .manager import (
     BackupManager,
     BackupPlatformProtocol,
     BackupReaderWriter,
+    BackupReaderWriterError,
     CoreBackupReaderWriter,
+    CreateBackupEvent,
     IncorrectPasswordError,
     ManagerBackup,
     NewBackup,
     WrittenBackup,

@@ -39,8 +41,10 @@ __all__ = [
     "BackupAgentPlatformProtocol",
     "BackupPlatformProtocol",
     "BackupReaderWriter",
+    "BackupReaderWriterError",
+    "CreateBackupEvent",
     "Folder",
     "IncorrectPasswordError",
     "LocalBackupAgent",
     "NewBackup",
     "WrittenBackup",
@@ -17,7 +17,7 @@ from homeassistant.helpers.typing import UNDEFINED, UndefinedType
 from homeassistant.util import dt as dt_util

 from .const import LOGGER
-from .models import Folder
+from .models import BackupManagerError, Folder

 if TYPE_CHECKING:
     from .manager import BackupManager, ManagerBackup

@@ -318,9 +318,9 @@ class BackupSchedule:
                 password=config_data.create_backup.password,
                 with_automatic_settings=True,
             )
+        except BackupManagerError as err:
+            LOGGER.error("Error creating backup: %s", err)
         except Exception:  # noqa: BLE001
-            # another more specific exception will be added
-            # and handled in the future
             LOGGER.exception("Unexpected error creating automatic backup")

         manager.remove_next_backup_event = async_track_point_in_time(
@@ -46,15 +46,11 @@ from .const import (
     EXCLUDE_FROM_BACKUP,
     LOGGER,
 )
-from .models import AgentBackup, Folder
+from .models import AgentBackup, BackupManagerError, Folder
 from .store import BackupStore
 from .util import make_backup_dir, read_backup, validate_password


-class IncorrectPasswordError(HomeAssistantError):
-    """Raised when the password is incorrect."""
-
-
 @dataclass(frozen=True, kw_only=True, slots=True)
 class NewBackup:
     """New backup class."""

@@ -245,6 +241,14 @@ class BackupReaderWriter(abc.ABC):
         """Restore a backup."""


+class BackupReaderWriterError(HomeAssistantError):
+    """Backup reader/writer error."""
+
+
+class IncorrectPasswordError(BackupReaderWriterError):
+    """Raised when the password is incorrect."""
+
+
 class BackupManager:
     """Define the format that backup managers can have."""
@@ -373,7 +377,9 @@ class BackupManager:
         )
         for result in pre_backup_results:
             if isinstance(result, Exception):
-                raise result
+                raise BackupManagerError(
+                    f"Error during pre-backup: {result}"
+                ) from result

     async def async_post_backup_actions(self) -> None:
         """Perform post backup actions."""

@@ -386,7 +392,9 @@ class BackupManager:
         )
         for result in post_backup_results:
             if isinstance(result, Exception):
-                raise result
+                raise BackupManagerError(
+                    f"Error during post-backup: {result}"
+                ) from result

     async def load_platforms(self) -> None:
         """Load backup platforms."""

@@ -422,11 +430,21 @@ class BackupManager:
             return_exceptions=True,
         )
         for idx, result in enumerate(sync_backup_results):
-            if isinstance(result, Exception):
+            if isinstance(result, BackupReaderWriterError):
+                # writer errors will affect all agents
+                # no point in continuing
+                raise BackupManagerError(str(result)) from result
+            if isinstance(result, BackupAgentError):
                 agent_errors[agent_ids[idx]] = result
                 LOGGER.exception(
                     "Error during backup upload - %s", result, exc_info=result
                 )
                 continue
+            if isinstance(result, Exception):
+                # trap bugs from agents
+                agent_errors[agent_ids[idx]] = result
+                LOGGER.error("Unexpected error: %s", result, exc_info=result)
+                continue
             if isinstance(result, BaseException):
                 raise result

         return agent_errors
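
The loop above is the standard way to fan work out with `asyncio.gather(..., return_exceptions=True)` and then partition results by exception type: known per-agent failures are recorded, anything else is re-raised. A self-contained sketch of the same partitioning pattern (the agent names and failing coroutine are made up for illustration):

```python
import asyncio


class AgentError(Exception):
    """Stands in for BackupAgentError: a per-agent, recoverable failure."""


async def upload(agent_id: str) -> None:
    if agent_id == "cloud":
        raise AgentError("upload rejected")  # hypothetical failure


async def main() -> None:
    agent_ids = ["local", "cloud"]
    # return_exceptions=True delivers raised exceptions as results
    results = await asyncio.gather(
        *(upload(a) for a in agent_ids), return_exceptions=True
    )
    agent_errors: dict[str, Exception] = {}
    for idx, result in enumerate(results):
        if isinstance(result, AgentError):
            agent_errors[agent_ids[idx]] = result  # record, keep going
            continue
        if isinstance(result, BaseException):
            raise result  # a bug or cancellation: do not swallow it
    print(agent_errors)  # {'cloud': AgentError('upload rejected')}


asyncio.run(main())
```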

    async def async_get_backups(
@@ -449,7 +467,7 @@ class BackupManager:
                 agent_errors[agent_ids[idx]] = result
                 continue
             if isinstance(result, BaseException):
-                raise result
+                raise result  # unexpected error
             for agent_backup in result:
                 if (backup_id := agent_backup.backup_id) not in backups:
                     if known_backup := self.known_backups.get(backup_id):
@@ -499,7 +517,7 @@ class BackupManager:
                 agent_errors[agent_ids[idx]] = result
                 continue
             if isinstance(result, BaseException):
-                raise result
+                raise result  # unexpected error
             if not result:
                 continue
             if backup is None:
@@ -563,7 +581,7 @@ class BackupManager:
                 agent_errors[agent_ids[idx]] = result
                 continue
             if isinstance(result, BaseException):
-                raise result
+                raise result  # unexpected error

         if not agent_errors:
             self.known_backups.remove(backup_id)
@@ -578,7 +596,7 @@ class BackupManager:
     ) -> None:
         """Receive and store a backup file from upload."""
         if self.state is not BackupManagerState.IDLE:
-            raise HomeAssistantError(f"Backup manager busy: {self.state}")
+            raise BackupManagerError(f"Backup manager busy: {self.state}")
         self.async_on_backup_event(
             ReceiveBackupEvent(stage=None, state=ReceiveBackupState.IN_PROGRESS)
         )
@@ -652,6 +670,7 @@ class BackupManager:
             include_homeassistant=include_homeassistant,
             name=name,
             password=password,
+            raise_task_error=True,
             with_automatic_settings=with_automatic_settings,
         )
         assert self._backup_finish_task
@@ -669,11 +688,12 @@ class BackupManager:
         include_homeassistant: bool,
         name: str | None,
         password: str | None,
+        raise_task_error: bool = False,
         with_automatic_settings: bool = False,
     ) -> NewBackup:
         """Initiate generating a backup."""
         if self.state is not BackupManagerState.IDLE:
-            raise HomeAssistantError(f"Backup manager busy: {self.state}")
+            raise BackupManagerError(f"Backup manager busy: {self.state}")

         if with_automatic_settings:
             self.config.data.last_attempted_automatic_backup = dt_util.now()
@@ -692,6 +712,7 @@ class BackupManager:
                 include_homeassistant=include_homeassistant,
                 name=name,
                 password=password,
+                raise_task_error=raise_task_error,
                 with_automatic_settings=with_automatic_settings,
             )
         except Exception:
@@ -714,57 +735,83 @@ class BackupManager:
         include_homeassistant: bool,
         name: str | None,
         password: str | None,
+        raise_task_error: bool,
         with_automatic_settings: bool,
     ) -> NewBackup:
         """Initiate generating a backup."""
         if not agent_ids:
-            raise HomeAssistantError("At least one agent must be selected")
-        if any(agent_id not in self.backup_agents for agent_id in agent_ids):
-            raise HomeAssistantError("Invalid agent selected")
+            raise BackupManagerError("At least one agent must be selected")
+        if invalid_agents := [
+            agent_id for agent_id in agent_ids if agent_id not in self.backup_agents
+        ]:
+            raise BackupManagerError(f"Invalid agents selected: {invalid_agents}")
         if include_all_addons and include_addons:
-            raise HomeAssistantError(
+            raise BackupManagerError(
                 "Cannot include all addons and specify specific addons"
             )

         backup_name = (
             name
-            or f"{"Automatic" if with_automatic_settings else "Custom"} {HAVERSION}"
+            or f"{"Automatic" if with_automatic_settings else "Custom"} backup {HAVERSION}"
         )
-        new_backup, self._backup_task = await self._reader_writer.async_create_backup(
-            agent_ids=agent_ids,
-            backup_name=backup_name,
-            extra_metadata={
-                "instance_id": await instance_id.async_get(self.hass),
-                "with_automatic_settings": with_automatic_settings,
-            },
-            include_addons=include_addons,
-            include_all_addons=include_all_addons,
-            include_database=include_database,
-            include_folders=include_folders,
-            include_homeassistant=include_homeassistant,
-            on_progress=self.async_on_backup_event,
-            password=password,
-        )
-        self._backup_finish_task = self.hass.async_create_task(
+
+        try:
+            (
+                new_backup,
+                self._backup_task,
+            ) = await self._reader_writer.async_create_backup(
+                agent_ids=agent_ids,
+                backup_name=backup_name,
+                extra_metadata={
+                    "instance_id": await instance_id.async_get(self.hass),
+                    "with_automatic_settings": with_automatic_settings,
+                },
+                include_addons=include_addons,
+                include_all_addons=include_all_addons,
+                include_database=include_database,
+                include_folders=include_folders,
+                include_homeassistant=include_homeassistant,
+                on_progress=self.async_on_backup_event,
+                password=password,
+            )
+        except BackupReaderWriterError as err:
+            raise BackupManagerError(str(err)) from err
+
+        backup_finish_task = self._backup_finish_task = self.hass.async_create_task(
             self._async_finish_backup(agent_ids, with_automatic_settings),
             name="backup_manager_finish_backup",
         )
+        if not raise_task_error:
+
+            def log_finish_task_error(task: asyncio.Task[None]) -> None:
+                if task.done() and not task.cancelled() and (err := task.exception()):
+                    if isinstance(err, BackupManagerError):
+                        LOGGER.error("Error creating backup: %s", err)
+                    else:
+                        LOGGER.error("Unexpected error: %s", err, exc_info=err)
+
+            backup_finish_task.add_done_callback(log_finish_task_error)
+
         return new_backup

     async def _async_finish_backup(
         self, agent_ids: list[str], with_automatic_settings: bool
     ) -> None:
         """Finish a backup."""
         if TYPE_CHECKING:
             assert self._backup_task is not None
         try:
             written_backup = await self._backup_task
-        except Exception as err:  # noqa: BLE001
-            LOGGER.debug("Generating backup failed", exc_info=err)
+        except Exception as err:
             self.async_on_backup_event(
                 CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
             )
             if with_automatic_settings:
                 self._update_issue_backup_failed()
+
+            if isinstance(err, BackupReaderWriterError):
+                raise BackupManagerError(str(err)) from err
+            raise  # unexpected error
         else:
             LOGGER.debug(
                 "Generated new backup with backup_id %s, uploading to agents %s",
@@ -777,25 +824,47 @@ class BackupManager:
                     state=CreateBackupState.IN_PROGRESS,
                 )
             )
-            agent_errors = await self._async_upload_backup(
-                backup=written_backup.backup,
-                agent_ids=agent_ids,
-                open_stream=written_backup.open_stream,
-            )
-            await written_backup.release_stream()
-            if with_automatic_settings:
-                # create backup was successful, update last_completed_automatic_backup
-                self.config.data.last_completed_automatic_backup = dt_util.now()
-                self.store.save()
-                self._update_issue_after_agent_upload(agent_errors)
-            self.known_backups.add(written_backup.backup, agent_errors)
-
-            self.async_on_backup_event(
-                CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED)
-            )
+            try:
+                agent_errors = await self._async_upload_backup(
+                    backup=written_backup.backup,
+                    agent_ids=agent_ids,
+                    open_stream=written_backup.open_stream,
+                )
+            except BaseException:
+                self.async_on_backup_event(
+                    CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
+                )
+                raise  # manager or unexpected error
+            finally:
+                try:
+                    await written_backup.release_stream()
+                except Exception:
+                    self.async_on_backup_event(
+                        CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
+                    )
+                    raise
+            self.known_backups.add(written_backup.backup, agent_errors)
+            if agent_errors:
+                self.async_on_backup_event(
+                    CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
+                )
+            else:
+                if with_automatic_settings:
+                    # create backup was successful, update last_completed_automatic_backup
+                    self.config.data.last_completed_automatic_backup = dt_util.now()
+                    self.store.save()
+
+                self.async_on_backup_event(
+                    CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED)
+                )
+
+            if with_automatic_settings:
+                self._update_issue_after_agent_upload(agent_errors)
+            # delete old backups more numerous than copies
+            # try this regardless of agent errors above
+            await delete_backups_exceeding_configured_count(self)
         finally:
             self._backup_task = None
             self._backup_finish_task = None
@@ -814,7 +883,7 @@ class BackupManager:
     ) -> None:
         """Initiate restoring a backup."""
         if self.state is not BackupManagerState.IDLE:
-            raise HomeAssistantError(f"Backup manager busy: {self.state}")
+            raise BackupManagerError(f"Backup manager busy: {self.state}")

         self.async_on_backup_event(
             RestoreBackupEvent(stage=None, state=RestoreBackupState.IN_PROGRESS)
@@ -829,6 +898,9 @@ class BackupManager:
                 restore_folders=restore_folders,
                 restore_homeassistant=restore_homeassistant,
             )
+            self.async_on_backup_event(
+                RestoreBackupEvent(stage=None, state=RestoreBackupState.COMPLETED)
+            )
         except Exception:
             self.async_on_backup_event(
                 RestoreBackupEvent(stage=None, state=RestoreBackupState.FAILED)
@@ -851,7 +923,7 @@ class BackupManager:
         """Initiate restoring a backup."""
         agent = self.backup_agents[agent_id]
         if not await agent.async_get_backup(backup_id):
-            raise HomeAssistantError(
+            raise BackupManagerError(
                 f"Backup {backup_id} not found in agent {agent_id}"
             )
@@ -1024,11 +1096,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         backup_id = _generate_backup_id(date_str, backup_name)

         if include_addons or include_all_addons or include_folders:
-            raise HomeAssistantError(
+            raise BackupReaderWriterError(
                 "Addons and folders are not supported by core backup"
             )
         if not include_homeassistant:
-            raise HomeAssistantError("Home Assistant must be included in backup")
+            raise BackupReaderWriterError("Home Assistant must be included in backup")

         backup_task = self._hass.async_create_task(
             self._async_create_backup(
@@ -1099,6 +1171,13 @@ class CoreBackupReaderWriter(BackupReaderWriter):
                 password,
                 local_agent_tar_file_path,
             )
+        except (BackupManagerError, OSError, tarfile.TarError, ValueError) as err:
+            # BackupManagerError from async_pre_backup_actions
+            # OSError from file operations
+            # TarError from tarfile
+            # ValueError from json_bytes
+            raise BackupReaderWriterError(str(err)) from err
         else:
             backup = AgentBackup(
                 addons=[],
                 backup_id=backup_id,
@@ -1116,12 +1195,15 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         async_add_executor_job = self._hass.async_add_executor_job

         async def send_backup() -> AsyncIterator[bytes]:
-            f = await async_add_executor_job(tar_file_path.open, "rb")
-            try:
-                while chunk := await async_add_executor_job(f.read, 2**20):
-                    yield chunk
-            finally:
-                await async_add_executor_job(f.close)
+            try:
+                f = await async_add_executor_job(tar_file_path.open, "rb")
+                try:
+                    while chunk := await async_add_executor_job(f.read, 2**20):
+                        yield chunk
+                finally:
+                    await async_add_executor_job(f.close)
+            except OSError as err:
+                raise BackupReaderWriterError(str(err)) from err

         async def open_backup() -> AsyncIterator[bytes]:
             return send_backup()
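
`send_backup` shows the pattern for streaming a large file without blocking the event loop: every blocking file operation (`open`, `read`, `close`) is pushed to an executor and the data is yielded in 1 MiB chunks. A standalone sketch of the same idea using a plain executor (the file path is a placeholder):

```python
import asyncio
from collections.abc import AsyncIterator
from pathlib import Path


async def stream_file(path: Path) -> AsyncIterator[bytes]:
    """Yield a file in 1 MiB chunks, doing all blocking I/O off the loop."""
    loop = asyncio.get_running_loop()
    f = await loop.run_in_executor(None, path.open, "rb")
    try:
        # 2**20 bytes per read keeps memory bounded for arbitrarily large files
        while chunk := await loop.run_in_executor(None, f.read, 2**20):
            yield chunk
    finally:
        await loop.run_in_executor(None, f.close)


async def main() -> None:
    total = 0
    async for chunk in stream_file(Path("/tmp/example.tar")):  # placeholder path
        total += len(chunk)
    print(total)
```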
@@ -1129,14 +1211,20 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         async def remove_backup() -> None:
             if local_agent_tar_file_path:
                 return
-            await async_add_executor_job(tar_file_path.unlink, True)
+            try:
+                await async_add_executor_job(tar_file_path.unlink, True)
+            except OSError as err:
+                raise BackupReaderWriterError(str(err)) from err

         return WrittenBackup(
             backup=backup, open_stream=open_backup, release_stream=remove_backup
         )
     finally:
         # Inform integrations the backup is done
-        await manager.async_post_backup_actions()
+        try:
+            await manager.async_post_backup_actions()
+        except BackupManagerError as err:
+            raise BackupReaderWriterError(str(err)) from err
@@ -1249,11 +1337,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         """

         if restore_addons or restore_folders:
-            raise HomeAssistantError(
+            raise BackupReaderWriterError(
                 "Addons and folders are not supported in core restore"
             )
         if not restore_homeassistant and not restore_database:
-            raise HomeAssistantError(
+            raise BackupReaderWriterError(
                 "Home Assistant or database must be included in restore"
             )
@@ -6,6 +6,8 @@ from dataclasses import asdict, dataclass
 from enum import StrEnum
 from typing import Any, Self

+from homeassistant.exceptions import HomeAssistantError
+

 @dataclass(frozen=True, kw_only=True)
 class AddonInfo:

@@ -67,3 +69,7 @@ class AgentBackup:
             protected=data["protected"],
             size=data["size"],
         )
+
+
+class BackupManagerError(HomeAssistantError):
+    """Backup manager error."""
@@ -5,8 +5,8 @@
       "description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
     },
     "automatic_backup_failed_upload_agents": {
-      "title": "Automatic backup could not be uploaded to agents",
-      "description": "The automatic backup could not be uploaded to agents {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
+      "title": "Automatic backup could not be uploaded to the configured locations",
+      "description": "The automatic backup could not be uploaded to the configured locations {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
     }
   },
   "services": {
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["aiocomelit"],
-  "requirements": ["aiocomelit==0.9.1"]
+  "requirements": ["aiocomelit==0.10.1"]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.20"]
+  "requirements": ["hassil==2.1.0", "home-assistant-intents==2025.1.1"]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ecovacs",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.10", "deebot-client==10.0.1"]
+  "requirements": ["py-sucks==0.9.10", "deebot-client==10.1.0"]
 }
@@ -6,11 +6,16 @@ from dataclasses import dataclass

 from elevenlabs import AsyncElevenLabs, Model
 from elevenlabs.core import ApiError
+from httpx import ConnectError

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_API_KEY, Platform
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
+from homeassistant.exceptions import (
+    ConfigEntryAuthFailed,
+    ConfigEntryError,
+    ConfigEntryNotReady,
+)
 from homeassistant.helpers.httpx_client import get_async_client

 from .const import CONF_MODEL

@@ -48,6 +53,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ElevenLabsConfigEntry) -
     model_id = entry.options[CONF_MODEL]
     try:
         model = await get_model_by_id(client, model_id)
+    except ConnectError as err:
+        raise ConfigEntryNotReady("Failed to connect") from err
     except ApiError as err:
         raise ConfigEntryAuthFailed("Auth failed") from err
@@ -151,7 +151,9 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN):
                 port=self._panel_direct_port,
             )
         )
-        ssl_context = build_direct_ssl_context(cadata=self._panel_direct_ssl_cert)
+        ssl_context = await self.hass.async_add_executor_job(
+            build_direct_ssl_context, self._panel_direct_ssl_cert
+        )

         # Attempt the connection to make sure the pin works. Also, take the chance to retrieve the panel ID via APIs.
         client_api_url = get_direct_api_url(
@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/elmax",
   "iot_class": "cloud_polling",
   "loggers": ["elmax_api"],
-  "requirements": ["elmax-api==0.0.6.3"],
+  "requirements": ["elmax-api==0.0.6.4rc0"],
   "zeroconf": [
     {
       "type": "_elmax-ssl._tcp.local."
@@ -14,6 +14,7 @@ import feedparser

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant, callback
+from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers.storage import Store
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 from homeassistant.util import dt as dt_util

@@ -101,7 +102,11 @@ class FeedReaderCoordinator(
     async def async_setup(self) -> None:
         """Set up the feed manager."""
-        feed = await self._async_fetch_feed()
+        try:
+            feed = await self._async_fetch_feed()
+        except UpdateFailed as err:
+            raise ConfigEntryNotReady from err
+
         self.logger.debug("Feed data fetched from %s : %s", self.url, feed["feed"])
         if feed_author := feed["feed"].get("author"):
             self.feed_author = html.unescape(feed_author)
@@ -23,10 +23,10 @@ from homeassistant.helpers.dispatcher import (
     async_dispatcher_send,
 )
 from homeassistant.helpers.entity import Entity
-from homeassistant.helpers.system_info import is_official_image
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.loader import bind_hass
 from homeassistant.util.signal_type import SignalType
+from homeassistant.util.system_info import is_official_image

 DOMAIN = "ffmpeg"
@@ -20,7 +20,8 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import aiohttp_client

-from .const import CONF_TOKEN_EXPIRY, DOMAIN
+from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, CONF_TOKEN_EXPIRY
+from .coordinator import FlickConfigEntry, FlickElectricDataCoordinator

 _LOGGER = logging.getLogger(__name__)

@@ -29,24 +30,67 @@ CONF_ID_TOKEN = "id_token"
 PLATFORMS = [Platform.SENSOR]


-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_setup_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool:
     """Set up Flick Electric from a config entry."""
     auth = HassFlickAuth(hass, entry)

-    hass.data.setdefault(DOMAIN, {})
-    hass.data[DOMAIN][entry.entry_id] = FlickAPI(auth)
+    coordinator = FlickElectricDataCoordinator(
+        hass, FlickAPI(auth), entry.data[CONF_SUPPLY_NODE_REF]
+    )
+
+    await coordinator.async_config_entry_first_refresh()
+
+    entry.runtime_data = coordinator

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

     return True


-async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_unload_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool:
     """Unload a config entry."""
-    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
-    if unload_ok:
-        hass.data[DOMAIN].pop(entry.entry_id)
-    return unload_ok
+    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
+
+
+async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
+    """Migrate old entry."""
+    _LOGGER.debug(
+        "Migrating configuration from version %s.%s",
+        config_entry.version,
+        config_entry.minor_version,
+    )
+
+    if config_entry.version > 2:
+        return False
+
+    if config_entry.version == 1:
+        api = FlickAPI(HassFlickAuth(hass, config_entry))
+
+        accounts = await api.getCustomerAccounts()
+        active_accounts = [
+            account for account in accounts if account["status"] == "active"
+        ]
+
+        # A single active account can be auto-migrated
+        if (len(active_accounts)) == 1:
+            account = active_accounts[0]
+
+            new_data = {**config_entry.data}
+            new_data[CONF_ACCOUNT_ID] = account["id"]
+            new_data[CONF_SUPPLY_NODE_REF] = account["main_consumer"]["supply_node_ref"]
+            hass.config_entries.async_update_entry(
+                config_entry,
+                title=account["address"],
+                unique_id=account["id"],
+                data=new_data,
+                version=2,
+            )
+            return True
+
+        config_entry.async_start_reauth(hass, data={**config_entry.data})
+        return False
+
+    return True


 class HassFlickAuth(AbstractFlickAuth):
@@ -1,14 +1,18 @@
 """Config Flow for Flick Electric integration."""

 import asyncio
+from collections.abc import Mapping
 import logging
 from typing import Any

-from pyflick.authentication import AuthException, SimpleFlickAuth
+from aiohttp import ClientResponseError
+from pyflick import FlickAPI
+from pyflick.authentication import AbstractFlickAuth, SimpleFlickAuth
 from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET
+from pyflick.types import APIException, AuthException, CustomerAccount
 import voluptuous as vol

-from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
+from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
 from homeassistant.const import (
     CONF_CLIENT_ID,
     CONF_CLIENT_SECRET,
@@ -17,12 +21,18 @@ from homeassistant.const import (
 )
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import aiohttp_client
+from homeassistant.helpers.selector import (
+    SelectOptionDict,
+    SelectSelector,
+    SelectSelectorConfig,
+    SelectSelectorMode,
+)

-from .const import DOMAIN
+from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, DOMAIN

 _LOGGER = logging.getLogger(__name__)

-DATA_SCHEMA = vol.Schema(
+LOGIN_SCHEMA = vol.Schema(
     {
         vol.Required(CONF_USERNAME): str,
         vol.Required(CONF_PASSWORD): str,
@@ -35,10 +45,13 @@ DATA_SCHEMA = vol.Schema(
 class FlickConfigFlow(ConfigFlow, domain=DOMAIN):
     """Flick config flow."""

-    VERSION = 1
+    VERSION = 2
+    auth: AbstractFlickAuth
+    accounts: list[CustomerAccount]
+    data: dict[str, Any]

-    async def _validate_input(self, user_input):
-        auth = SimpleFlickAuth(
+    async def _validate_auth(self, user_input: Mapping[str, Any]) -> bool:
+        self.auth = SimpleFlickAuth(
             username=user_input[CONF_USERNAME],
             password=user_input[CONF_PASSWORD],
             websession=aiohttp_client.async_get_clientsession(self.hass),
@@ -48,22 +61,83 @@ class FlickConfigFlow(ConfigFlow, domain=DOMAIN):

         try:
             async with asyncio.timeout(60):
-                token = await auth.async_get_access_token()
-        except TimeoutError as err:
+                token = await self.auth.async_get_access_token()
+        except (TimeoutError, ClientResponseError) as err:
             raise CannotConnect from err
         except AuthException as err:
             raise InvalidAuth from err

         return token is not None

+    async def async_step_select_account(
+        self, user_input: Mapping[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Ask user to select account."""
+
+        errors = {}
+        if user_input is not None and CONF_ACCOUNT_ID in user_input:
+            self.data[CONF_ACCOUNT_ID] = user_input[CONF_ACCOUNT_ID]
+            self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref(
+                user_input[CONF_ACCOUNT_ID]
+            )
+            try:
+                # Ensure supply node is active
+                await FlickAPI(self.auth).getPricing(self.data[CONF_SUPPLY_NODE_REF])
+            except (APIException, ClientResponseError):
+                errors["base"] = "cannot_connect"
+            except AuthException:
+                # We should never get here as we have a valid token
+                return self.async_abort(reason="no_permissions")
+            else:
+                # Supply node is active
+                return await self._async_create_entry()
+
+        try:
+            self.accounts = await FlickAPI(self.auth).getCustomerAccounts()
+        except (APIException, ClientResponseError):
+            errors["base"] = "cannot_connect"
+
+        active_accounts = [a for a in self.accounts if a["status"] == "active"]
+
+        if len(active_accounts) == 0:
+            return self.async_abort(reason="no_accounts")
+
+        if len(active_accounts) == 1:
+            self.data[CONF_ACCOUNT_ID] = active_accounts[0]["id"]
+            self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref(
+                active_accounts[0]["id"]
+            )
+
+            return await self._async_create_entry()
+
+        return self.async_show_form(
+            step_id="select_account",
+            data_schema=vol.Schema(
+                {
+                    vol.Required(CONF_ACCOUNT_ID): SelectSelector(
+                        SelectSelectorConfig(
+                            options=[
+                                SelectOptionDict(
+                                    value=account["id"], label=account["address"]
+                                )
+                                for account in active_accounts
+                            ],
+                            mode=SelectSelectorMode.LIST,
+                        )
+                    )
+                }
+            ),
+            errors=errors,
+        )
+
     async def async_step_user(
-        self, user_input: dict[str, Any] | None = None
+        self, user_input: Mapping[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Handle gathering login info."""
         errors = {}
         if user_input is not None:
             try:
-                await self._validate_input(user_input)
+                await self._validate_auth(user_input)
             except CannotConnect:
                 errors["base"] = "cannot_connect"
             except InvalidAuth:
@@ -72,20 +146,61 @@ class FlickConfigFlow(ConfigFlow, domain=DOMAIN):
                 _LOGGER.exception("Unexpected exception")
                 errors["base"] = "unknown"
             else:
-                await self.async_set_unique_id(
-                    f"flick_electric_{user_input[CONF_USERNAME]}"
-                )
-                self._abort_if_unique_id_configured()
-
-                return self.async_create_entry(
-                    title=f"Flick Electric: {user_input[CONF_USERNAME]}",
-                    data=user_input,
-                )
+                self.data = dict(user_input)
+                return await self.async_step_select_account(user_input)

         return self.async_show_form(
-            step_id="user", data_schema=DATA_SCHEMA, errors=errors
+            step_id="user", data_schema=LOGIN_SCHEMA, errors=errors
         )

+    async def async_step_reauth(
+        self, user_input: Mapping[str, Any]
+    ) -> ConfigFlowResult:
+        """Handle re-authentication."""
+
+        self.data = {**user_input}
+
+        return await self.async_step_user(user_input)
+
+    async def _async_create_entry(self) -> ConfigFlowResult:
+        """Create an entry for the flow."""
+
+        await self.async_set_unique_id(self.data[CONF_ACCOUNT_ID])
+
+        account = self._get_account(self.data[CONF_ACCOUNT_ID])
+
+        if self.source == SOURCE_REAUTH:
+            # Migration completed
+            if self._get_reauth_entry().version == 1:
+                self.hass.config_entries.async_update_entry(
+                    self._get_reauth_entry(),
+                    unique_id=self.unique_id,
+                    data=self.data,
+                    version=self.VERSION,
+                )
+
+            return self.async_update_reload_and_abort(
+                self._get_reauth_entry(),
+                unique_id=self.unique_id,
+                title=account["address"],
+                data=self.data,
+            )
+
+        self._abort_if_unique_id_configured()
+
+        return self.async_create_entry(
+            title=account["address"],
+            data=self.data,
+        )
+
+    def _get_account(self, account_id: str) -> CustomerAccount:
+        """Get the account for the account ID."""
+        return next(a for a in self.accounts if a["id"] == account_id)
+
+    def _get_supply_node_ref(self, account_id: str) -> str:
+        """Get the supply node ref for the account."""
+        return self._get_account(account_id)["main_consumer"][CONF_SUPPLY_NODE_REF]


 class CannotConnect(HomeAssistantError):
     """Error to indicate we cannot connect."""
@@ -3,6 +3,8 @@
 DOMAIN = "flick_electric"

 CONF_TOKEN_EXPIRY = "expires"
+CONF_ACCOUNT_ID = "account_id"
+CONF_SUPPLY_NODE_REF = "supply_node_ref"

 ATTR_START_AT = "start_at"
 ATTR_END_AT = "end_at"
@@ -0,0 +1,47 @@
+"""Data Coordinator for Flick Electric."""
+
+import asyncio
+from datetime import timedelta
+import logging
+
+import aiohttp
+from pyflick import FlickAPI, FlickPrice
+from pyflick.types import APIException, AuthException
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ConfigEntryAuthFailed
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
+
+_LOGGER = logging.getLogger(__name__)
+
+SCAN_INTERVAL = timedelta(minutes=5)
+
+type FlickConfigEntry = ConfigEntry[FlickElectricDataCoordinator]
+
+
+class FlickElectricDataCoordinator(DataUpdateCoordinator[FlickPrice]):
+    """Coordinator for flick power price."""
+
+    def __init__(
+        self, hass: HomeAssistant, api: FlickAPI, supply_node_ref: str
+    ) -> None:
+        """Initialize FlickElectricDataCoordinator."""
+        super().__init__(
+            hass,
+            _LOGGER,
+            name="Flick Electric",
+            update_interval=SCAN_INTERVAL,
+        )
+        self.supply_node_ref = supply_node_ref
+        self._api = api
+
+    async def _async_update_data(self) -> FlickPrice:
+        """Fetch pricing data from Flick Electric."""
+        try:
+            async with asyncio.timeout(60):
+                return await self._api.getPricing(self.supply_node_ref)
+        except AuthException as err:
+            raise ConfigEntryAuthFailed from err
+        except (APIException, aiohttp.ClientResponseError) as err:
+            raise UpdateFailed from err
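
This new coordinator, together with the `type FlickConfigEntry = ConfigEntry[FlickElectricDataCoordinator]` alias, is the modern Home Assistant pattern: setup stores the coordinator on `entry.runtime_data`, and platforms get typed access back without a `hass.data[DOMAIN]` dictionary. A condensed sketch of how the pieces fit together (mirroring the `__init__.py` and `sensor.py` changes in this diff, with error handling and the integration-local imports such as `FlickAPI` and `HassFlickAuth` trimmed):

```python
# Condensed from the diffs above; not a complete, standalone integration.
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

type FlickConfigEntry = ConfigEntry[FlickElectricDataCoordinator]


async def async_setup_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool:
    coordinator = FlickElectricDataCoordinator(
        hass, FlickAPI(HassFlickAuth(hass, entry)), entry.data[CONF_SUPPLY_NODE_REF]
    )
    # Raises ConfigEntryNotReady if the first poll fails, so setup retries.
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator  # typed: FlickElectricDataCoordinator
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def sensor_setup(entry: FlickConfigEntry) -> None:
    # Any platform can recover the coordinator with full type information.
    coordinator = entry.runtime_data
    print(coordinator.supply_node_ref)
```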
@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["pyflick"],
-  "requirements": ["PyFlick==0.0.2"]
+  "requirements": ["PyFlick==1.1.2"]
 }
@@ -1,74 +1,72 @@
 """Support for Flick Electric Pricing data."""

-import asyncio
-from datetime import timedelta
+from decimal import Decimal
 import logging
 from typing import Any

-from pyflick import FlickAPI, FlickPrice
-
 from homeassistant.components.sensor import SensorEntity
-from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CURRENCY_CENT, UnitOfEnergy
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.util.dt import utcnow
+from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from .const import ATTR_COMPONENTS, ATTR_END_AT, ATTR_START_AT, DOMAIN
+from .const import ATTR_COMPONENTS, ATTR_END_AT, ATTR_START_AT
+from .coordinator import FlickConfigEntry, FlickElectricDataCoordinator

 _LOGGER = logging.getLogger(__name__)

-SCAN_INTERVAL = timedelta(minutes=5)
-

 async def async_setup_entry(
-    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
+    hass: HomeAssistant,
+    entry: FlickConfigEntry,
+    async_add_entities: AddEntitiesCallback,
 ) -> None:
     """Flick Sensor Setup."""
-    api: FlickAPI = hass.data[DOMAIN][entry.entry_id]
+    coordinator = entry.runtime_data

-    async_add_entities([FlickPricingSensor(api)], True)
+    async_add_entities([FlickPricingSensor(coordinator)])


-class FlickPricingSensor(SensorEntity):
+class FlickPricingSensor(CoordinatorEntity[FlickElectricDataCoordinator], SensorEntity):
     """Entity object for Flick Electric sensor."""

     _attr_attribution = "Data provided by Flick Electric"
     _attr_native_unit_of_measurement = f"{CURRENCY_CENT}/{UnitOfEnergy.KILO_WATT_HOUR}"
     _attr_has_entity_name = True
     _attr_translation_key = "power_price"
-    _attributes: dict[str, Any] = {}

-    def __init__(self, api: FlickAPI) -> None:
+    def __init__(self, coordinator: FlickElectricDataCoordinator) -> None:
         """Entity object for Flick Electric sensor."""
-        self._api: FlickAPI = api
-        self._price: FlickPrice = None
+        super().__init__(coordinator)
+
+        self._attr_unique_id = f"{coordinator.supply_node_ref}_pricing"

     @property
-    def native_value(self):
+    def native_value(self) -> Decimal:
         """Return the state of the sensor."""
-        return self._price.price
+        # The API should return a unit price with quantity of 1.0 when no start/end time is provided
+        if self.coordinator.data.quantity != 1:
+            _LOGGER.warning(
+                "Unexpected quantity for unit price: %s", self.coordinator.data
+            )
+        return self.coordinator.data.cost

     @property
-    def extra_state_attributes(self):
+    def extra_state_attributes(self) -> dict[str, Any] | None:
         """Return the state attributes."""
-        return self._attributes
-
-    async def async_update(self) -> None:
-        """Get the Flick Pricing data from the web service."""
-        if self._price and self._price.end_at >= utcnow():
-            return  # Power price data is still valid
-
-        async with asyncio.timeout(60):
-            self._price = await self._api.getPricing()
-
-        _LOGGER.debug("Pricing data: %s", self._price)
-
-        self._attributes[ATTR_START_AT] = self._price.start_at
-        self._attributes[ATTR_END_AT] = self._price.end_at
-        for component in self._price.components:
+        components: dict[str, Decimal] = {}
+
+        for component in self.coordinator.data.components:
             if component.charge_setter not in ATTR_COMPONENTS:
                 _LOGGER.warning("Found unknown component: %s", component.charge_setter)
                 continue

-            self._attributes[component.charge_setter] = float(component.value)
+            components[component.charge_setter] = component.value
+
+        return {
+            ATTR_START_AT: self.coordinator.data.start_at,
+            ATTR_END_AT: self.coordinator.data.end_at,
+            **components,
+        }
@@ -9,6 +9,12 @@
         "client_id": "Client ID (optional)",
         "client_secret": "Client Secret (optional)"
       }
+    },
+    "select_account": {
+      "title": "Select account",
+      "data": {
+        "account_id": "Account"
+      }
     }
   },
   "error": {

@@ -17,7 +23,10 @@
     "unknown": "[%key:common::config_flow::error::unknown%]"
   },
   "abort": {
-    "already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
+    "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
+    "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
+    "no_permissions": "Cannot get pricing for this account. Please check user permissions.",
+    "no_accounts": "No services are active on this Flick account"
   }
 },
 "entity": {
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20241224.0"]
+  "requirements": ["home-assistant-frontend==20250102.0"]
 }
@@ -349,7 +349,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
     ) -> ConfigFlowResult:
         """Handle the start of the config flow."""
         errors = {}
-        description_placeholders = {}
         hass = self.hass
         if user_input:
             # Secondary validation because serialised vol can't seem to handle this complexity:

@@ -365,8 +364,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
                 )
             except InvalidStreamException as err:
                 errors[CONF_STREAM_SOURCE] = str(err)
-                if err.details:
-                    errors["error_details"] = err.details
                 self.preview_stream = None
             if not errors:
                 user_input[CONF_CONTENT_TYPE] = still_format

@@ -385,8 +382,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
                 # temporary preview for user to check the image
                 self.preview_cam = user_input
                 return await self.async_step_user_confirm()
-            if "error_details" in errors:
-                description_placeholders["error"] = errors.pop("error_details")
         elif self.user_input:
             user_input = self.user_input
         else:

@@ -394,7 +389,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
         return self.async_show_form(
             step_id="user",
             data_schema=build_schema(user_input),
-            description_placeholders=description_placeholders,
             errors=errors,
         )

@@ -412,7 +406,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
                 title=self.title, data={}, options=self.user_input
             )
         register_preview(self.hass)
         preview_url = f"/api/generic/preview_flow_image/{self.flow_id}?t={datetime.now().isoformat()}"
         return self.async_show_form(
             step_id="user_confirm",
             data_schema=vol.Schema(

@@ -420,7 +413,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
                     vol.Required(CONF_CONFIRMED_OK, default=False): bool,
                 }
             ),
             description_placeholders={"preview_url": preview_url},
             errors=None,
             preview="generic_camera",
         )

@@ -437,6 +429,7 @@ class GenericOptionsFlowHandler(OptionsFlow):
     def __init__(self) -> None:
         """Initialize Generic IP Camera options flow."""
         self.preview_cam: dict[str, Any] = {}
+        self.preview_stream: Stream | None = None
         self.user_input: dict[str, Any] = {}
@@ -444,42 +437,45 @@ class GenericOptionsFlowHandler(OptionsFlow):
     ) -> ConfigFlowResult:
         """Manage Generic IP Camera options."""
         errors: dict[str, str] = {}
-        description_placeholders = {}
         hass = self.hass

-        if user_input is not None:
-            errors, still_format = await async_test_still(
-                hass, self.config_entry.options | user_input
-            )
-            try:
-                await async_test_and_preview_stream(hass, user_input)
-            except InvalidStreamException as err:
-                errors[CONF_STREAM_SOURCE] = str(err)
-                if err.details:
-                    errors["error_details"] = err.details
-            # Stream preview during options flow not yet implemented
-
-            still_url = user_input.get(CONF_STILL_IMAGE_URL)
-            if not errors:
-                if still_url is None:
-                    # If user didn't specify a still image URL,
-                    # The automatically generated still image that stream generates
-                    # is always jpeg
-                    still_format = "image/jpeg"
-                data = {
-                    CONF_USE_WALLCLOCK_AS_TIMESTAMPS: self.config_entry.options.get(
-                        CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False
-                    ),
-                    **user_input,
-                    CONF_CONTENT_TYPE: still_format
-                    or self.config_entry.options.get(CONF_CONTENT_TYPE),
-                }
-                self.user_input = data
-                # temporary preview for user to check the image
-                self.preview_cam = data
-                return await self.async_step_confirm_still()
-            if "error_details" in errors:
-                description_placeholders["error"] = errors.pop("error_details")
+        if user_input:
+            # Secondary validation because serialised vol can't seem to handle this complexity:
+            if not user_input.get(CONF_STILL_IMAGE_URL) and not user_input.get(
+                CONF_STREAM_SOURCE
+            ):
+                errors["base"] = "no_still_image_or_stream_url"
+            else:
+                errors, still_format = await async_test_still(hass, user_input)
+                try:
+                    self.preview_stream = await async_test_and_preview_stream(
+                        hass, user_input
+                    )
+                except InvalidStreamException as err:
+                    errors[CONF_STREAM_SOURCE] = str(err)
+                    self.preview_stream = None
+                if not errors:
+                    user_input[CONF_CONTENT_TYPE] = still_format
+                    still_url = user_input.get(CONF_STILL_IMAGE_URL)
+                    if still_url is None:
+                        # If user didn't specify a still image URL,
+                        # The automatically generated still image that stream generates
+                        # is always jpeg
+                        still_format = "image/jpeg"
+                    data = {
+                        CONF_USE_WALLCLOCK_AS_TIMESTAMPS: self.config_entry.options.get(
+                            CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False
+                        ),
+                        **user_input,
+                        CONF_CONTENT_TYPE: still_format
+                        or self.config_entry.options.get(CONF_CONTENT_TYPE),
+                    }
+                    self.user_input = data
+                    # temporary preview for user to check the image
+                    self.preview_cam = data
+                    return await self.async_step_user_confirm()
+        elif self.user_input:
+            user_input = self.user_input
         return self.async_show_form(
             step_id="init",
             data_schema=build_schema(
@@ -487,15 +483,17 @@ class GenericOptionsFlowHandler(OptionsFlow):
                 True,
                 self.show_advanced_options,
             ),
-            description_placeholders=description_placeholders,
             errors=errors,
         )

-    async def async_step_confirm_still(
+    async def async_step_user_confirm(
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Handle user clicking confirm after still preview."""
         if user_input:
+            if ha_stream := self.preview_stream:
+                # Kill off the temp stream we created.
+                await ha_stream.stop()
             if not user_input.get(CONF_CONFIRMED_OK):
                 return await self.async_step_init()
             return self.async_create_entry(

@@ -503,18 +501,22 @@ class GenericOptionsFlowHandler(OptionsFlow):
                 data=self.user_input,
             )
+        register_preview(self.hass)
+        preview_url = f"/api/generic/preview_flow_image/{self.flow_id}?t={datetime.now().isoformat()}"
         return self.async_show_form(
-            step_id="confirm_still",
+            step_id="user_confirm",
             data_schema=vol.Schema(
                 {
                     vol.Required(CONF_CONFIRMED_OK, default=False): bool,
                 }
             ),
+            description_placeholders={"preview_url": preview_url},
+            errors=None,
+            preview="generic_camera",
         )

     @staticmethod
     async def async_setup_preview(hass: HomeAssistant) -> None:
         """Set up preview WS API."""
         websocket_api.async_register_command(hass, ws_start_preview)


 class CameraImagePreview(HomeAssistantView):
     """Camera view to temporarily serve an image."""
@@ -556,7 +558,7 @@ class CameraImagePreview(HomeAssistantView):
     {
         vol.Required("type"): "generic_camera/start_preview",
         vol.Required("flow_id"): str,
-        vol.Optional("flow_type"): vol.Any("config_flow"),
+        vol.Optional("flow_type"): vol.Any("config_flow", "options_flow"),
         vol.Optional("user_input"): dict,
     }
 )

@@ -570,10 +572,17 @@ async def ws_start_preview(
     _LOGGER.debug("Generating websocket handler for generic camera preview")

     flow_id = msg["flow_id"]
-    flow = cast(
-        GenericIPCamConfigFlow,
-        hass.config_entries.flow._progress.get(flow_id),  # noqa: SLF001
-    )
+    flow: GenericIPCamConfigFlow | GenericOptionsFlowHandler
+    if msg.get("flow_type", "config_flow") == "config_flow":
+        flow = cast(
+            GenericIPCamConfigFlow,
+            hass.config_entries.flow._progress.get(flow_id),  # noqa: SLF001
+        )
+    else:  # (flow type == "options flow")
+        flow = cast(
+            GenericOptionsFlowHandler,
+            hass.config_entries.options._progress.get(flow_id),  # noqa: SLF001
+        )
     user_input = flow.preview_cam

     # Create an EntityPlatform, needed for name translations
@@ -67,11 +67,11 @@
         "use_wallclock_as_timestamps": "This option may correct segmenting or crashing issues arising from buggy timestamp implementations on some cameras"
       }
     },
-    "confirm_still": {
-      "title": "Preview",
-      "description": "",
+    "user_confirm": {
+      "title": "Confirmation",
+      "description": "Please wait for previews to load...",
       "data": {
-        "confirmed_ok": "This image looks good."
+        "confirmed_ok": "Everything looks good."
       }
     }
   },
@@ -34,6 +34,18 @@
          "moderate": "Moderate",
          "good": "Good",
          "very_good": "Very good"
        },
        "state_attributes": {
          "options": {
            "state": {
              "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
              "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
              "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
              "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
              "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
              "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
            }
          }
        }
      },
      "c6h6": {
@@ -51,6 +63,18 @@
          "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
          "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
          "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
        },
        "state_attributes": {
          "options": {
            "state": {
              "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
              "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
              "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
              "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
              "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
              "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
            }
          }
        }
      },
      "o3_index": {
@@ -62,6 +86,18 @@
          "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
          "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
          "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
        },
        "state_attributes": {
          "options": {
            "state": {
              "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
              "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
              "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
              "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
              "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
              "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
            }
          }
        }
      },
      "pm10_index": {
@@ -73,6 +109,18 @@
          "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
          "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
          "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
        },
        "state_attributes": {
          "options": {
            "state": {
              "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
              "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
              "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
              "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
              "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
              "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
            }
          }
        }
      },
      "pm25_index": {
@@ -84,6 +132,18 @@
          "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
          "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
          "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
        },
        "state_attributes": {
          "options": {
            "state": {
              "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
              "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
              "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
              "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
              "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
              "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
            }
          }
        }
      },
      "so2_index": {
@@ -95,6 +155,18 @@
          "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
          "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
          "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
        },
        "state_attributes": {
          "options": {
            "state": {
              "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
              "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
              "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
              "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
              "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
              "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
            }
          }
        }
      }
    }
@@ -20,6 +20,10 @@ CONF_GAIN = "gain"
CONF_PROFILES = "profiles"
CONF_TEXT_TYPE = "text_type"

DEFAULT_SPEED = 1.0
DEFAULT_PITCH = 0
DEFAULT_GAIN = 0

# STT constants
CONF_STT_MODEL = "stt_model"


@@ -31,7 +31,10 @@ from .const import (
    CONF_SPEED,
    CONF_TEXT_TYPE,
    CONF_VOICE,
    DEFAULT_GAIN,
    DEFAULT_LANG,
    DEFAULT_PITCH,
    DEFAULT_SPEED,
)

DEFAULT_VOICE = ""
@@ -104,15 +107,15 @@ def tts_options_schema(
            ),
            vol.Optional(
                CONF_SPEED,
                default=defaults.get(CONF_SPEED, 1.0),
                default=defaults.get(CONF_SPEED, DEFAULT_SPEED),
            ): NumberSelector(NumberSelectorConfig(min=0.25, max=4.0, step=0.01)),
            vol.Optional(
                CONF_PITCH,
                default=defaults.get(CONF_PITCH, 0),
                default=defaults.get(CONF_PITCH, DEFAULT_PITCH),
            ): NumberSelector(NumberSelectorConfig(min=-20.0, max=20.0, step=0.1)),
            vol.Optional(
                CONF_GAIN,
                default=defaults.get(CONF_GAIN, 0),
                default=defaults.get(CONF_GAIN, DEFAULT_GAIN),
            ): NumberSelector(NumberSelectorConfig(min=-96.0, max=16.0, step=0.1)),
            vol.Optional(
                CONF_PROFILES,

@@ -35,7 +35,10 @@ from .const import (
    CONF_SPEED,
    CONF_TEXT_TYPE,
    CONF_VOICE,
    DEFAULT_GAIN,
    DEFAULT_LANG,
    DEFAULT_PITCH,
    DEFAULT_SPEED,
    DOMAIN,
)
from .helpers import async_tts_voices, tts_options_schema, tts_platform_schema
@@ -191,11 +194,23 @@ class BaseGoogleCloudProvider:
                ssml_gender=gender,
                name=voice,
            ),
            # Avoid: "This voice does not support speaking rate or pitch parameters at this time."
            # by not specifying the fields unless they differ from the defaults
            audio_config=texttospeech.AudioConfig(
                audio_encoding=encoding,
                speaking_rate=options[CONF_SPEED],
                pitch=options[CONF_PITCH],
                volume_gain_db=options[CONF_GAIN],
                speaking_rate=(
                    options[CONF_SPEED]
                    if options[CONF_SPEED] != DEFAULT_SPEED
                    else None
                ),
                pitch=(
                    options[CONF_PITCH]
                    if options[CONF_PITCH] != DEFAULT_PITCH
                    else None
                ),
                volume_gain_db=(
                    options[CONF_GAIN] if options[CONF_GAIN] != DEFAULT_GAIN else None
                ),
                effects_profile_id=options[CONF_PROFILES],
            ),
        )
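The comment in the last hunk explains the why; the general pattern is to send tuning parameters only when they differ from the defaults, since some Google Cloud voices reject them outright. A minimal standalone sketch of the same idea (the helper name and defaults dict are illustrative, not the integration's API):

```python
# Illustrative helper: drop audio parameters still at their defaults so the
# synthesis request never names fields an unsupported voice would reject.
AUDIO_DEFAULTS = {"speaking_rate": 1.0, "pitch": 0, "volume_gain_db": 0}

def non_default_audio_params(options: dict) -> dict:
    return {
        key: value
        for key, value in options.items()
        if key in AUDIO_DEFAULTS and value != AUDIO_DEFAULTS[key]
    }

assert non_default_audio_params({"speaking_rate": 1.0, "pitch": 2.5}) == {"pitch": 2.5}
```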
@@ -204,9 +204,7 @@ class GoogleGenerativeAIConversationEntity(
        """Process a sentence."""
        result = conversation.ConversationResult(
            response=intent.IntentResponse(language=user_input.language),
            conversation_id=user_input.conversation_id
            if user_input.conversation_id in self.history
            else ulid.ulid_now(),
            conversation_id=user_input.conversation_id or ulid.ulid_now(),
        )
        assert result.conversation_id
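The simplified expression keeps any caller-supplied conversation id and mints a new ULID only when none was given; membership in the local history no longer matters. A hedged sketch of the fallback behavior (uuid stands in for ulid here):

```python
import uuid  # stand-in for ulid in this sketch

def resolve_conversation_id(requested: str | None) -> str:
    # Keep the caller's id when present; otherwise mint a fresh one.
    return requested or uuid.uuid4().hex

assert resolve_conversation_id("01JGEXAMPLE") == "01JGEXAMPLE"
assert resolve_conversation_id(None)  # always a non-empty id
```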
@@ -9,6 +9,7 @@ from google.oauth2.credentials import Credentials
from googleapiclient.discovery import Resource, build
from googleapiclient.errors import HttpError
from googleapiclient.http import BatchHttpRequest, HttpRequest
from httplib2 import ServerNotFoundError

from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
@@ -115,7 +116,7 @@ class AsyncConfigEntryAuth:
        def response_handler(_, response, exception: HttpError) -> None:
            if exception is not None:
                raise GoogleTasksApiError(
                    f"Google Tasks API responded with error ({exception.status_code})"
                    f"Google Tasks API responded with error ({exception.reason or exception.status_code})"
                ) from exception
            if response:
                data = json.loads(response)
@@ -150,9 +151,9 @@ class AsyncConfigEntryAuth:
    async def _execute(self, request: HttpRequest | BatchHttpRequest) -> Any:
        try:
            result = await self._hass.async_add_executor_job(request.execute)
        except HttpError as err:
        except (HttpError, ServerNotFoundError) as err:
            raise GoogleTasksApiError(
                f"Google Tasks API responded with error ({err.status_code})"
                f"Google Tasks API responded with: {err.reason or err.status_code}"
            ) from err
        if result:
            _raise_if_error(result)
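Both hunks follow the same error-reporting idea: prefer the human-readable `reason` and fall back to the numeric status code when wrapping the upstream exception. A condensed sketch of the pattern under generic names:

```python
class ApiError(Exception):
    """Generic stand-in for GoogleTasksApiError in this sketch."""

def wrap_http_error(err: Exception) -> ApiError:
    # Prefer the textual reason; fall back to the status code if absent.
    detail = getattr(err, "reason", None) or getattr(err, "status_code", "unknown")
    return ApiError(f"API responded with error ({detail})")
```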
@@ -10,6 +10,7 @@ from typing import Any, cast

from aiohasupervisor.exceptions import (
    SupervisorBadRequestError,
    SupervisorError,
    SupervisorNotFoundError,
)
from aiohasupervisor.models import (
@@ -23,8 +24,10 @@ from homeassistant.components.backup import (
    AgentBackup,
    BackupAgent,
    BackupReaderWriter,
    BackupReaderWriterError,
    CreateBackupEvent,
    Folder,
    IncorrectPasswordError,
    NewBackup,
    WrittenBackup,
)
@@ -213,6 +216,10 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
        password: str | None,
    ) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]:
        """Create a backup."""
        if not include_homeassistant and include_database:
            raise HomeAssistantError(
                "Cannot create a backup with database but without Home Assistant"
            )
        manager = self._hass.data[DATA_MANAGER]

        include_addons_set: supervisor_backups.AddonSet | set[str] | None = None
@@ -233,20 +240,23 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
        ]
        locations = [agent.location for agent in hassio_agents]

        backup = await self._client.backups.partial_backup(
            supervisor_backups.PartialBackupOptions(
                addons=include_addons_set,
                folders=include_folders_set,
                homeassistant=include_homeassistant,
                name=backup_name,
                password=password,
                compressed=True,
                location=locations or LOCATION_CLOUD_BACKUP,
                homeassistant_exclude_database=not include_database,
                background=True,
                extra=extra_metadata,
        try:
            backup = await self._client.backups.partial_backup(
                supervisor_backups.PartialBackupOptions(
                    addons=include_addons_set,
                    folders=include_folders_set,
                    homeassistant=include_homeassistant,
                    name=backup_name,
                    password=password,
                    compressed=True,
                    location=locations or LOCATION_CLOUD_BACKUP,
                    homeassistant_exclude_database=not include_database,
                    background=True,
                    extra=extra_metadata,
                )
            )
        )
        except SupervisorError as err:
            raise BackupReaderWriterError(f"Error creating backup: {err}") from err
        backup_task = self._hass.async_create_task(
            self._async_wait_for_backup(
                backup, remove_after_upload=not bool(locations)
@@ -278,22 +288,35 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
        finally:
            unsub()
        if not backup_id:
            raise HomeAssistantError("Backup failed")
            raise BackupReaderWriterError("Backup failed")

        async def open_backup() -> AsyncIterator[bytes]:
            return await self._client.backups.download_backup(backup_id)
            try:
                return await self._client.backups.download_backup(backup_id)
            except SupervisorError as err:
                raise BackupReaderWriterError(
                    f"Error downloading backup: {err}"
                ) from err

        async def remove_backup() -> None:
            if not remove_after_upload:
                return
            await self._client.backups.remove_backup(
                backup_id,
                options=supervisor_backups.RemoveBackupOptions(
                    location={LOCATION_CLOUD_BACKUP}
                ),
            )
            try:
                await self._client.backups.remove_backup(
                    backup_id,
                    options=supervisor_backups.RemoveBackupOptions(
                        location={LOCATION_CLOUD_BACKUP}
                    ),
                )
            except SupervisorError as err:
                raise BackupReaderWriterError(f"Error removing backup: {err}") from err

        details = await self._client.backups.backup_info(backup_id)
        try:
            details = await self._client.backups.backup_info(backup_id)
        except SupervisorError as err:
            raise BackupReaderWriterError(
                f"Error getting backup details: {err}"
            ) from err

        return WrittenBackup(
            backup=_backup_details_to_agent_backup(details),
@@ -359,8 +382,16 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
        restore_homeassistant: bool,
    ) -> None:
        """Restore a backup."""
        if restore_homeassistant and not restore_database:
            raise HomeAssistantError("Cannot restore Home Assistant without database")
        manager = self._hass.data[DATA_MANAGER]
        # The backup manager has already checked that the backup exists so we don't need to
        # check that here.
        backup = await manager.backup_agents[agent_id].async_get_backup(backup_id)
        if (
            backup
            and restore_homeassistant
            and restore_database != backup.database_included
        ):
            raise HomeAssistantError("Restore database must match backup")
        if not restore_homeassistant and restore_database:
            raise HomeAssistantError("Cannot restore database without Home Assistant")
        restore_addons_set = set(restore_addons) if restore_addons else None
@@ -370,7 +401,6 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
            else None
        )

        manager = self._hass.data[DATA_MANAGER]
        restore_location: str | None
        if manager.backup_agents[agent_id].domain != DOMAIN:
            # Download the backup to the supervisor. Supervisor will clean up the backup
@@ -385,17 +415,24 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
        agent = cast(SupervisorBackupAgent, manager.backup_agents[agent_id])
        restore_location = agent.location

        job = await self._client.backups.partial_restore(
            backup_id,
            supervisor_backups.PartialRestoreOptions(
                addons=restore_addons_set,
                folders=restore_folders_set,
                homeassistant=restore_homeassistant,
                password=password,
                background=True,
                location=restore_location,
            ),
        )
        try:
            job = await self._client.backups.partial_restore(
                backup_id,
                supervisor_backups.PartialRestoreOptions(
                    addons=restore_addons_set,
                    folders=restore_folders_set,
                    homeassistant=restore_homeassistant,
                    password=password,
                    background=True,
                    location=restore_location,
                ),
            )
        except SupervisorBadRequestError as err:
            # Supervisor currently does not transmit machine parsable error types
            message = err.args[0]
            if message.startswith("Invalid password for backup"):
                raise IncorrectPasswordError(message) from err
            raise HomeAssistantError(message) from err

        restore_complete = asyncio.Event()
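The recurring shape in this file: every Supervisor await is wrapped so low-level `SupervisorError`s surface as the backup framework's `BackupReaderWriterError`, with the cause chain preserved. A minimal sketch of that translation under generic names:

```python
# Sketch: convert one exception family into the error type the calling
# framework expects, keeping the original as the cause.
class BackendError(Exception): ...
class FrameworkError(Exception): ...

async def translated(coro, action: str):
    try:
        return await coro
    except BackendError as err:
        raise FrameworkError(f"Error {action}: {err}") from err
```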
@@ -12,7 +12,7 @@
  "requirements": [
    "xknx==3.4.0",
    "xknxproject==3.8.1",
    "knx-frontend==2024.11.16.205004"
    "knx-frontend==2024.12.26.233449"
  ],
  "single_config_entry": true
}
@@ -3,23 +3,30 @@
    "step": {
      "connection_type": {
        "title": "KNX connection",
        "description": "Please enter the connection type we should use for your KNX connection. \n AUTOMATIC - The integration takes care of the connectivity to your KNX Bus by performing a gateway scan. \n TUNNELING - The integration will connect to your KNX bus via tunneling. \n ROUTING - The integration will connect to your KNX bus via routing.",
        "description": "'Automatic' performs a gateway scan on start, to find a KNX IP interface. It will connect via a tunnel. (Not available if a gateway scan was not successful.) \n\n 'Tunneling' will connect to a specific KNX IP interface over a tunnel. \n\n 'Routing' will use Multicast to communicate with KNX IP routers.",
        "data": {
          "connection_type": "KNX Connection Type"
        },
        "data_description": {
          "connection_type": "Please select the connection type you want to use for your KNX connection."
        }
      },
      "tunnel": {
        "title": "Tunnel",
        "description": "Please select a gateway from the list.",
        "data": {
          "gateway": "KNX Tunnel Connection"
          "gateway": "Please select a gateway from the list."
        },
        "data_description": {
          "gateway": "Select a KNX tunneling interface you want to use for the connection."
        }
      },
      "tcp_tunnel_endpoint": {
        "title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]",
        "description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]",
        "title": "Tunnel endpoint",
        "data": {
          "tunnel_endpoint_ia": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]"
          "tunnel_endpoint_ia": "Select the tunnel endpoint used for the connection."
        },
        "data_description": {
          "tunnel_endpoint_ia": "'Automatic' selects a free tunnel endpoint for you when connecting. If you're unsure, this is the best option."
        }
      },
      "manual_tunnel": {
@@ -27,23 +34,24 @@
        "description": "Please enter the connection information of your tunneling device.",
        "data": {
          "tunneling_type": "KNX Tunneling Type",
          "port": "[%key:common::config_flow::data::port%]",
          "host": "[%key:common::config_flow::data::host%]",
          "port": "[%key:common::config_flow::data::port%]",
          "route_back": "Route back / NAT mode",
          "local_ip": "Local IP interface"
        },
        "data_description": {
          "port": "Port of the KNX/IP tunneling device.",
          "tunneling_type": "Select the tunneling type of your KNX/IP tunneling device. Older interfaces may only support `UDP`.",
          "host": "IP address or hostname of the KNX/IP tunneling device.",
          "port": "Port used by the KNX/IP tunneling device.",
          "route_back": "Enable if your KNXnet/IP tunneling server is behind NAT. Only applies for UDP connections.",
          "local_ip": "Local IP or interface name used for the connection from Home Assistant. Leave blank to use auto-discovery."
        }
      },
      "secure_key_source_menu_tunnel": {
        "title": "KNX IP-Secure",
        "description": "Select how you want to configure KNX/IP Secure.",
        "description": "How do you want to configure KNX/IP Secure?",
        "menu_options": {
          "secure_knxkeys": "Use a `.knxkeys` file containing IP secure keys",
          "secure_knxkeys": "Use a `.knxkeys` file providing IP secure keys",
          "secure_tunnel_manual": "Configure IP secure credentials manually"
        }
      },
@@ -57,20 +65,23 @@
      },
      "secure_knxkeys": {
        "title": "Import KNX Keyring",
        "description": "Please select a `.knxkeys` file to import.",
        "description": "The Keyring is used to encrypt and decrypt KNX IP Secure communication.",
        "data": {
          "knxkeys_file": "Keyring file",
          "knxkeys_password": "The password to decrypt the `.knxkeys` file"
          "knxkeys_password": "Keyring password"
        },
        "data_description": {
          "knxkeys_password": "This was set when exporting the file from ETS."
          "knxkeys_file": "Select a `.knxkeys` file. This can be exported from ETS.",
          "knxkeys_password": "The password to open the `.knxkeys` file was set when exporting."
        }
      },
      "knxkeys_tunnel_select": {
        "title": "Tunnel endpoint",
        "description": "Select the tunnel endpoint used for the connection.",
        "title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
        "data": {
          "user_id": "'Automatic' selects a free tunnel endpoint for you when connecting. If you're unsure, this is the best option."
          "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
        },
        "data_description": {
          "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
        }
      },
      "secure_tunnel_manual": {
@@ -82,7 +93,7 @@
          "device_authentication": "Device authentication password"
        },
        "data_description": {
          "user_id": "This is often tunnel number +1. So 'Tunnel 2' would have User-ID '3'.",
          "user_id": "This usually is tunnel number +1. So the first tunnel in the list presented in ETS would have User-ID `2`.",
          "user_password": "Password for the specific tunnel connection set in the 'Properties' panel of the tunnel in ETS.",
          "device_authentication": "This is set in the 'IP' panel of the interface in ETS."
        }
@@ -95,8 +106,8 @@
        "sync_latency_tolerance": "Network latency tolerance"
      },
      "data_description": {
        "backbone_key": "Can be seen in the 'Security' report of an ETS project. Eg. '00112233445566778899AABBCCDDEEFF'",
        "sync_latency_tolerance": "Default is 1000."
        "backbone_key": "Can be seen in the 'Security' report of your ETS project. Eg. `00112233445566778899AABBCCDDEEFF`",
        "sync_latency_tolerance": "Should be equal to the backbone configuration of your ETS project. Default is `1000`"
      }
    },
    "routing": {
@@ -104,13 +115,16 @@
      "description": "Please configure the routing options.",
      "data": {
        "individual_address": "Individual address",
        "routing_secure": "Use KNX IP Secure",
        "routing_secure": "KNX IP Secure Routing",
        "multicast_group": "Multicast group",
        "multicast_port": "Multicast port",
        "local_ip": "[%key:component::knx::config::step::manual_tunnel::data::local_ip%]"
      },
      "data_description": {
        "individual_address": "KNX address to be used by Home Assistant, e.g. `0.0.4`",
        "routing_secure": "Select if your installation uses encrypted communication according to the KNX IP Secure standard. This setting requires compatible devices and configuration. You'll be prompted for credentials in the next step.",
        "multicast_group": "Multicast group used by your installation. Default is `224.0.23.12`",
        "multicast_port": "Multicast port used by your installation. Default is `3671`",
        "local_ip": "[%key:component::knx::config::step::manual_tunnel::data_description::local_ip%]"
      }
    }
@@ -148,7 +162,7 @@
    },
    "data_description": {
      "state_updater": "Set default for reading states from the KNX Bus. When disabled, Home Assistant will not actively retrieve entity states from the KNX Bus. Can be overridden by `sync_state` entity options.",
      "rate_limit": "Maximum outgoing telegrams per second.\n`0` to disable limit. Recommended: 0 or 20 to 40",
      "rate_limit": "Maximum outgoing telegrams per second.\n`0` to disable limit. Recommended: `0` or between `20` and `40`",
      "telegram_log_size": "Telegrams to keep in memory for KNX panel group monitor. Maximum: {telegram_log_size_max}"
    }
  },
@@ -157,20 +171,27 @@
    "description": "[%key:component::knx::config::step::connection_type::description%]",
    "data": {
      "connection_type": "[%key:component::knx::config::step::connection_type::data::connection_type%]"
    },
    "data_description": {
      "connection_type": "[%key:component::knx::config::step::connection_type::data_description::connection_type%]"
    }
  },
  "tunnel": {
    "title": "[%key:component::knx::config::step::tunnel::title%]",
    "description": "[%key:component::knx::config::step::tunnel::description%]",
    "data": {
      "gateway": "[%key:component::knx::config::step::tunnel::data::gateway%]"
    },
    "data_description": {
      "gateway": "[%key:component::knx::config::step::tunnel::data_description::gateway%]"
    }
  },
  "tcp_tunnel_endpoint": {
    "title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]",
    "description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]",
    "title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
    "data": {
      "tunnel_endpoint_ia": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]"
      "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
    },
    "data_description": {
      "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
    }
  },
  "manual_tunnel": {
@@ -184,6 +205,7 @@
      "local_ip": "[%key:component::knx::config::step::manual_tunnel::data::local_ip%]"
    },
    "data_description": {
      "tunneling_type": "[%key:component::knx::config::step::manual_tunnel::data_description::tunneling_type%]",
      "port": "[%key:component::knx::config::step::manual_tunnel::data_description::port%]",
      "host": "[%key:component::knx::config::step::manual_tunnel::data_description::host%]",
      "route_back": "[%key:component::knx::config::step::manual_tunnel::data_description::route_back%]",
@@ -214,14 +236,17 @@
      "knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data::knxkeys_password%]"
    },
    "data_description": {
      "knxkeys_file": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_file%]",
      "knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_password%]"
    }
  },
  "knxkeys_tunnel_select": {
    "title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]",
    "description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]",
    "title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
    "data": {
      "user_id": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]"
      "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
    },
    "data_description": {
      "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
    }
  },
  "secure_tunnel_manual": {
@@ -262,6 +287,9 @@
    },
    "data_description": {
      "individual_address": "[%key:component::knx::config::step::routing::data_description::individual_address%]",
      "routing_secure": "[%key:component::knx::config::step::routing::data_description::routing_secure%]",
      "multicast_group": "[%key:component::knx::config::step::routing::data_description::multicast_group%]",
      "multicast_port": "[%key:component::knx::config::step::routing::data_description::multicast_port%]",
      "local_ip": "[%key:component::knx::config::step::manual_tunnel::data_description::local_ip%]"
    }
  }
@@ -37,5 +37,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["pylamarzocco"],
  "quality_scale": "platinum",
  "requirements": ["pylamarzocco==1.4.2"]
  "requirements": ["pylamarzocco==1.4.6"]
}

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/mealie",
  "integration_type": "service",
  "iot_class": "local_polling",
  "requirements": ["aiomealie==0.9.4"]
  "requirements": ["aiomealie==0.9.5"]
}
@@ -92,7 +92,7 @@ SERVICE_SET_MEALPLAN_SCHEMA = vol.Any(
                [x.lower() for x in MealplanEntryType]
            ),
            vol.Required(ATTR_NOTE_TITLE): str,
            vol.Required(ATTR_NOTE_TEXT): str,
            vol.Optional(ATTR_NOTE_TEXT): str,
        }
    ),
)

@@ -229,8 +229,8 @@
        "description": "The type of dish to set the recipe to."
      },
      "recipe_id": {
        "name": "[%key:component::mealie::services::get_recipe::fields::recipe_id::name%]",
        "description": "[%key:component::mealie::services::get_recipe::fields::recipe_id::description%]"
        "name": "Recipe ID",
        "description": "The recipe ID or the slug of the recipe to get."
      },
      "note_title": {
        "name": "Meal note title",
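The schema change above makes the note text optional; a quick voluptuous illustration of what callers may now omit:

```python
import voluptuous as vol

schema = vol.Schema(
    {
        vol.Required("note_title"): str,
        vol.Optional("note_text"): str,  # may now be omitted entirely
    }
)

schema({"note_title": "Dinner"})  # valid without note_text after the change
```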
@@ -8,6 +8,6 @@
  "iot_class": "calculated",
  "loggers": ["yt_dlp"],
  "quality_scale": "internal",
  "requirements": ["yt-dlp[default]==2024.12.13"],
  "requirements": ["yt-dlp[default]==2024.12.23"],
  "single_config_entry": true
}
@@ -5,11 +5,11 @@ from __future__ import annotations

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util

from .const import DOMAIN, PLATFORMS
from .const import CONF_AREAS, DOMAIN, LOGGER, PLATFORMS
from .coordinator import NordPoolDataUpdateCoordinator
from .services import async_setup_services

@@ -25,10 +25,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    return True


async def async_setup_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool:
async def async_setup_entry(
    hass: HomeAssistant, config_entry: NordPoolConfigEntry
) -> bool:
    """Set up Nord Pool from a config entry."""

    coordinator = NordPoolDataUpdateCoordinator(hass, entry)
    await cleanup_device(hass, config_entry)

    coordinator = NordPoolDataUpdateCoordinator(hass, config_entry)
    await coordinator.fetch_data(dt_util.utcnow())
    if not coordinator.last_update_success:
        raise ConfigEntryNotReady(
@@ -36,13 +40,33 @@ async def async_setup_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) ->
            translation_key="initial_update_failed",
            translation_placeholders={"error": str(coordinator.last_exception)},
        )
    entry.runtime_data = coordinator
    config_entry.runtime_data = coordinator

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool:
async def async_unload_entry(
    hass: HomeAssistant, config_entry: NordPoolConfigEntry
) -> bool:
    """Unload Nord Pool config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)


async def cleanup_device(
    hass: HomeAssistant, config_entry: NordPoolConfigEntry
) -> None:
    """Cleanup device and entities."""
    device_reg = dr.async_get(hass)

    entries = dr.async_entries_for_config_entry(device_reg, config_entry.entry_id)
    for area in config_entry.data[CONF_AREAS]:
        for entry in entries:
            if entry.identifiers == {(DOMAIN, area)}:
                continue

            LOGGER.debug("Removing device %s", entry.name)
            device_reg.async_update_device(
                entry.id, remove_config_entry_id=config_entry.entry_id
            )
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Callable
|
||||
from datetime import datetime, timedelta
|
||||
from typing import TYPE_CHECKING
|
||||
@@ -73,7 +72,7 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
|
||||
self.hass, self.fetch_data, self.get_next_interval(dt_util.utcnow())
|
||||
)
|
||||
data = await self.api_call()
|
||||
if data:
|
||||
if data and data.entries:
|
||||
self.async_set_updated_data(data)
|
||||
|
||||
async def api_call(self, retry: int = 3) -> DeliveryPeriodsData | None:
|
||||
@@ -90,18 +89,20 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
|
||||
self.config_entry.data[CONF_AREAS],
|
||||
)
|
||||
except (
|
||||
NordPoolEmptyResponseError,
|
||||
NordPoolResponseError,
|
||||
NordPoolError,
|
||||
) as error:
|
||||
LOGGER.debug("Connection error: %s", error)
|
||||
if retry > 0:
|
||||
next_run = (4 - retry) * 15
|
||||
LOGGER.debug("Wait %d seconds for next try", next_run)
|
||||
await asyncio.sleep(next_run)
|
||||
return await self.api_call(retry - 1)
|
||||
self.async_set_update_error(error)
|
||||
|
||||
if data:
|
||||
current_day = dt_util.utcnow().strftime("%Y-%m-%d")
|
||||
for entry in data.entries:
|
||||
if entry.requested_date == current_day:
|
||||
LOGGER.debug("Data for current day found")
|
||||
return data
|
||||
|
||||
self.async_set_update_error(NordPoolEmptyResponseError("No current day data"))
|
||||
return data
|
||||
|
||||
def merge_price_entries(self) -> list[DeliveryPeriodEntry]:
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pynordpool"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pynordpool==0.2.3"],
|
||||
"requirements": ["pynordpool==0.2.4"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/ollama",
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["ollama==0.3.3"]
|
||||
"requirements": ["ollama==0.4.5"]
|
||||
}
|
||||
|
||||
@@ -427,7 +427,7 @@ class OnkyoMediaPlayer(MediaPlayerEntity):
        """
        # HA_VOL * (MAX VOL / 100) * VOL_RESOLUTION
        self._update_receiver(
            "volume", int(volume * (self._max_volume / 100) * self._volume_resolution)
            "volume", round(volume * (self._max_volume / 100) * self._volume_resolution)
        )

    async def async_volume_up(self) -> None:
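The one-word fix matters because `int()` truncates toward zero while `round()` picks the nearest receiver step:

```python
raw = 0.623 * (80 / 100) * 100  # 49.84 receiver volume steps
assert int(raw) == 49    # old behavior: always rounds down
assert round(raw) == 50  # new behavior: nearest step
```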
@@ -2,6 +2,8 @@

from __future__ import annotations

from datetime import datetime, time

from open_meteo import Forecast as OpenMeteoForecast

from homeassistant.components.weather import (
@@ -107,8 +109,9 @@ class OpenMeteoWeatherEntity(

        daily = self.coordinator.data.daily
        for index, date in enumerate(self.coordinator.data.daily.time):
            _datetime = datetime.combine(date=date, time=time(0), tzinfo=dt_util.UTC)
            forecast = Forecast(
                datetime=date.isoformat(),
                datetime=_datetime.isoformat(),
            )

            if daily.weathercode is not None:
@@ -155,12 +158,14 @@ class OpenMeteoWeatherEntity(
        today = dt_util.utcnow()

        hourly = self.coordinator.data.hourly
        for index, datetime in enumerate(self.coordinator.data.hourly.time):
            if dt_util.as_utc(datetime) < today:
        for index, _datetime in enumerate(self.coordinator.data.hourly.time):
            if _datetime.tzinfo is None:
                _datetime = _datetime.replace(tzinfo=dt_util.UTC)
            if _datetime < today:
                continue

            forecast = Forecast(
                datetime=datetime.isoformat(),
                datetime=_datetime.isoformat(),
            )

            if hourly.weather_code is not None:
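Both hunks normalize Open-Meteo timestamps to timezone-aware UTC datetimes before comparing or serializing them. A minimal sketch of the guard:

```python
from datetime import datetime, timezone

def ensure_utc(value: datetime) -> datetime:
    # Attach tzinfo only when missing, so aware values are never shifted.
    return value.replace(tzinfo=timezone.utc) if value.tzinfo is None else value

assert ensure_utc(datetime(2025, 1, 1, 12)).tzinfo is timezone.utc
```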
@@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/opower",
  "iot_class": "cloud_polling",
  "loggers": ["opower"],
  "requirements": ["opower==0.8.6"]
  "requirements": ["opower==0.8.7"]
}
@@ -27,7 +27,7 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN):

    VERSION = 1

    _host: str
    _discovery_info: zeroconf.ZeroconfServiceInfo

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
@@ -137,8 +137,15 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN):
        await self.async_set_unique_id(sn)
        self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host})

        self._host = discovery_info.host
        self.context.update({"configuration_url": f"http://{discovery_info.host}"})
        self._discovery_info = discovery_info
        self.context.update(
            {
                "title_placeholders": {
                    "name": discovery_info.name.replace("._http._tcp.local.", "")
                },
                "configuration_url": f"http://{discovery_info.host}",
            },
        )
        return await self.async_step_zeroconf_confirm()

    async def async_step_zeroconf_confirm(
@@ -149,7 +156,7 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN):

        if user_input is not None:
            peblar = Peblar(
                host=self._host,
                host=self._discovery_info.host,
                session=async_create_clientsession(
                    self.hass, cookie_jar=CookieJar(unsafe=True)
                ),
@@ -165,7 +172,7 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN):
            return self.async_create_entry(
                title="Peblar",
                data={
                    CONF_HOST: self._host,
                    CONF_HOST: self._discovery_info.host,
                    CONF_PASSWORD: user_input[CONF_PASSWORD],
                },
            )
@@ -179,6 +186,10 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN):
                ),
            }
        ),
        description_placeholders={
            "hostname": self._discovery_info.name.replace("._http._tcp.local.", ""),
            "host": self._discovery_info.host,
        },
        errors=errors,
    )
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["peblar==0.3.0"],
|
||||
"requirements": ["peblar==0.3.1"],
|
||||
"zeroconf": [{ "type": "_http._tcp.local.", "name": "pblr-*" }]
|
||||
}
|
||||
|
||||
@@ -20,7 +20,7 @@
      "data_description": {
        "password": "[%key:component::peblar::config::step::user::data_description::password%]"
      },
      "description": "Reauthenticate with your Peblar EV charger.\n\nTo do so, you will need to enter your new password you use to log into Peblar EV charger' web interface."
      "description": "Reauthenticate with your Peblar EV charger.\n\nTo do so, you will need to enter your new password you use to log in to the Peblar EV charger's web interface."
    },
    "reconfigure": {
      "data": {
@@ -31,7 +31,7 @@
        "host": "[%key:component::peblar::config::step::user::data_description::host%]",
        "password": "[%key:component::peblar::config::step::user::data_description::password%]"
      },
      "description": "Reconfigure your Peblar EV charger.\n\nThis allows you to change the IP address of your Peblar EV charger and the password you use to log into its web interface."
      "description": "Reconfigure your Peblar EV charger.\n\nThis allows you to change the IP address of your Peblar EV charger and the password you use to log in to its web interface."
    },
    "user": {
      "data": {
@@ -40,9 +40,9 @@
      },
      "data_description": {
        "host": "The hostname or IP address of your Peblar EV charger on your home network.",
        "password": "The same password as you use to log in to the Peblar EV charger' local web interface."
        "password": "The same password as you use to log in to the Peblar EV charger's local web interface."
      },
      "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need to get the IP address of your Peblar EV charger and the password you use to log into its web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant."
      "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need to get the IP address of your Peblar EV charger and the password you use to log in to its web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant."
    },
    "zeroconf_confirm": {
      "data": {
@@ -51,7 +51,7 @@
      "data_description": {
        "password": "[%key:component::peblar::config::step::user::data_description::password%]"
      },
      "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need the password you use to log into the Peblar EV charger' web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant."
      "description": "Set up your Peblar EV charger {hostname}, on IP address {host}, to integrate with Home Assistant\n\nTo do so, you will need the password you use to log in to the Peblar EV charger's web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant."
    }
  }
},
@@ -5,10 +5,12 @@ from __future__ import annotations

import logging

from aiopegelonline import PegelOnline
from aiopegelonline.const import CONNECT_ERRORS

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import CONF_STATION
@@ -28,7 +30,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: PegelOnlineConfigEntry)
    _LOGGER.debug("Setting up station with uuid %s", station_uuid)

    api = PegelOnline(async_get_clientsession(hass))
    station = await api.async_get_station_details(station_uuid)
    try:
        station = await api.async_get_station_details(station_uuid)
    except CONNECT_ERRORS as err:
        raise ConfigEntryNotReady("Failed to connect") from err

    coordinator = PegelOnlineDataUpdateCoordinator(hass, entry.title, api, station)

@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["aiopegelonline"],
  "requirements": ["aiopegelonline==0.1.0"]
  "requirements": ["aiopegelonline==0.1.1"]
}
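`ConfigEntryNotReady` is the signal that makes Home Assistant retry entry setup later with backoff instead of failing the entry outright. A minimal sketch of the pattern (the `fetch` call and the `OSError` family are stand-ins):

```python
from homeassistant.exceptions import ConfigEntryNotReady

async def async_setup_entry(hass, entry) -> bool:
    try:
        station = await fetch(entry)  # stand-in for the first network call
    except OSError as err:  # stand-in for the client's CONNECT_ERRORS
        # Home Assistant schedules a retry rather than marking setup failed.
        raise ConfigEntryNotReady("Failed to connect") from err
    entry.runtime_data = station
    return True
```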
@@ -2,8 +2,9 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import defaultdict
|
||||
from collections.abc import Callable
|
||||
from contextlib import suppress
|
||||
from dataclasses import astuple, dataclass
|
||||
import logging
|
||||
import string
|
||||
from typing import Any, cast
|
||||
@@ -158,6 +159,22 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class MetricNameWithLabelValues:
|
||||
"""Class to represent a metric with its label values.
|
||||
|
||||
The prometheus client library doesn't easily allow us to get back the
|
||||
information we put into it. Specifically, it is very expensive to query
|
||||
which label values have been set for metrics.
|
||||
|
||||
This class is used to hold a bit of data we need to efficiently remove
|
||||
labelsets from metrics.
|
||||
"""
|
||||
|
||||
metric_name: str
|
||||
label_values: tuple[str, ...]
|
||||
|
||||
|
||||
class PrometheusMetrics:
|
||||
"""Model all of the metrics which should be exposed to Prometheus."""
|
||||
|
||||
@@ -191,6 +208,9 @@ class PrometheusMetrics:
|
||||
else:
|
||||
self.metrics_prefix = ""
|
||||
self._metrics: dict[str, MetricWrapperBase] = {}
|
||||
self._metrics_by_entity_id: dict[str, set[MetricNameWithLabelValues]] = (
|
||||
defaultdict(set)
|
||||
)
|
||||
self._climate_units = climate_units
|
||||
|
||||
def handle_state_changed_event(self, event: Event[EventStateChangedData]) -> None:
|
||||
@@ -202,10 +222,12 @@ class PrometheusMetrics:
|
||||
_LOGGER.debug("Filtered out entity %s", state.entity_id)
|
||||
return
|
||||
|
||||
if (old_state := event.data.get("old_state")) is not None and (
|
||||
old_friendly_name := old_state.attributes.get(ATTR_FRIENDLY_NAME)
|
||||
if (
|
||||
old_state := event.data.get("old_state")
|
||||
) is not None and old_state.attributes.get(
|
||||
ATTR_FRIENDLY_NAME
|
||||
) != state.attributes.get(ATTR_FRIENDLY_NAME):
|
||||
self._remove_labelsets(old_state.entity_id, old_friendly_name)
|
||||
self._remove_labelsets(old_state.entity_id)
|
||||
|
||||
self.handle_state(state)
|
||||
|
||||
@@ -215,30 +237,32 @@ class PrometheusMetrics:
|
||||
_LOGGER.debug("Handling state update for %s", entity_id)
|
||||
|
||||
labels = self._labels(state)
|
||||
state_change = self._metric(
|
||||
"state_change", prometheus_client.Counter, "The number of state changes"
|
||||
)
|
||||
state_change.labels(**labels).inc()
|
||||
|
||||
entity_available = self._metric(
|
||||
self._metric(
|
||||
"state_change",
|
||||
prometheus_client.Counter,
|
||||
"The number of state changes",
|
||||
labels,
|
||||
).inc()
|
||||
|
||||
self._metric(
|
||||
"entity_available",
|
||||
prometheus_client.Gauge,
|
||||
"Entity is available (not in the unavailable or unknown state)",
|
||||
)
|
||||
entity_available.labels(**labels).set(float(state.state not in IGNORED_STATES))
|
||||
labels,
|
||||
).set(float(state.state not in IGNORED_STATES))
|
||||
|
||||
last_updated_time_seconds = self._metric(
|
||||
self._metric(
|
||||
"last_updated_time_seconds",
|
||||
prometheus_client.Gauge,
|
||||
"The last_updated timestamp",
|
||||
)
|
||||
last_updated_time_seconds.labels(**labels).set(state.last_updated.timestamp())
|
||||
labels,
|
||||
).set(state.last_updated.timestamp())
|
||||
|
||||
if state.state in IGNORED_STATES:
|
||||
self._remove_labelsets(
|
||||
entity_id,
|
||||
None,
|
||||
{state_change, entity_available, last_updated_time_seconds},
|
||||
{"state_change", "entity_available", "last_updated_time_seconds"},
|
||||
)
|
||||
else:
|
||||
domain, _ = hacore.split_entity_id(entity_id)
|
||||
@@ -274,67 +298,68 @@ class PrometheusMetrics:
|
||||
def _remove_labelsets(
|
||||
self,
|
||||
entity_id: str,
|
||||
friendly_name: str | None = None,
|
||||
ignored_metrics: set[MetricWrapperBase] | None = None,
|
||||
ignored_metric_names: set[str] | None = None,
|
||||
) -> None:
|
||||
"""Remove labelsets matching the given entity id from all non-ignored metrics."""
|
||||
if ignored_metrics is None:
|
||||
ignored_metrics = set()
|
||||
for metric in list(self._metrics.values()):
|
||||
if metric in ignored_metrics:
|
||||
if ignored_metric_names is None:
|
||||
ignored_metric_names = set()
|
||||
metric_set = self._metrics_by_entity_id[entity_id]
|
||||
removed_metrics = set()
|
||||
for metric in metric_set:
|
||||
metric_name, label_values = astuple(metric)
|
||||
if metric_name in ignored_metric_names:
|
||||
continue
|
||||
for sample in cast(list[prometheus_client.Metric], metric.collect())[
|
||||
0
|
||||
].samples:
|
||||
if sample.labels["entity"] == entity_id and (
|
||||
not friendly_name or sample.labels["friendly_name"] == friendly_name
|
||||
):
|
||||
_LOGGER.debug(
|
||||
"Removing labelset from %s for entity_id: %s",
|
||||
sample.name,
|
||||
entity_id,
|
||||
)
|
||||
with suppress(KeyError):
|
||||
metric.remove(*sample.labels.values())
|
||||
|
||||
_LOGGER.debug(
|
||||
"Removing labelset %s from %s for entity_id: %s",
|
||||
label_values,
|
||||
metric_name,
|
||||
entity_id,
|
||||
)
|
||||
removed_metrics.add(metric)
|
||||
self._metrics[metric_name].remove(*label_values)
|
||||
metric_set -= removed_metrics
|
||||
if not metric_set:
|
||||
del self._metrics_by_entity_id[entity_id]
|
||||
|
||||
def _handle_attributes(self, state: State) -> None:
|
||||
for key, value in state.attributes.items():
|
||||
metric = self._metric(
|
||||
try:
|
||||
value = float(value)
|
||||
except (ValueError, TypeError):
|
||||
continue
|
||||
|
||||
self._metric(
|
||||
f"{state.domain}_attr_{key.lower()}",
|
||||
prometheus_client.Gauge,
|
||||
f"{key} attribute of {state.domain} entity",
|
||||
)
|
||||
|
||||
try:
|
||||
value = float(value)
|
||||
metric.labels(**self._labels(state)).set(value)
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
self._labels(state),
|
||||
).set(value)
|
||||
|
||||
def _metric[_MetricBaseT: MetricWrapperBase](
|
||||
self,
|
||||
metric: str,
|
||||
metric_name: str,
|
||||
factory: type[_MetricBaseT],
|
||||
documentation: str,
|
||||
extra_labels: list[str] | None = None,
|
||||
labels: dict[str, str],
|
||||
) -> _MetricBaseT:
|
||||
labels = ["entity", "friendly_name", "domain"]
|
||||
if extra_labels is not None:
|
||||
labels.extend(extra_labels)
|
||||
|
||||
try:
|
||||
return cast(_MetricBaseT, self._metrics[metric])
|
||||
metric = cast(_MetricBaseT, self._metrics[metric_name])
|
||||
except KeyError:
|
||||
full_metric_name = self._sanitize_metric_name(
|
||||
f"{self.metrics_prefix}{metric}"
|
||||
f"{self.metrics_prefix}{metric_name}"
|
||||
)
|
||||
self._metrics[metric] = factory(
|
||||
self._metrics[metric_name] = factory(
|
||||
full_metric_name,
|
||||
documentation,
|
||||
labels,
|
||||
labels.keys(),
|
||||
registry=prometheus_client.REGISTRY,
|
||||
)
|
||||
return cast(_MetricBaseT, self._metrics[metric])
|
||||
metric = cast(_MetricBaseT, self._metrics[metric_name])
|
||||
self._metrics_by_entity_id[labels["entity"]].add(
|
||||
MetricNameWithLabelValues(metric_name, tuple(labels.values()))
|
||||
)
|
||||
return metric.labels(**labels)
|
||||
|
||||
@staticmethod
|
||||
def _sanitize_metric_name(metric: str) -> str:
|
||||
@@ -356,67 +381,90 @@ class PrometheusMetrics:
|
||||
return value
|
||||
|
||||
@staticmethod
|
||||
def _labels(state: State) -> dict[str, Any]:
|
||||
return {
|
||||
def _labels(
|
||||
state: State,
|
||||
extra_labels: dict[str, str] | None = None,
|
||||
) -> dict[str, Any]:
|
||||
if extra_labels is None:
|
||||
extra_labels = {}
|
||||
labels = {
|
||||
"entity": state.entity_id,
|
||||
"domain": state.domain,
|
||||
"friendly_name": state.attributes.get(ATTR_FRIENDLY_NAME),
|
||||
}
|
||||
if not labels.keys().isdisjoint(extra_labels.keys()):
|
||||
conflicting_keys = labels.keys() & extra_labels.keys()
|
||||
raise ValueError(
|
||||
f"extra_labels contains conflicting keys: {conflicting_keys}"
|
||||
)
|
||||
return labels | extra_labels
|
||||
|
||||
def _battery(self, state: State) -> None:
|
||||
if (battery_level := state.attributes.get(ATTR_BATTERY_LEVEL)) is not None:
|
||||
metric = self._metric(
|
||||
"battery_level_percent",
|
||||
prometheus_client.Gauge,
|
||||
"Battery level as a percentage of its capacity",
|
||||
)
|
||||
try:
|
||||
value = float(battery_level)
|
||||
metric.labels(**self._labels(state)).set(value)
|
||||
except ValueError:
|
||||
pass
|
||||
if (battery_level := state.attributes.get(ATTR_BATTERY_LEVEL)) is None:
|
||||
return
|
||||
|
||||
try:
|
||||
value = float(battery_level)
|
||||
except ValueError:
|
||||
return
|
||||
|
||||
self._metric(
|
||||
"battery_level_percent",
|
||||
prometheus_client.Gauge,
|
||||
"Battery level as a percentage of its capacity",
|
||||
self._labels(state),
|
||||
).set(value)
|
||||
|
||||
def _handle_binary_sensor(self, state: State) -> None:
|
||||
metric = self._metric(
|
||||
if (value := self.state_as_number(state)) is None:
|
||||
return
|
||||
|
||||
self._metric(
|
||||
"binary_sensor_state",
|
||||
prometheus_client.Gauge,
|
||||
"State of the binary sensor (0/1)",
|
||||
)
|
||||
if (value := self.state_as_number(state)) is not None:
|
||||
metric.labels(**self._labels(state)).set(value)
|
||||
self._labels(state),
|
||||
).set(value)
|
||||
|
||||
def _handle_input_boolean(self, state: State) -> None:
|
||||
metric = self._metric(
|
||||
if (value := self.state_as_number(state)) is None:
|
||||
return
|
||||
|
||||
self._metric(
|
||||
"input_boolean_state",
|
||||
prometheus_client.Gauge,
|
||||
"State of the input boolean (0/1)",
|
||||
)
|
||||
if (value := self.state_as_number(state)) is not None:
|
||||
metric.labels(**self._labels(state)).set(value)
|
||||
self._labels(state),
|
||||
).set(value)
|
||||
|
||||
     def _numeric_handler(self, state: State, domain: str, title: str) -> None:
+        if (value := self.state_as_number(state)) is None:
+            return
+
         if unit := self._unit_string(state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)):
             metric = self._metric(
                 f"{domain}_state_{unit}",
                 prometheus_client.Gauge,
                 f"State of the {title} measured in {unit}",
+                self._labels(state),
             )
         else:
             metric = self._metric(
                 f"{domain}_state",
                 prometheus_client.Gauge,
                 f"State of the {title}",
+                self._labels(state),
             )

-        if (value := self.state_as_number(state)) is not None:
-            if (
-                state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
-                == UnitOfTemperature.FAHRENHEIT
-            ):
-                value = TemperatureConverter.convert(
-                    value, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS
-                )
-            metric.labels(**self._labels(state)).set(value)
+        if (
+            state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
+            == UnitOfTemperature.FAHRENHEIT
+        ):
+            value = TemperatureConverter.convert(
+                value, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS
+            )
+
+        metric.set(value)

     def _handle_input_number(self, state: State) -> None:
         self._numeric_handler(state, "input_number", "input number")
@@ -425,88 +473,99 @@ class PrometheusMetrics:
         self._numeric_handler(state, "number", "number")

     def _handle_device_tracker(self, state: State) -> None:
-        metric = self._metric(
+        if (value := self.state_as_number(state)) is None:
+            return
+
+        self._metric(
             "device_tracker_state",
             prometheus_client.Gauge,
             "State of the device tracker (0/1)",
-        )
-        if (value := self.state_as_number(state)) is not None:
-            metric.labels(**self._labels(state)).set(value)
+            self._labels(state),
+        ).set(value)

     def _handle_person(self, state: State) -> None:
-        metric = self._metric(
-            "person_state", prometheus_client.Gauge, "State of the person (0/1)"
-        )
-        if (value := self.state_as_number(state)) is not None:
-            metric.labels(**self._labels(state)).set(value)
+        if (value := self.state_as_number(state)) is None:
+            return
+
+        self._metric(
+            "person_state",
+            prometheus_client.Gauge,
+            "State of the person (0/1)",
+            self._labels(state),
+        ).set(value)

     def _handle_cover(self, state: State) -> None:
-        metric = self._metric(
-            "cover_state",
-            prometheus_client.Gauge,
-            "State of the cover (0/1)",
-            ["state"],
-        )
-
         cover_states = [STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING]
         for cover_state in cover_states:
-            metric.labels(**dict(self._labels(state), state=cover_state)).set(
-                float(cover_state == state.state)
+            metric = self._metric(
+                "cover_state",
+                prometheus_client.Gauge,
+                "State of the cover (0/1)",
+                self._labels(state, {"state": cover_state}),
             )
+            metric.set(float(cover_state == state.state))

         position = state.attributes.get(ATTR_CURRENT_POSITION)
         if position is not None:
-            position_metric = self._metric(
+            self._metric(
                 "cover_position",
                 prometheus_client.Gauge,
                 "Position of the cover (0-100)",
-            )
-            position_metric.labels(**self._labels(state)).set(float(position))
+                self._labels(state),
+            ).set(float(position))

         tilt_position = state.attributes.get(ATTR_CURRENT_TILT_POSITION)
         if tilt_position is not None:
-            tilt_position_metric = self._metric(
+            self._metric(
                 "cover_tilt_position",
                 prometheus_client.Gauge,
                 "Tilt Position of the cover (0-100)",
-            )
-            tilt_position_metric.labels(**self._labels(state)).set(float(tilt_position))
+                self._labels(state),
+            ).set(float(tilt_position))
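For enum-like states (cover above, climate and alarm panel below), the refactor moves metric creation inside the loop so each possible state value becomes its own labelled time series, exactly one of which reads 1.0. A self-contained illustration with the prometheus_client library (the entity label value is invented for the example):

import prometheus_client

registry = prometheus_client.CollectorRegistry()
gauge = prometheus_client.Gauge(
    "cover_state",
    "State of the cover (0/1)",
    labelnames=["entity", "state"],
    registry=registry,
)

current = "open"
for cover_state in ("closed", "closing", "open", "opening"):
    # One series per possible state; only the active one is 1.0.
    gauge.labels(entity="cover.garage", state=cover_state).set(
        float(cover_state == current)
    )

print(prometheus_client.generate_latest(registry).decode())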
     def _handle_light(self, state: State) -> None:
-        metric = self._metric(
+        if (value := self.state_as_number(state)) is None:
+            return
+
+        brightness = state.attributes.get(ATTR_BRIGHTNESS)
+        if state.state == STATE_ON and brightness is not None:
+            value = float(brightness) / 255.0
+            value = value * 100
+
+        self._metric(
             "light_brightness_percent",
             prometheus_client.Gauge,
             "Light brightness percentage (0..100)",
-        )
-
-        if (value := self.state_as_number(state)) is not None:
-            brightness = state.attributes.get(ATTR_BRIGHTNESS)
-            if state.state == STATE_ON and brightness is not None:
-                value = float(brightness) / 255.0
-                value = value * 100
-            metric.labels(**self._labels(state)).set(value)
+            self._labels(state),
+        ).set(value)

     def _handle_lock(self, state: State) -> None:
-        metric = self._metric(
-            "lock_state", prometheus_client.Gauge, "State of the lock (0/1)"
-        )
-        if (value := self.state_as_number(state)) is not None:
-            metric.labels(**self._labels(state)).set(value)
+        if (value := self.state_as_number(state)) is None:
+            return
+
+        self._metric(
+            "lock_state",
+            prometheus_client.Gauge,
+            "State of the lock (0/1)",
+            self._labels(state),
+        ).set(value)

     def _handle_climate_temp(
         self, state: State, attr: str, metric_name: str, metric_description: str
     ) -> None:
-        if (temp := state.attributes.get(attr)) is not None:
-            if self._climate_units == UnitOfTemperature.FAHRENHEIT:
-                temp = TemperatureConverter.convert(
-                    temp, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS
-                )
-            metric = self._metric(
-                metric_name,
-                prometheus_client.Gauge,
-                metric_description,
-            )
-            metric.labels(**self._labels(state)).set(temp)
+        if (temp := state.attributes.get(attr)) is None:
+            return
+
+        if self._climate_units == UnitOfTemperature.FAHRENHEIT:
+            temp = TemperatureConverter.convert(
+                temp, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS
+            )
+        self._metric(
+            metric_name,
+            prometheus_client.Gauge,
+            metric_description,
+            self._labels(state),
+        ).set(temp)

     def _handle_climate(self, state: State) -> None:
         self._handle_climate_temp(
@@ -535,90 +594,75 @@ class PrometheusMetrics:
         )

         if current_action := state.attributes.get(ATTR_HVAC_ACTION):
-            metric = self._metric(
-                "climate_action",
-                prometheus_client.Gauge,
-                "HVAC action",
-                ["action"],
-            )
             for action in HVACAction:
-                metric.labels(**dict(self._labels(state), action=action.value)).set(
-                    float(action == current_action)
-                )
+                self._metric(
+                    "climate_action",
+                    prometheus_client.Gauge,
+                    "HVAC action",
+                    self._labels(state, {"action": action.value}),
+                ).set(float(action == current_action))

         current_mode = state.state
         available_modes = state.attributes.get(ATTR_HVAC_MODES)
         if current_mode and available_modes:
-            metric = self._metric(
-                "climate_mode",
-                prometheus_client.Gauge,
-                "HVAC mode",
-                ["mode"],
-            )
             for mode in available_modes:
-                metric.labels(**dict(self._labels(state), mode=mode)).set(
-                    float(mode == current_mode)
-                )
+                self._metric(
+                    "climate_mode",
+                    prometheus_client.Gauge,
+                    "HVAC mode",
+                    self._labels(state, {"mode": mode}),
+                ).set(float(mode == current_mode))

         preset_mode = state.attributes.get(ATTR_PRESET_MODE)
         available_preset_modes = state.attributes.get(ATTR_PRESET_MODES)
         if preset_mode and available_preset_modes:
-            preset_metric = self._metric(
-                "climate_preset_mode",
-                prometheus_client.Gauge,
-                "Preset mode enum",
-                ["mode"],
-            )
             for mode in available_preset_modes:
-                preset_metric.labels(**dict(self._labels(state), mode=mode)).set(
-                    float(mode == preset_mode)
-                )
+                self._metric(
+                    "climate_preset_mode",
+                    prometheus_client.Gauge,
+                    "Preset mode enum",
+                    self._labels(state, {"mode": mode}),
+                ).set(float(mode == preset_mode))

         fan_mode = state.attributes.get(ATTR_FAN_MODE)
         available_fan_modes = state.attributes.get(ATTR_FAN_MODES)
         if fan_mode and available_fan_modes:
-            fan_mode_metric = self._metric(
-                "climate_fan_mode",
-                prometheus_client.Gauge,
-                "Fan mode enum",
-                ["mode"],
-            )
             for mode in available_fan_modes:
-                fan_mode_metric.labels(**dict(self._labels(state), mode=mode)).set(
-                    float(mode == fan_mode)
-                )
+                self._metric(
+                    "climate_fan_mode",
+                    prometheus_client.Gauge,
+                    "Fan mode enum",
+                    self._labels(state, {"mode": mode}),
+                ).set(float(mode == fan_mode))

     def _handle_humidifier(self, state: State) -> None:
         humidifier_target_humidity_percent = state.attributes.get(ATTR_HUMIDITY)
         if humidifier_target_humidity_percent:
-            metric = self._metric(
+            self._metric(
                 "humidifier_target_humidity_percent",
                 prometheus_client.Gauge,
                 "Target Relative Humidity",
-            )
-            metric.labels(**self._labels(state)).set(humidifier_target_humidity_percent)
+                self._labels(state),
+            ).set(humidifier_target_humidity_percent)

-        metric = self._metric(
-            "humidifier_state",
-            prometheus_client.Gauge,
-            "State of the humidifier (0/1)",
-        )
         if (value := self.state_as_number(state)) is not None:
-            metric.labels(**self._labels(state)).set(value)
+            self._metric(
+                "humidifier_state",
+                prometheus_client.Gauge,
+                "State of the humidifier (0/1)",
+                self._labels(state),
+            ).set(value)

         current_mode = state.attributes.get(ATTR_MODE)
         available_modes = state.attributes.get(ATTR_AVAILABLE_MODES)
         if current_mode and available_modes:
-            metric = self._metric(
-                "humidifier_mode",
-                prometheus_client.Gauge,
-                "Humidifier Mode",
-                ["mode"],
-            )
             for mode in available_modes:
-                metric.labels(**dict(self._labels(state), mode=mode)).set(
-                    float(mode == current_mode)
-                )
+                self._metric(
+                    "humidifier_mode",
+                    prometheus_client.Gauge,
+                    "Humidifier Mode",
+                    self._labels(state, {"mode": mode}),
+                ).set(float(mode == current_mode))

     def _handle_sensor(self, state: State) -> None:
         unit = self._unit_string(state.attributes.get(ATTR_UNIT_OF_MEASUREMENT))
@@ -628,22 +672,24 @@ class PrometheusMetrics:
             if metric is not None:
                 break

-        if metric is not None:
+        if metric is not None and (value := self.state_as_number(state)) is not None:
             documentation = "State of the sensor"
             if unit:
                 documentation = f"Sensor data measured in {unit}"

-            _metric = self._metric(metric, prometheus_client.Gauge, documentation)
-
-            if (value := self.state_as_number(state)) is not None:
-                if (
-                    state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
-                    == UnitOfTemperature.FAHRENHEIT
-                ):
-                    value = TemperatureConverter.convert(
-                        value, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS
-                    )
-                _metric.labels(**self._labels(state)).set(value)
+            if (
+                state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
+                == UnitOfTemperature.FAHRENHEIT
+            ):
+                value = TemperatureConverter.convert(
+                    value, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS
+                )
+            self._metric(
+                metric,
+                prometheus_client.Gauge,
+                documentation,
+                self._labels(state),
+            ).set(value)

         self._battery(state)

@@ -702,114 +748,107 @@ class PrometheusMetrics:
         return units.get(unit, default)

     def _handle_switch(self, state: State) -> None:
-        metric = self._metric(
-            "switch_state", prometheus_client.Gauge, "State of the switch (0/1)"
-        )
-
         if (value := self.state_as_number(state)) is not None:
-            metric.labels(**self._labels(state)).set(value)
+            self._metric(
+                "switch_state",
+                prometheus_client.Gauge,
+                "State of the switch (0/1)",
+                self._labels(state),
+            ).set(value)

         self._handle_attributes(state)

     def _handle_fan(self, state: State) -> None:
-        metric = self._metric(
-            "fan_state", prometheus_client.Gauge, "State of the fan (0/1)"
-        )
-
         if (value := self.state_as_number(state)) is not None:
-            metric.labels(**self._labels(state)).set(value)
+            self._metric(
+                "fan_state",
+                prometheus_client.Gauge,
+                "State of the fan (0/1)",
+                self._labels(state),
+            ).set(value)

         fan_speed_percent = state.attributes.get(ATTR_PERCENTAGE)
         if fan_speed_percent is not None:
-            fan_speed_metric = self._metric(
+            self._metric(
                 "fan_speed_percent",
                 prometheus_client.Gauge,
                 "Fan speed percent (0-100)",
-            )
-            fan_speed_metric.labels(**self._labels(state)).set(float(fan_speed_percent))
+                self._labels(state),
+            ).set(float(fan_speed_percent))

         fan_is_oscillating = state.attributes.get(ATTR_OSCILLATING)
         if fan_is_oscillating is not None:
-            fan_oscillating_metric = self._metric(
+            self._metric(
                 "fan_is_oscillating",
                 prometheus_client.Gauge,
                 "Whether the fan is oscillating (0/1)",
-            )
-            fan_oscillating_metric.labels(**self._labels(state)).set(
-                float(fan_is_oscillating)
-            )
+                self._labels(state),
+            ).set(float(fan_is_oscillating))

         fan_preset_mode = state.attributes.get(ATTR_PRESET_MODE)
         available_modes = state.attributes.get(ATTR_PRESET_MODES)
         if fan_preset_mode and available_modes:
-            fan_preset_metric = self._metric(
-                "fan_preset_mode",
-                prometheus_client.Gauge,
-                "Fan preset mode enum",
-                ["mode"],
-            )
             for mode in available_modes:
-                fan_preset_metric.labels(**dict(self._labels(state), mode=mode)).set(
-                    float(mode == fan_preset_mode)
-                )
+                self._metric(
+                    "fan_preset_mode",
+                    prometheus_client.Gauge,
+                    "Fan preset mode enum",
+                    self._labels(state, {"mode": mode}),
+                ).set(float(mode == fan_preset_mode))

         fan_direction = state.attributes.get(ATTR_DIRECTION)
-        if fan_direction is not None:
-            fan_direction_metric = self._metric(
+        if fan_direction in {DIRECTION_FORWARD, DIRECTION_REVERSE}:
+            self._metric(
                 "fan_direction_reversed",
                 prometheus_client.Gauge,
                 "Fan direction reversed (bool)",
-            )
-            if fan_direction == DIRECTION_FORWARD:
-                fan_direction_metric.labels(**self._labels(state)).set(0)
-            elif fan_direction == DIRECTION_REVERSE:
-                fan_direction_metric.labels(**self._labels(state)).set(1)
+                self._labels(state),
+            ).set(float(fan_direction == DIRECTION_REVERSE))

     def _handle_zwave(self, state: State) -> None:
         self._battery(state)

     def _handle_automation(self, state: State) -> None:
-        metric = self._metric(
+        self._metric(
             "automation_triggered_count",
             prometheus_client.Counter,
             "Count of times an automation has been triggered",
-        )
-
-        metric.labels(**self._labels(state)).inc()
+            self._labels(state),
+        ).inc()

     def _handle_counter(self, state: State) -> None:
-        metric = self._metric(
+        if (value := self.state_as_number(state)) is None:
+            return
+
+        self._metric(
             "counter_value",
             prometheus_client.Gauge,
             "Value of counter entities",
-        )
-        if (value := self.state_as_number(state)) is not None:
-            metric.labels(**self._labels(state)).set(value)
+            self._labels(state),
+        ).set(value)

     def _handle_update(self, state: State) -> None:
-        metric = self._metric(
+        if (value := self.state_as_number(state)) is None:
+            return
+
+        self._metric(
             "update_state",
             prometheus_client.Gauge,
             "Update state, indicating if an update is available (0/1)",
-        )
-        if (value := self.state_as_number(state)) is not None:
-            metric.labels(**self._labels(state)).set(value)
+            self._labels(state),
+        ).set(value)

     def _handle_alarm_control_panel(self, state: State) -> None:
         current_state = state.state

         if current_state:
-            metric = self._metric(
-                "alarm_control_panel_state",
-                prometheus_client.Gauge,
-                "State of the alarm control panel (0/1)",
-                ["state"],
-            )
-
             for alarm_state in AlarmControlPanelState:
-                metric.labels(**dict(self._labels(state), state=alarm_state.value)).set(
-                    float(alarm_state.value == current_state)
-                )
+                self._metric(
+                    "alarm_control_panel_state",
+                    prometheus_client.Gauge,
+                    "State of the alarm control panel (0/1)",
+                    self._labels(state, {"state": alarm_state.value}),
+                ).set(float(alarm_state.value == current_state))


 class PrometheusView(HomeAssistantView):
@@ -719,6 +719,16 @@ class Recorder(threading.Thread):
         if schema_status is None:
             # Give up if we could not validate the schema
             return
+        if schema_status.current_version > SCHEMA_VERSION:
+            _LOGGER.error(
+                "The database schema version %s is newer than %s which is the maximum "
+                "database schema version supported by the installed version of "
+                "Home Assistant Core, either upgrade Home Assistant Core or restore "
+                "the database from a backup compatible with this version",
+                schema_status.current_version,
+                SCHEMA_VERSION,
+            )
+            return
         self.schema_version = schema_status.current_version

         if not schema_status.migration_needed and not schema_status.schema_errors:
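This new guard aborts recorder startup when the database was written by a newer schema, the situation after downgrading Home Assistant Core; previously the recorder would proceed against a layout it does not understand. The decision reduces to a comparison, sketched here for clarity:

SCHEMA_VERSION = 48  # maximum schema this build understands (bumped below)


def schema_is_supported(current_version: int) -> bool:
    # Databases written by a newer Home Assistant are refused rather
    # than risk corrupting a layout this build does not know about.
    return current_version <= SCHEMA_VERSION


assert schema_is_supported(48)
assert not schema_is_supported(49)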
@@ -77,7 +77,7 @@ class LegacyBase(DeclarativeBase):
     """Base class for tables, used for schema migration."""


-SCHEMA_VERSION = 47
+SCHEMA_VERSION = 48

 _LOGGER = logging.getLogger(__name__)


@@ -1976,6 +1976,17 @@ class _SchemaVersion47Migrator(_SchemaVersionMigrator, target_version=47):
         )


+class _SchemaVersion48Migrator(_SchemaVersionMigrator, target_version=48):
+    def _apply_update(self) -> None:
+        """Version specific update method."""
+        # https://github.com/home-assistant/core/issues/134002
+        # If the system has unmigrated states rows, we need to
+        # ensure they are migrated now so the new optimized
+        # queries can be used. For most systems, this should
+        # be very fast and nothing will be migrated.
+        _migrate_columns_to_timestamp(self.instance, self.session_maker, self.engine)
+
+
 def _migrate_statistics_columns_to_timestamp_removing_duplicates(
     hass: HomeAssistant,
     instance: Recorder,
@@ -2109,7 +2120,8 @@ def _migrate_columns_to_timestamp(
             connection.execute(
                 text(
                     'UPDATE events set time_fired_ts=strftime("%s",time_fired) + '
-                    "cast(substr(time_fired,-7) AS FLOAT);"
+                    "cast(substr(time_fired,-7) AS FLOAT) "
+                    "WHERE time_fired_ts is NULL;"
                 )
             )
             connection.execute(
@@ -2117,7 +2129,8 @@ def _migrate_columns_to_timestamp(
                     'UPDATE states set last_updated_ts=strftime("%s",last_updated) + '
                     "cast(substr(last_updated,-7) AS FLOAT), "
                     'last_changed_ts=strftime("%s",last_changed) + '
-                    "cast(substr(last_changed,-7) AS FLOAT);"
+                    "cast(substr(last_changed,-7) AS FLOAT) "
+                    " WHERE last_updated_ts is NULL;"
                 )
             )
         elif engine.dialect.name == SupportedDialect.MYSQL:
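The appended WHERE clauses make these backfills restartable: rows that already carry a timestamp are skipped, so the schema-48 migrator above can safely re-run the statement on systems that were already migrated. A toy SQLite demonstration (table trimmed to the two relevant columns):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE events (time_fired TEXT, time_fired_ts FLOAT)")
conn.executemany(
    "INSERT INTO events VALUES (?, ?)",
    [
        ("2024-01-01 00:00:00.000001", None),          # not yet migrated
        ("2024-01-02 00:00:00.000002", 1704153600.0),  # already migrated
    ],
)
cur = conn.execute(
    'UPDATE events SET time_fired_ts=strftime("%s",time_fired) + '
    "cast(substr(time_fired,-7) AS FLOAT) "
    "WHERE time_fired_ts IS NULL"
)
print(cur.rowcount)  # 1 -- the already-migrated row is left untouched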
@@ -100,7 +100,7 @@ async def async_setup_entry(
         if not entity_description.supported(reolink_data.host.api, channel):
             continue
         stream_url = await reolink_data.host.api.get_stream_source(
-            channel, entity_description.stream
+            channel, entity_description.stream, False
         )
         if stream_url is None and "snapshots" not in entity_description.stream:
             continue

@@ -19,5 +19,5 @@
   "iot_class": "local_push",
   "loggers": ["reolink_aio"],
   "quality_scale": "platinum",
-  "requirements": ["reolink-aio==0.11.5"]
+  "requirements": ["reolink-aio==0.11.6"]
 }

@@ -81,6 +81,8 @@ class ReolinkVODMediaSource(MediaSource):

         def get_vod_type() -> VodRequestType:
             if filename.endswith(".mp4"):
+                if host.api.is_nvr:
+                    return VodRequestType.DOWNLOAD
                 return VodRequestType.PLAYBACK
             if host.api.is_nvr:
                 return VodRequestType.FLV

@@ -567,6 +567,7 @@
             "stayoff": "Stay off",
             "auto": "[%key:component::reolink::entity::select::day_night_mode::state::auto%]",
+            "alwaysonatnight": "Auto & always on at night",
             "always": "Always on",
             "alwayson": "Always on"
           }
         },
@@ -1,6 +1,5 @@
 """The russound_rio component."""

-import asyncio
 import logging

 from aiorussound import RussoundClient, RussoundTcpConnectionHandler
@@ -11,7 +10,7 @@ from homeassistant.const import CONF_HOST, CONF_PORT, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady

-from .const import CONNECT_TIMEOUT, DOMAIN, RUSSOUND_RIO_EXCEPTIONS
+from .const import DOMAIN, RUSSOUND_RIO_EXCEPTIONS

 PLATFORMS = [Platform.MEDIA_PLAYER]

@@ -40,8 +39,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: RussoundConfigEntry) ->
     await client.register_state_update_callbacks(_connection_update_callback)

     try:
-        async with asyncio.timeout(CONNECT_TIMEOUT):
-            await client.connect()
+        await client.connect()
     except RUSSOUND_RIO_EXCEPTIONS as err:
         raise ConfigEntryNotReady(
             translation_domain=DOMAIN,

@@ -2,7 +2,6 @@

 from __future__ import annotations

-import asyncio
 import logging
 from typing import Any

@@ -17,7 +16,7 @@ from homeassistant.config_entries import (
 from homeassistant.const import CONF_HOST, CONF_PORT
 from homeassistant.helpers import config_validation as cv

-from .const import CONNECT_TIMEOUT, DOMAIN, RUSSOUND_RIO_EXCEPTIONS
+from .const import DOMAIN, RUSSOUND_RIO_EXCEPTIONS

 DATA_SCHEMA = vol.Schema(
     {
@@ -45,10 +44,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):

         client = RussoundClient(RussoundTcpConnectionHandler(host, port))
         try:
-            async with asyncio.timeout(CONNECT_TIMEOUT):
-                await client.connect()
-                controller = client.controllers[1]
-                await client.disconnect()
+            await client.connect()
+            controller = client.controllers[1]
+            await client.disconnect()
         except RUSSOUND_RIO_EXCEPTIONS:
             _LOGGER.exception("Could not connect to Russound RIO")
             errors["base"] = "cannot_connect"
@@ -90,10 +88,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
         # Connection logic is repeated here since this method will be removed in future releases
         client = RussoundClient(RussoundTcpConnectionHandler(host, port))
         try:
-            async with asyncio.timeout(CONNECT_TIMEOUT):
-                await client.connect()
-                controller = client.controllers[1]
-                await client.disconnect()
+            await client.connect()
+            controller = client.controllers[1]
+            await client.disconnect()
         except RUSSOUND_RIO_EXCEPTIONS:
             _LOGGER.exception("Could not connect to Russound RIO")
             return self.async_abort(

@@ -16,9 +16,6 @@ RUSSOUND_RIO_EXCEPTIONS = (
     asyncio.CancelledError,
 )

-
-CONNECT_TIMEOUT = 15
-
 MP_FEATURES_BY_FLAG = {
     FeatureFlag.COMMANDS_ZONE_MUTE_OFF_ON: MediaPlayerEntityFeature.VOLUME_MUTE
 }
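The asyncio.timeout wrapper and the CONNECT_TIMEOUT constant are dropped around client.connect() in all three files above, presumably because the bumped aiorussound (4.1.1 below) manages its own connect deadline; the library side is not shown in this diff. For reference, the removed pattern bounds any awaitable like so:

import asyncio


async def connect_with_deadline(client, seconds: float = 15) -> None:
    # The pattern removed above: cancel the connect attempt when the
    # deadline passes (asyncio.timeout requires Python 3.11+).
    async with asyncio.timeout(seconds):
        await client.connect()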
@@ -7,5 +7,5 @@
   "iot_class": "local_push",
   "loggers": ["aiorussound"],
   "quality_scale": "silver",
-  "requirements": ["aiorussound==4.1.0"]
+  "requirements": ["aiorussound==4.1.1"]
 }
@@ -141,8 +141,10 @@
         "options": {
           "apparent_power": "[%key:component::sensor::entity_component::apparent_power::name%]",
          "aqi": "[%key:component::sensor::entity_component::aqi::name%]",
+          "area": "[%key:component::sensor::entity_component::area::name%]",
           "atmospheric_pressure": "[%key:component::sensor::entity_component::atmospheric_pressure::name%]",
           "battery": "[%key:component::sensor::entity_component::battery::name%]",
+          "blood_glucose_concentration": "[%key:component::sensor::entity_component::blood_glucose_concentration::name%]",
           "carbon_dioxide": "[%key:component::sensor::entity_component::carbon_dioxide::name%]",
           "carbon_monoxide": "[%key:component::sensor::entity_component::carbon_monoxide::name%]",
           "conductivity": "[%key:component::sensor::entity_component::conductivity::name%]",

@@ -23,7 +23,7 @@
         "is_illuminance": "Current {entity_name} illuminance",
         "is_irradiance": "Current {entity_name} irradiance",
         "is_moisture": "Current {entity_name} moisture",
-        "is_monetary": "Current {entity_name} money",
+        "is_monetary": "Current {entity_name} balance",
         "is_nitrogen_dioxide": "Current {entity_name} nitrogen dioxide concentration level",
         "is_nitrogen_monoxide": "Current {entity_name} nitrogen monoxide concentration level",
         "is_nitrous_oxide": "Current {entity_name} nitrous oxide concentration level",
@@ -75,7 +75,7 @@
         "illuminance": "{entity_name} illuminance changes",
         "irradiance": "{entity_name} irradiance changes",
         "moisture": "{entity_name} moisture changes",
-        "monetary": "{entity_name} money changes",
+        "monetary": "{entity_name} balance changes",
         "nitrogen_dioxide": "{entity_name} nitrogen dioxide concentration changes",
         "nitrogen_monoxide": "{entity_name} nitrogen monoxide concentration changes",
         "nitrous_oxide": "{entity_name} nitrous oxide concentration changes",
@@ -8,7 +8,7 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["aioshelly"],
-  "requirements": ["aioshelly==12.1.0"],
+  "requirements": ["aioshelly==12.2.0"],
   "zeroconf": [
     {
       "type": "_http._tcp.local.",
@@ -485,7 +485,7 @@ class SimpliSafe:
         except Exception as err:  # noqa: BLE001
             LOGGER.error("Unknown exception while connecting to websocket: %s", err)

-        LOGGER.warning("Reconnecting to websocket")
+        LOGGER.debug("Reconnecting to websocket")
         await self._async_cancel_websocket_loop()
         self._websocket_reconnect_task = self._hass.async_create_task(
             self._async_start_websocket_loop()
@@ -331,9 +331,16 @@ class SQLSensor(ManualTriggerSensorEntity):
             entry_type=DeviceEntryType.SERVICE,
             identifiers={(DOMAIN, unique_id)},
             manufacturer="SQL",
-            name=self.name,
+            name=self._rendered.get(CONF_NAME),
         )

+    @property
+    def name(self) -> str | None:
+        """Name of the entity."""
+        if self.has_entity_name:
+            return self._attr_name
+        return self._rendered.get(CONF_NAME)
+
     async def async_added_to_hass(self) -> None:
         """Call when entity about to be added to hass."""
         await super().async_added_to_hass()
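The new name property resolves the two naming modes explicitly: with has_entity_name the framework-supplied _attr_name wins, otherwise the name rendered from the configured template is used, and the device registry entry now always receives the rendered name. A minimal stand-in showing the fallback order (attribute values invented for the example):

class _NameFallbackSketch:
    has_entity_name = True
    _attr_name = "Query result"
    _rendered = {"name": "My SQL query"}

    @property
    def name(self) -> str | None:
        # Mirror of the property above, with CONF_NAME inlined as "name".
        if self.has_entity_name:
            return self._attr_name
        return self._rendered.get("name")


sensor = _NameFallbackSketch()
print(sensor.name)  # "Query result"
sensor.has_entity_name = False
print(sensor.name)  # "My SQL query"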
@@ -113,7 +113,7 @@ class SwissPublicTransportDataUpdateCoordinator(
                 destination=self._opendata.to_name,
                 remaining_time=str(self.remaining_time(connections[i]["departure"])),
                 delay=connections[i]["delay"],
-                line=connections[i]["line"],
+                line=connections[i].get("line"),
             )
             for i in range(limit)
             if len(connections) > i and connections[i] is not None
@@ -134,7 +134,7 @@ class SwissPublicTransportDataUpdateCoordinator(
                 "train_number": connection["train_number"],
                 "transfers": connection["transfers"],
                 "delay": connection["delay"],
-                "line": connection["line"],
+                "line": connection.get("line"),
             }
             for connection in await self.fetch_connections(limit)
         ]
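Both hunks swap subscript access for dict.get because the API can omit the line field; .get yields None instead of raising KeyError, which previously aborted the whole refresh. In short:

connection = {"delay": 0}  # a response without a "line" field

print(connection.get("line"))  # None -- the update continues
try:
    connection["line"]
except KeyError:
    print("subscripting a missing key raises and would fail the refresh")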
@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/syncthru",
   "iot_class": "local_polling",
   "loggers": ["pysyncthru"],
-  "requirements": ["PySyncThru==0.7.10", "url-normalize==1.4.3"],
+  "requirements": ["PySyncThru==0.8.0", "url-normalize==1.4.3"],
   "ssdp": [
     {
       "deviceType": "urn:schemas-upnp-org:device:Printer:1",
@@ -429,16 +429,17 @@ async def async_setup_entry(
                     is_enabled = check_legacy_resource(
                         f"{_type}_{argument}", legacy_resources
                     )
-                    loaded_resources.add(slugify(f"{_type}_{argument}"))
-                    entities.append(
-                        SystemMonitorSensor(
-                            coordinator,
-                            sensor_description,
-                            entry.entry_id,
-                            argument,
-                            is_enabled,
+                    if (_add := slugify(f"{_type}_{argument}")) not in loaded_resources:
+                        loaded_resources.add(_add)
+                        entities.append(
+                            SystemMonitorSensor(
+                                coordinator,
+                                sensor_description,
+                                entry.entry_id,
+                                argument,
+                                is_enabled,
+                            )
                         )
-                    )
                 continue

             if _type.startswith("ipv"):
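The fix wraps the append in a membership check so the same resource cannot be registered twice; the walrus operator computes the slug once and reuses it for both the check and the add. Isolated, the pattern looks like this (the duplicated resource tuple is invented for the example):

from homeassistant.util import slugify

loaded_resources: set[str] = set()
entities: list[str] = []

for _type, argument in [("disk_use_percent", "/"), ("disk_use_percent", "/")]:
    # Slugify once, then use the result for both the lookup and the add.
    if (_add := slugify(f"{_type}_{argument}")) not in loaded_resources:
        loaded_resources.add(_add)
        entities.append(_add)

print(entities)  # the duplicated resource is only added once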
@@ -64,6 +64,15 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
 async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) -> bool:
     """Set up TeslaFleet config."""

+    try:
+        implementation = await async_get_config_entry_implementation(hass, entry)
+    except ValueError as e:
+        # Remove invalid implementation from config entry then raise AuthFailed
+        hass.config_entries.async_update_entry(
+            entry, data={"auth_implementation": None}
+        )
+        raise ConfigEntryAuthFailed from e
+
     access_token = entry.data[CONF_TOKEN][CONF_ACCESS_TOKEN]
     session = async_get_clientsession(hass)

@@ -71,7 +80,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) -
     scopes: list[Scope] = [Scope(s) for s in token["scp"]]
     region: str = token["ou_code"].lower()

-    implementation = await async_get_config_entry_implementation(hass, entry)
     oauth_session = OAuth2Session(hass, entry, implementation)
     refresh_lock = asyncio.Lock()
@@ -85,6 +85,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) -

     scopes = calls[0]["scopes"]
     region = calls[0]["region"]
+    vehicle_metadata = calls[0]["vehicles"]
     products = calls[1]["response"]

     device_registry = dr.async_get(hass)
@@ -102,7 +103,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) -
     )

     for product in products:
-        if "vin" in product and Scope.VEHICLE_DEVICE_DATA in scopes:
+        if (
+            "vin" in product
+            and vehicle_metadata.get(product["vin"], {}).get("access")
+            and Scope.VEHICLE_DEVICE_DATA in scopes
+        ):
             # Remove the protobuff 'cached_data' that we do not use to save memory
             product.pop("cached_data", None)
             vin = product["vin"]
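The widened condition consults the per-VIN metadata returned alongside the scopes, so vehicles the credentials cannot actually read are skipped instead of being set up and failing later. A standalone sketch of the filter, with the data shapes implied by the diff (VINs invented):

vehicle_metadata = {"5YJ3E1EA7KF000001": {"access": True}}
products = [
    {"vin": "5YJ3E1EA7KF000001", "cached_data": "..."},  # accessible vehicle
    {"vin": "5YJ3E1EA7KF000002"},                        # no access -> skipped
    {"energy_site_id": 123},                             # not a vehicle
]

for product in products:
    if "vin" in product and vehicle_metadata.get(product["vin"], {}).get("access"):
        product.pop("cached_data", None)  # drop the unused protobuf blob
        print("setting up vehicle", product["vin"])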
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "cloud_polling",
   "loggers": ["pytile"],
-  "requirements": ["pytile==2023.12.0"]
+  "requirements": ["pytile==2024.12.0"]
 }

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/velux",
   "iot_class": "local_polling",
   "loggers": ["pyvlx"],
-  "requirements": ["pyvlx==0.2.21"]
+  "requirements": ["pyvlx==0.2.26"]
 }
@@ -135,7 +135,18 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b

 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
-    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
+    in_use_platforms = []
+    if hass.data[DOMAIN][VS_SWITCHES]:
+        in_use_platforms.append(Platform.SWITCH)
+    if hass.data[DOMAIN][VS_FANS]:
+        in_use_platforms.append(Platform.FAN)
+    if hass.data[DOMAIN][VS_LIGHTS]:
+        in_use_platforms.append(Platform.LIGHT)
+    if hass.data[DOMAIN][VS_SENSORS]:
+        in_use_platforms.append(Platform.SENSOR)
+    unload_ok = await hass.config_entries.async_unload_platforms(
+        entry, in_use_platforms
+    )
     if unload_ok:
         hass.data.pop(DOMAIN)
@@ -56,6 +56,7 @@ SKU_TO_BASE_DEVICE = {
     "LAP-V201S-WEU": "Vital200S",  # Alt ID Model Vital200S
     "LAP-V201S-WUS": "Vital200S",  # Alt ID Model Vital200S
     "LAP-V201-AUSR": "Vital200S",  # Alt ID Model Vital200S
+    "LAP-V201S-AEUR": "Vital200S",  # Alt ID Model Vital200S
     "LAP-V201S-AUSR": "Vital200S",  # Alt ID Model Vital200S
     "Vital100S": "Vital100S",
     "LAP-V102S-WUS": "Vital100S",  # Alt ID Model Vital100S
@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/voip",
   "iot_class": "local_push",
   "quality_scale": "internal",
-  "requirements": ["voip-utils==0.2.1"]
+  "requirements": ["voip-utils==0.2.2"]
 }

@@ -15,3 +15,4 @@ send_magic_packet:
       number:
         min: 1
         max: 65535
+        mode: "box"
@@ -6,5 +6,5 @@
   "iot_class": "local_polling",
   "loggers": ["zabbix_utils"],
   "quality_scale": "legacy",
-  "requirements": ["zabbix-utils==2.0.1"]
+  "requirements": ["zabbix-utils==2.0.2"]
 }

@@ -21,7 +21,7 @@
     "zha",
     "universal_silabs_flasher"
   ],
-  "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.43"],
+  "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.44"],
   "usb": [
     {
       "vid": "10C4",
@@ -879,6 +879,12 @@
       },
       "regulator_set_point": {
        "name": "Regulator set point"
+      },
+      "detection_delay": {
+        "name": "Detection delay"
+      },
+      "fading_time": {
+        "name": "Fading time"
       }
     },
     "select": {
@@ -1237,6 +1243,9 @@
       },
       "local_temperature_floor": {
         "name": "Floor temperature"
+      },
+      "self_test": {
+        "name": "Self test result"
       }
     },
     "switch": {

@@ -290,7 +290,7 @@
           "name": "[%key:component::zwave_js::services::set_config_parameter::fields::value::name%]"
         }
       },
-      "name": "Bulk set partial configuration parameters (advanced)."
+      "name": "Bulk set partial configuration parameters (advanced)"
     },
     "clear_lock_usercode": {
       "description": "Clears a user code from a lock.",
@@ -25,7 +25,7 @@ if TYPE_CHECKING:
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2025
 MINOR_VERSION: Final = 1
-PATCH_VERSION: Final = "0b2"
+PATCH_VERSION: Final = "0b7"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)
@@ -5,7 +5,6 @@ from __future__ import annotations
 from functools import cache
 from getpass import getuser
 import logging
-import os
 import platform
 from typing import TYPE_CHECKING, Any

@@ -13,6 +12,7 @@ from homeassistant.const import __version__ as current_version
 from homeassistant.core import HomeAssistant
 from homeassistant.loader import bind_hass
 from homeassistant.util.package import is_docker_env, is_virtual_env
+from homeassistant.util.system_info import is_official_image

 from .hassio import is_hassio
 from .importlib import async_import_module
@@ -23,12 +23,6 @@ _LOGGER = logging.getLogger(__name__)
 _DATA_MAC_VER = "system_info_mac_ver"


-@cache
-def is_official_image() -> bool:
-    """Return True if Home Assistant is running in an official container."""
-    return os.path.isfile("/OFFICIAL_IMAGE")
-
-
 @singleton(_DATA_MAC_VER)
 async def async_get_mac_ver(hass: HomeAssistant) -> str:
     """Return the macOS version."""
@@ -33,10 +33,10 @@ go2rtc-client==0.1.2
 ha-ffmpeg==3.2.2
 habluetooth==3.6.0
 hass-nabucasa==0.87.0
-hassil==2.0.5
+hassil==2.1.0
 home-assistant-bluetooth==1.13.0
-home-assistant-frontend==20241224.0
-home-assistant-intents==2024.12.20
+home-assistant-frontend==20250102.0
+home-assistant-intents==2025.1.1
 httpx==0.27.2
 ifaddr==0.2.0
 Jinja2==3.1.5
@@ -15,6 +15,8 @@ from urllib.parse import urlparse

 from packaging.requirements import InvalidRequirement, Requirement

+from .system_info import is_official_image
+
 _LOGGER = logging.getLogger(__name__)


@@ -28,8 +30,13 @@ def is_virtual_env() -> bool:

 @cache
 def is_docker_env() -> bool:
-    """Return True if we run in a docker env."""
-    return Path("/.dockerenv").exists()
+    """Return True if we run in a container env."""
+    return (
+        Path("/.dockerenv").exists()
+        or Path("/run/.containerenv").exists()
+        or "KUBERNETES_SERVICE_HOST" in os.environ
+        or is_official_image()
+    )


 def get_installed_versions(specifiers: set[str]) -> set[str]:
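After this change is_docker_env recognises more than plain Docker: Podman leaves /run/.containerenv, Kubernetes injects KUBERNETES_SERVICE_HOST into the environment, and the official image carries its marker file. A quick probe over the same signals:

import os
from pathlib import Path

signals = {
    "docker": Path("/.dockerenv").exists(),
    "podman": Path("/run/.containerenv").exists(),
    "kubernetes": "KUBERNETES_SERVICE_HOST" in os.environ,
    "official_image": Path("/OFFICIAL_IMAGE").is_file(),
}
print({name: hit for name, hit in signals.items() if hit} or "no container detected")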
@@ -0,0 +1,12 @@
+"""Util to gather system info."""
+
+from __future__ import annotations
+
+from functools import cache
+import os
+
+
+@cache
+def is_official_image() -> bool:
+    """Return True if Home Assistant is running in an official container."""
+    return os.path.isfile("/OFFICIAL_IMAGE")
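Moving is_official_image into the new homeassistant/util/system_info.py apparently lets util.package import it without pulling in the helpers layer. Because the function is wrapped in functools.cache, the filesystem is probed at most once per process:

from functools import cache
import os


@cache
def is_official_image() -> bool:
    """Return True if Home Assistant is running in an official container."""
    return os.path.isfile("/OFFICIAL_IMAGE")


is_official_image()  # first call checks the filesystem
is_official_image()  # served from the memoized result
print(is_official_image.cache_info().hits)  # 1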
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "homeassistant"
|
||||
version = "2025.1.0b2"
|
||||
version = "2025.1.0b7"
|
||||
license = {text = "Apache-2.0"}
|
||||
description = "Open-source home automation platform running on Python 3."
|
||||
readme = "README.rst"
|
||||
|
||||
@@ -48,7 +48,7 @@ ProgettiHWSW==0.1.3
 PyChromecast==14.0.5

 # homeassistant.components.flick_electric
-PyFlick==0.0.2
+PyFlick==1.1.2

 # homeassistant.components.flume
 PyFlume==0.6.5
@@ -90,7 +90,7 @@ PySwitchbot==0.55.4
 PySwitchmate==0.5.1

 # homeassistant.components.syncthru
-PySyncThru==0.7.10
+PySyncThru==0.8.0

 # homeassistant.components.transport_nsw
 PyTransportNSW==0.1.1
@@ -173,7 +173,7 @@ aio-geojson-usgs-earthquakes==0.3
 aio-georss-gdacs==0.10

 # homeassistant.components.acaia
-aioacaia==0.1.11
+aioacaia==0.1.12

 # homeassistant.components.airq
 aioairq==0.4.3
@@ -213,7 +213,7 @@ aiobafi6==0.9.0
 aiobotocore==2.13.1

 # homeassistant.components.comelit
-aiocomelit==0.9.1
+aiocomelit==0.10.1

 # homeassistant.components.dhcp
 aiodhcpwatcher==1.0.2
@@ -294,7 +294,7 @@ aiolookin==1.0.0
 aiolyric==2.0.1

 # homeassistant.components.mealie
-aiomealie==0.9.4
+aiomealie==0.9.5

 # homeassistant.components.modern_forms
 aiomodernforms==0.1.8
@@ -321,7 +321,7 @@ aioopenexchangerates==0.6.8
 aiooui==0.1.7

 # homeassistant.components.pegel_online
-aiopegelonline==0.1.0
+aiopegelonline==0.1.1

 # homeassistant.components.acmeda
 aiopulse==0.4.6
@@ -356,7 +356,7 @@ aioridwell==2024.01.0
 aioruckus==0.42

 # homeassistant.components.russound_rio
-aiorussound==4.1.0
+aiorussound==4.1.1

 # homeassistant.components.ruuvi_gateway
 aioruuvigateway==0.1.0
@@ -365,7 +365,7 @@ aioruuvigateway==0.1.0
 aiosenz==1.0.0

 # homeassistant.components.shelly
-aioshelly==12.1.0
+aioshelly==12.2.0

 # homeassistant.components.skybell
 aioskybell==22.7.0
@@ -738,7 +738,7 @@ debugpy==1.8.11
 # decora==0.6

 # homeassistant.components.ecovacs
-deebot-client==10.0.1
+deebot-client==10.1.0

 # homeassistant.components.ihc
 # homeassistant.components.namecheapdns
@@ -827,7 +827,7 @@ eliqonline==1.2.2
 elkm1-lib==2.2.10

 # homeassistant.components.elmax
-elmax-api==0.0.6.3
+elmax-api==0.0.6.4rc0

 # homeassistant.components.elvia
 elvia==0.1.0
@@ -1100,7 +1100,7 @@ hass-nabucasa==0.87.0
 hass-splunk==0.1.1

 # homeassistant.components.conversation
-hassil==2.0.5
+hassil==2.1.0

 # homeassistant.components.jewish_calendar
 hdate==0.11.1
@@ -1134,10 +1134,10 @@ hole==0.8.0
 holidays==0.63

 # homeassistant.components.frontend
-home-assistant-frontend==20241224.0
+home-assistant-frontend==20250102.0

 # homeassistant.components.conversation
-home-assistant-intents==2024.12.20
+home-assistant-intents==2025.1.1

 # homeassistant.components.home_connect
 homeconnect==0.8.0
@@ -1260,7 +1260,7 @@ kiwiki-client==0.1.1
 knocki==0.4.2

 # homeassistant.components.knx
-knx-frontend==2024.11.16.205004
+knx-frontend==2024.12.26.233449

 # homeassistant.components.konnected
 konnected==1.2.0
@@ -1528,7 +1528,7 @@ oemthermostat==1.1.1
 ohme==1.2.0

 # homeassistant.components.ollama
-ollama==0.3.3
+ollama==0.4.5

 # homeassistant.components.omnilogic
 omnilogic==0.4.5
@@ -1570,7 +1570,7 @@ openwrt-luci-rpc==1.1.17
 openwrt-ubus-rpc==0.0.2

 # homeassistant.components.opower
-opower==0.8.6
+opower==0.8.7

 # homeassistant.components.oralb
 oralb-ble==0.17.6
@@ -1603,7 +1603,7 @@ panasonic-viera==0.4.2
 pdunehd==1.3.2

 # homeassistant.components.peblar
-peblar==0.3.0
+peblar==0.3.1

 # homeassistant.components.peco
 peco==0.0.30
@@ -1779,7 +1779,7 @@ pyairnow==1.2.1
 pyairvisual==2023.08.1

 # homeassistant.components.aprilaire
-pyaprilaire==0.7.4
+pyaprilaire==0.7.7

 # homeassistant.components.asuswrt
 pyasuswrt==0.1.21
@@ -2043,7 +2043,7 @@ pykwb==0.0.8
 pylacrosse==0.4

 # homeassistant.components.lamarzocco
-pylamarzocco==1.4.2
+pylamarzocco==1.4.6

 # homeassistant.components.lastfm
 pylast==5.1.0
@@ -2118,7 +2118,7 @@ pynetio==0.1.9.1
 pynobo==1.8.1

 # homeassistant.components.nordpool
-pynordpool==0.2.3
+pynordpool==0.2.4

 # homeassistant.components.nuki
 pynuki==1.6.3
@@ -2360,7 +2360,7 @@ python-gc100==1.0.3a0
 python-gitlab==1.6.0

 # homeassistant.components.analytics_insights
-python-homeassistant-analytics==0.8.0
+python-homeassistant-analytics==0.8.1

 # homeassistant.components.homewizard
 python-homewizard-energy==v7.0.0
@@ -2442,7 +2442,7 @@ python-vlc==3.0.18122
 pythonegardia==1.0.52

 # homeassistant.components.tile
-pytile==2023.12.0
+pytile==2024.12.0

 # homeassistant.components.tomorrowio
 pytomorrowio==0.3.6
@@ -2491,7 +2491,7 @@ pyvesync==2.1.12
 pyvizio==0.1.61

 # homeassistant.components.velux
-pyvlx==0.2.21
+pyvlx==0.2.26

 # homeassistant.components.volumio
 pyvolumio==0.1.5
@@ -2572,7 +2572,7 @@ renault-api==0.2.8
 renson-endura-delta==1.7.2

 # homeassistant.components.reolink
-reolink-aio==0.11.5
+reolink-aio==0.11.6

 # homeassistant.components.idteck_prox
 rfk101py==0.0.1
@@ -2960,7 +2960,7 @@ venstarcolortouch==0.19
 vilfo-api-client==0.5.0

 # homeassistant.components.voip
-voip-utils==0.2.1
+voip-utils==0.2.2

 # homeassistant.components.volkszaehler
 volkszaehler==0.4.0
@@ -3082,10 +3082,10 @@ youless-api==2.1.2
 youtubeaio==1.1.5

 # homeassistant.components.media_extractor
-yt-dlp[default]==2024.12.13
+yt-dlp[default]==2024.12.23

 # homeassistant.components.zabbix
-zabbix-utils==2.0.1
+zabbix-utils==2.0.2

 # homeassistant.components.zamg
 zamg==0.3.6
@@ -3100,7 +3100,7 @@ zeroconf==0.136.2
 zeversolar==0.3.2

 # homeassistant.components.zha
-zha==0.0.43
+zha==0.0.44

 # homeassistant.components.zhong_hong
 zhong-hong-hvac==1.0.13
@@ -45,7 +45,7 @@ ProgettiHWSW==0.1.3
 PyChromecast==14.0.5

 # homeassistant.components.flick_electric
-PyFlick==0.0.2
+PyFlick==1.1.2

 # homeassistant.components.flume
 PyFlume==0.6.5
@@ -84,7 +84,7 @@ PyRMVtransport==0.3.3
 PySwitchbot==0.55.4

 # homeassistant.components.syncthru
-PySyncThru==0.7.10
+PySyncThru==0.8.0

 # homeassistant.components.transport_nsw
 PyTransportNSW==0.1.1
@@ -161,7 +161,7 @@ aio-geojson-usgs-earthquakes==0.3
 aio-georss-gdacs==0.10

 # homeassistant.components.acaia
-aioacaia==0.1.11
+aioacaia==0.1.12

 # homeassistant.components.airq
 aioairq==0.4.3
@@ -201,7 +201,7 @@ aiobafi6==0.9.0
 aiobotocore==2.13.1

 # homeassistant.components.comelit
-aiocomelit==0.9.1
+aiocomelit==0.10.1

 # homeassistant.components.dhcp
 aiodhcpwatcher==1.0.2
@@ -276,7 +276,7 @@ aiolookin==1.0.0
 aiolyric==2.0.1

 # homeassistant.components.mealie
-aiomealie==0.9.4
+aiomealie==0.9.5

 # homeassistant.components.modern_forms
 aiomodernforms==0.1.8
@@ -303,7 +303,7 @@ aioopenexchangerates==0.6.8
 aiooui==0.1.7

 # homeassistant.components.pegel_online
-aiopegelonline==0.1.0
+aiopegelonline==0.1.1

 # homeassistant.components.acmeda
 aiopulse==0.4.6
@@ -338,7 +338,7 @@ aioridwell==2024.01.0
 aioruckus==0.42

 # homeassistant.components.russound_rio
-aiorussound==4.1.0
+aiorussound==4.1.1

 # homeassistant.components.ruuvi_gateway
 aioruuvigateway==0.1.0
@@ -347,7 +347,7 @@ aioruuvigateway==0.1.0
 aiosenz==1.0.0

 # homeassistant.components.shelly
-aioshelly==12.1.0
+aioshelly==12.2.0

 # homeassistant.components.skybell
 aioskybell==22.7.0
@@ -628,7 +628,7 @@ dbus-fast==2.24.3
 debugpy==1.8.11

 # homeassistant.components.ecovacs
-deebot-client==10.0.1
+deebot-client==10.1.0

 # homeassistant.components.ihc
 # homeassistant.components.namecheapdns
@@ -702,7 +702,7 @@ elgato==5.1.2
 elkm1-lib==2.2.10

 # homeassistant.components.elmax
-elmax-api==0.0.6.3
+elmax-api==0.0.6.4rc0

 # homeassistant.components.elvia
 elvia==0.1.0
@@ -938,7 +938,7 @@ habluetooth==3.6.0
 hass-nabucasa==0.87.0

 # homeassistant.components.conversation
-hassil==2.0.5
+hassil==2.1.0

 # homeassistant.components.jewish_calendar
 hdate==0.11.1
@@ -963,10 +963,10 @@ hole==0.8.0
 holidays==0.63

 # homeassistant.components.frontend
-home-assistant-frontend==20241224.0
+home-assistant-frontend==20250102.0

 # homeassistant.components.conversation
-home-assistant-intents==2024.12.20
+home-assistant-intents==2025.1.1

 # homeassistant.components.home_connect
 homeconnect==0.8.0
@@ -1062,7 +1062,7 @@ kegtron-ble==0.4.0
 knocki==0.4.2

 # homeassistant.components.knx
-knx-frontend==2024.11.16.205004
+knx-frontend==2024.12.26.233449

 # homeassistant.components.konnected
 konnected==1.2.0
@@ -1276,7 +1276,7 @@ odp-amsterdam==6.0.2
 ohme==1.2.0

 # homeassistant.components.ollama
-ollama==0.3.3
+ollama==0.4.5

 # homeassistant.components.omnilogic
 omnilogic==0.4.5
@@ -1306,7 +1306,7 @@ openhomedevice==2.2.0
 openwebifpy==4.3.0

 # homeassistant.components.opower
-opower==0.8.6
+opower==0.8.7

 # homeassistant.components.oralb
 oralb-ble==0.17.6
@@ -1330,7 +1330,7 @@ panasonic-viera==0.4.2
 pdunehd==1.3.2

 # homeassistant.components.peblar
-peblar==0.3.0
+peblar==0.3.1

 # homeassistant.components.peco
 peco==0.0.30
@@ -1459,7 +1459,7 @@ pyairnow==1.2.1
 pyairvisual==2023.08.1

 # homeassistant.components.aprilaire
-pyaprilaire==0.7.4
+pyaprilaire==0.7.7

 # homeassistant.components.asuswrt
 pyasuswrt==0.1.21
@@ -1657,7 +1657,7 @@ pykrakenapi==0.1.8
 pykulersky==0.5.2

 # homeassistant.components.lamarzocco
-pylamarzocco==1.4.2
+pylamarzocco==1.4.6

 # homeassistant.components.lastfm
 pylast==5.1.0
@@ -1720,7 +1720,7 @@ pynetgear==0.10.10
 pynobo==1.8.1

 # homeassistant.components.nordpool
-pynordpool==0.2.3
+pynordpool==0.2.4

 # homeassistant.components.nuki
 pynuki==1.6.3
@@ -1902,7 +1902,7 @@ python-fullykiosk==0.0.14
 # python-gammu==3.2.4

 # homeassistant.components.analytics_insights
-python-homeassistant-analytics==0.8.0
+python-homeassistant-analytics==0.8.1

 # homeassistant.components.homewizard
 python-homewizard-energy==v7.0.0
@@ -1966,7 +1966,7 @@ python-technove==1.3.1
 python-telegram-bot[socks]==21.5

 # homeassistant.components.tile
-pytile==2023.12.0
+pytile==2024.12.0

 # homeassistant.components.tomorrowio
 pytomorrowio==0.3.6
@@ -2006,7 +2006,7 @@ pyvesync==2.1.12
 pyvizio==0.1.61

 # homeassistant.components.velux
-pyvlx==0.2.21
+pyvlx==0.2.26

 # homeassistant.components.volumio
 pyvolumio==0.1.5
@@ -2072,7 +2072,7 @@ renault-api==0.2.8
 renson-endura-delta==1.7.2

 # homeassistant.components.reolink
-reolink-aio==0.11.5
+reolink-aio==0.11.6

 # homeassistant.components.rflink
 rflink==0.0.66
@@ -2376,7 +2376,7 @@ venstarcolortouch==0.19
 vilfo-api-client==0.5.0

 # homeassistant.components.voip
-voip-utils==0.2.1
+voip-utils==0.2.2

 # homeassistant.components.volvooncall
 volvooncall==0.10.3
@@ -2477,7 +2477,7 @@ youless-api==2.1.2
 youtubeaio==1.1.5

 # homeassistant.components.media_extractor
-yt-dlp[default]==2024.12.13
+yt-dlp[default]==2024.12.23

 # homeassistant.components.zamg
 zamg==0.3.6
@@ -2489,7 +2489,7 @@ zeroconf==0.136.2
 zeversolar==0.3.2

 # homeassistant.components.zha
-zha==0.0.43
+zha==0.0.44

 # homeassistant.components.zwave_js
 zwave-js-server-python==0.60.0
@@ -23,7 +23,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.8,source=/uv,target=/bin/uv \
     -c /usr/src/homeassistant/homeassistant/package_constraints.txt \
     -r /usr/src/homeassistant/requirements.txt \
     stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.3 \
-    PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.20 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2
+    PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.1.0 home-assistant-intents==2025.1.1 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2

 LABEL "name"="hassfest"
 LABEL "maintainer"="Home Assistant <hello@home-assistant.io>"
@@ -95,7 +95,6 @@ async def test_config_flow_data(client: AprilaireClient, hass: HomeAssistant) ->
     )

     client.start_listen.assert_called_once()
-    client.wait_for_response.assert_any_call(FunctionalDomain.IDENTIFICATION, 4, 30)
     client.wait_for_response.assert_any_call(FunctionalDomain.CONTROL, 7, 30)
     client.wait_for_response.assert_any_call(FunctionalDomain.SENSORS, 2, 30)
     client.stop_listen.assert_called_once()
@@ -166,3 +166,15 @@ async def setup_backup_integration(
         agent._loaded_backups = True

     return result
+
+
+async def setup_backup_platform(
+    hass: HomeAssistant,
+    *,
+    domain: str,
+    platform: Any,
+) -> None:
+    """Set up a mock domain."""
+    mock_platform(hass, f"{domain}.backup", platform)
+    assert await async_setup_component(hass, domain, {})
+    await hass.async_block_till_done()
Some files were not shown because too many files have changed in this diff.