forked from home-assistant/core
Compare commits
110 Commits
2025.2.0b4 ... 2025.2.0
| SHA1 |
|---|
| 5c383f3d88 |
| 3a88c9d6f4 |
| 5c7cabed1e |
| 65fde6042f |
| d5dd0f6ec1 |
| 95410586b1 |
| d5ad91fce3 |
| 04b0d587c5 |
| 72a3c5296c |
| d6414b9849 |
| c4e2ddd28b |
| 5687a4d718 |
| a4474b2794 |
| 72a69d7e41 |
| e8314fb286 |
| 30c099ef4e |
| c506c9080a |
| 79563f3746 |
| 0764c7e773 |
| fa83591148 |
| df2b29aef1 |
| da8d300f29 |
| 2c5fd4ee2a |
| 16d9270833 |
| d8179dacc6 |
| 3dc075f287 |
| b5e4fee9aa |
| 1c8ced2c2d |
| 1a5b8cf854 |
| af40bb39ad |
| 14034ed7f8 |
| d7f0a55568 |
| 1038a849c4 |
| c4b08d3d57 |
| 0e9658b5ff |
| 0463b90d36 |
| 37f0832c8b |
| 2005e14d5f |
| 99219a9a73 |
| 1f967f7f77 |
| 8de64b8b1f |
| 48c88d8fa1 |
| d478f906df |
| 09e02493b7 |
| 55c746f909 |
| 834a04ac49 |
| fa9b4c3524 |
| 13bfa82038 |
| 0766b47161 |
| fa8225d0a2 |
| 623c82e5d1 |
| 728a1a4be5 |
| 4bbb3e351b |
| 044bafd6aa |
| 1e1069b647 |
| 455af9179b |
| 30b309d7a1 |
| 7e32342eb2 |
| bb9740991e |
| 88e5d1c18f |
| e960053226 |
| b318fb46a0 |
| 523835080b |
| 5a63138581 |
| 90ddb6cce1 |
| 81783dcfd3 |
| 405cc47157 |
| 809f5eea49 |
| 63c153d671 |
| c8c6eddc65 |
| ddb40cb4a8 |
| 38975775ac |
| 4fa043e6ff |
| 433a51f6d5 |
| 48511986bb |
| f1128adec4 |
| 54a718c1d7 |
| 63d1dddc76 |
| 7d1b72a581 |
| 6c172705d1 |
| 505f089a73 |
| dbf9e370a8 |
| dc1c2f24e6 |
| 78dcf8b18e |
| 613168fd62 |
| 5f28e95bdc |
| 1db5da4037 |
| 6bf5e95089 |
| 1ea23fda10 |
| 21a85c014a |
| 4c8f716320 |
| 63bd67f6cd |
| 73b874c5e6 |
| 3b67dc3651 |
| 434a4ebc9f |
| cb4b7e71af |
| 4c6fda2096 |
| 9b5c21524c |
| 76937541f1 |
| bad966f3ab |
| 2d1d9bbe5a |
| e76ff0a0de |
| fa8d1b4dc4 |
| b3c44ca03a |
| 6efa6f9687 |
| 3588b88cbb |
| a51846a8cd |
| ec22479733 |
| 3a11e8df6a |
| a4eab35e01 |
@@ -161,6 +161,12 @@ FRONTEND_INTEGRATIONS = {
     # integrations can be removed and database migration status is
     # visible in frontend
     "frontend",
+    # Hassio is an after dependency of backup, after dependencies
+    # are not promoted from stage 2 to earlier stages, so we need to
+    # add it here. Hassio needs to be setup before backup, otherwise
+    # the backup integration will think we are a container/core install
+    # when using HAOS or Supervised install.
+    "hassio",
     # Backup is an after dependency of frontend, after dependencies
     # are not promoted from stage 2 to earlier stages, so we need to
     # add it here.
@@ -144,7 +144,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
             return self.async_create_entry(title=discovery.name, data={})

-        current_addresses = self._async_current_ids()
+        current_addresses = self._async_current_ids(include_ignore=False)
        for discovery_info in async_discovered_service_info(self.hass):
             address = discovery_info.address
             if address in current_addresses or address in self._discovered_devices:
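This one-line change, passing `include_ignore=False` to `_async_current_ids()`, repeats in the Aranet, Bluetooth, BTHome, Govee, IdasenDesk, and INKBIRD hunks below. Excluding ignored config entries from the "already configured" set lets a manually started flow offer a device the user previously ignored. A standalone sketch of the effect (plain Python with a made-up entry model, not Home Assistant's real API):

```python
# Illustrative sketch only: why include_ignore=False matters when building
# the "already configured" set in a discovery step.
IGNORED = "ignore"

def current_ids(entries: list[dict], include_ignore: bool = True) -> set[str]:
    """Mimic ConfigFlow._async_current_ids: collect unique IDs of entries."""
    return {
        e["unique_id"]
        for e in entries
        if include_ignore or e["source"] != IGNORED
    }

entries = [
    {"unique_id": "AA:BB", "source": "user"},
    {"unique_id": "CC:DD", "source": IGNORED},  # user clicked "Ignore" earlier
]

# Old behavior: the ignored address counts as configured, so a manual user
# flow would never offer CC:DD again.
assert "CC:DD" in current_ids(entries)

# New behavior: ignored entries are skipped, so CC:DD shows up again when
# the user explicitly starts a flow.
assert "CC:DD" not in current_ids(entries, include_ignore=False)
```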
@@ -92,7 +92,7 @@ class AranetConfigFlow(ConfigFlow, domain=DOMAIN):
                 title=self._discovered_devices[address][0], data={}
             )

-        current_addresses = self._async_current_ids()
+        current_addresses = self._async_current_ids(include_ignore=False)
         for discovery_info in async_discovered_service_info(self.hass, False):
             address = discovery_info.address
             if address in current_addresses or address in self._discovered_devices:
@@ -19,5 +19,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aranet",
   "integration_type": "device",
   "iot_class": "local_push",
-  "requirements": ["aranet4==2.5.0"]
+  "requirements": ["aranet4==2.5.1"]
 }
@@ -26,15 +26,18 @@ from .manager import (
     BackupReaderWriterError,
     CoreBackupReaderWriter,
     CreateBackupEvent,
+    CreateBackupStage,
     CreateBackupState,
     IdleEvent,
     IncorrectPasswordError,
     ManagerBackup,
     NewBackup,
     RestoreBackupEvent,
+    RestoreBackupStage,
     RestoreBackupState,
     WrittenBackup,
 )
-from .models import AddonInfo, AgentBackup, Folder
+from .models import AddonInfo, AgentBackup, BackupNotFound, Folder
+from .util import suggested_filename, suggested_filename_from_name_date
 from .websocket import async_register_websocket_handlers
@@ -45,10 +48,13 @@ __all__ = [
     "BackupAgentError",
     "BackupAgentPlatformProtocol",
     "BackupManagerError",
+    "BackupNotFound",
     "BackupPlatformProtocol",
     "BackupReaderWriter",
     "BackupReaderWriterError",
     "CreateBackupEvent",
+    "CreateBackupStage",
     "CreateBackupState",
     "Folder",
     "IdleEvent",
     "IncorrectPasswordError",
@@ -56,6 +62,7 @@ __all__ = [
     "ManagerBackup",
     "NewBackup",
     "RestoreBackupEvent",
+    "RestoreBackupStage",
     "RestoreBackupState",
     "WrittenBackup",
     "async_get_manager",
@@ -11,13 +11,7 @@ from propcache.api import cached_property

 from homeassistant.core import HomeAssistant, callback

-from .models import AgentBackup, BackupError
-
-
-class BackupAgentError(BackupError):
-    """Base class for backup agent errors."""
-
-    error_code = "backup_agent_error"
+from .models import AgentBackup, BackupAgentError


 class BackupAgentUnreachableError(BackupAgentError):
@@ -27,12 +21,6 @@ class BackupAgentUnreachableError(BackupAgentError):
     _message = "The backup agent is unreachable."


-class BackupNotFound(BackupAgentError):
-    """Raised when a backup is not found."""
-
-    error_code = "backup_not_found"
-
-
 class BackupAgent(abc.ABC):
     """Backup agent interface."""
@@ -11,9 +11,9 @@ from typing import Any
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.hassio import is_hassio

-from .agent import BackupAgent, BackupNotFound, LocalBackupAgent
+from .agent import BackupAgent, LocalBackupAgent
 from .const import DOMAIN, LOGGER
-from .models import AgentBackup
+from .models import AgentBackup, BackupNotFound
 from .util import read_backup, suggested_filename
@@ -21,6 +21,7 @@ from . import util
 from .agent import BackupAgent
 from .const import DATA_MANAGER
 from .manager import BackupManager
+from .models import BackupNotFound


 @callback
@@ -69,13 +70,16 @@ class DownloadBackupView(HomeAssistantView):
             CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
         }

-        if not password or not backup.protected:
-            return await self._send_backup_no_password(
-                request, headers, backup_id, agent_id, agent, manager
+        try:
+            if not password or not backup.protected:
+                return await self._send_backup_no_password(
+                    request, headers, backup_id, agent_id, agent, manager
+                )
+            return await self._send_backup_with_password(
+                hass, request, headers, backup_id, agent_id, password, agent, manager
             )
-        return await self._send_backup_with_password(
-            hass, request, headers, backup_id, agent_id, password, agent, manager
-        )
+        except BackupNotFound:
+            return Response(status=HTTPStatus.NOT_FOUND)

     async def _send_backup_no_password(
         self,
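The try/except added here maps the new `BackupNotFound` error onto an HTTP 404 response instead of letting it surface as a 500. A minimal aiohttp sketch of the same pattern (the class and handler below are illustrative stand-ins, not the actual view):

```python
# Sketch: translate a domain-level "not found" error into an HTTP 404.
from http import HTTPStatus

from aiohttp import web


class BackupNotFound(Exception):
    """Stand-in for homeassistant.components.backup.models.BackupNotFound."""


async def fetch_backup(backup_id: str) -> bytes:
    raise BackupNotFound  # the agent no longer has this backup


async def download(request: web.Request) -> web.Response:
    try:
        data = await fetch_backup(request.match_info["backup_id"])
    except BackupNotFound:
        return web.Response(status=HTTPStatus.NOT_FOUND)
    return web.Response(body=data, content_type="application/x-tar")
```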
@@ -9,6 +9,7 @@ from dataclasses import dataclass, replace
 from enum import StrEnum
 import hashlib
 import io
+from itertools import chain
 import json
 from pathlib import Path, PurePath
 import shutil
@@ -50,7 +51,14 @@ from .const import (
     EXCLUDE_FROM_BACKUP,
     LOGGER,
 )
-from .models import AgentBackup, BackupError, BackupManagerError, BaseBackup, Folder
+from .models import (
+    AgentBackup,
+    BackupError,
+    BackupManagerError,
+    BackupReaderWriterError,
+    BaseBackup,
+    Folder,
+)
 from .store import BackupStore
 from .util import (
     AsyncIteratorReader,
@@ -274,12 +282,6 @@ class BackupReaderWriter(abc.ABC):
         """Get restore events after core restart."""


-class BackupReaderWriterError(BackupError):
-    """Backup reader/writer error."""
-
-    error_code = "backup_reader_writer_error"
-
-
 class IncorrectPasswordError(BackupReaderWriterError):
     """Raised when the password is incorrect."""
@@ -826,7 +828,7 @@ class BackupManager:
             password=None,
         )
         await written_backup.release_stream()
-        self.known_backups.add(written_backup.backup, agent_errors)
+        self.known_backups.add(written_backup.backup, agent_errors, [])
         return written_backup.backup.backup_id

     async def async_create_backup(
@@ -950,12 +952,23 @@
         with_automatic_settings: bool,
     ) -> NewBackup:
         """Initiate generating a backup."""
-        if not agent_ids:
-            raise BackupManagerError("At least one agent must be selected")
-        if invalid_agents := [
+        unavailable_agents = [
             agent_id for agent_id in agent_ids if agent_id not in self.backup_agents
-        ]:
-            raise BackupManagerError(f"Invalid agents selected: {invalid_agents}")
+        ]
+        if not (
+            available_agents := [
+                agent_id for agent_id in agent_ids if agent_id in self.backup_agents
+            ]
+        ):
+            raise BackupManagerError(
+                f"At least one available backup agent must be selected, got {agent_ids}"
+            )
+        if unavailable_agents:
+            LOGGER.warning(
+                "Backup agents %s are not available, will back up to %s",
+                unavailable_agents,
+                available_agents,
+            )
         if include_all_addons and include_addons:
             raise BackupManagerError(
                 "Cannot include all addons and specify specific addons"
@@ -972,7 +985,7 @@
             new_backup,
             self._backup_task,
         ) = await self._reader_writer.async_create_backup(
-            agent_ids=agent_ids,
+            agent_ids=available_agents,
             backup_name=backup_name,
             extra_metadata=extra_metadata
             | {
@@ -991,7 +1004,9 @@
             raise BackupManagerError(str(err)) from err

         backup_finish_task = self._backup_finish_task = self.hass.async_create_task(
-            self._async_finish_backup(agent_ids, with_automatic_settings, password),
+            self._async_finish_backup(
+                available_agents, unavailable_agents, with_automatic_settings, password
+            ),
             name="backup_manager_finish_backup",
         )
         if not raise_task_error:
@@ -1008,7 +1023,11 @@
         return new_backup

     async def _async_finish_backup(
-        self, agent_ids: list[str], with_automatic_settings: bool, password: str | None
+        self,
+        available_agents: list[str],
+        unavailable_agents: list[str],
+        with_automatic_settings: bool,
+        password: str | None,
     ) -> None:
         """Finish a backup."""
         if TYPE_CHECKING:
@@ -1027,7 +1046,7 @@
         LOGGER.debug(
             "Generated new backup with backup_id %s, uploading to agents %s",
             written_backup.backup.backup_id,
-            agent_ids,
+            available_agents,
         )
         self.async_on_backup_event(
             CreateBackupEvent(
@@ -1040,13 +1059,15 @@
         try:
             agent_errors = await self._async_upload_backup(
                 backup=written_backup.backup,
-                agent_ids=agent_ids,
+                agent_ids=available_agents,
                 open_stream=written_backup.open_stream,
                 password=password,
             )
         finally:
             await written_backup.release_stream()
-        self.known_backups.add(written_backup.backup, agent_errors)
+        self.known_backups.add(
+            written_backup.backup, agent_errors, unavailable_agents
+        )
         if not agent_errors:
             if with_automatic_settings:
                 # create backup was successful, update last_completed_automatic_backup
@@ -1055,7 +1076,7 @@
         backup_success = True

         if with_automatic_settings:
-            self._update_issue_after_agent_upload(agent_errors)
+            self._update_issue_after_agent_upload(agent_errors, unavailable_agents)
         # delete old backups more numerous than copies
         # try this regardless of agent errors above
         await delete_backups_exceeding_configured_count(self)
@@ -1215,10 +1236,10 @@
         )

     def _update_issue_after_agent_upload(
-        self, agent_errors: dict[str, Exception]
+        self, agent_errors: dict[str, Exception], unavailable_agents: list[str]
     ) -> None:
         """Update issue registry after a backup is uploaded to agents."""
-        if not agent_errors:
+        if not agent_errors and not unavailable_agents:
             ir.async_delete_issue(self.hass, DOMAIN, "automatic_backup_failed")
             return
         ir.async_create_issue(
@@ -1232,7 +1253,13 @@
             translation_key="automatic_backup_failed_upload_agents",
             translation_placeholders={
                 "failed_agents": ", ".join(
-                    self.backup_agents[agent_id].name for agent_id in agent_errors
+                    chain(
+                        (
+                            self.backup_agents[agent_id].name
+                            for agent_id in agent_errors
+                        ),
+                        unavailable_agents,
+                    )
                 )
             },
         )
@@ -1301,11 +1328,12 @@ class KnownBackups:
         self,
         backup: AgentBackup,
         agent_errors: dict[str, Exception],
+        unavailable_agents: list[str],
     ) -> None:
         """Add a backup."""
         self._backups[backup.backup_id] = KnownBackup(
             backup_id=backup.backup_id,
-            failed_agent_ids=list(agent_errors),
+            failed_agent_ids=list(chain(agent_errors, unavailable_agents)),
         )
         self._manager.store.save()
@@ -1411,7 +1439,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         manager = self._hass.data[DATA_MANAGER]

         agent_config = manager.config.data.agents.get(self._local_agent_id)
-        if agent_config and not agent_config.protected:
+        if (
+            self._local_agent_id in agent_ids
+            and agent_config
+            and not agent_config.protected
+        ):
             password = None

         backup = AgentBackup(
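Taken together, these manager hunks change agent validation from all-or-nothing to best-effort: unknown agents no longer abort the backup; they are logged, skipped, and recorded as failed so the repair issue still surfaces. A simplified sketch of that flow (the agent IDs are assumed for illustration, not HA's real registry):

```python
# Sketch of the new agent handling, simplified from the hunks above:
# partition requested agents, back up to the available ones, and record the
# unavailable ones as failed alongside upload errors.
from itertools import chain

registered_agents = {"backup.local", "cloud.cloud"}
requested = ["backup.local", "hassio.share", "cloud.cloud"]

unavailable = [a for a in requested if a not in registered_agents]
available = [a for a in requested if a in registered_agents]

if not available:
    raise ValueError(
        f"At least one available backup agent must be selected, got {requested}"
    )
if unavailable:
    print(f"Backup agents {unavailable} are not available, will back up to {available}")

agent_errors: dict[str, Exception] = {}  # filled in during the real upload
failed_agent_ids = list(chain(agent_errors, unavailable))
assert failed_agent_ids == ["hassio.share"]
```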
@@ -41,12 +41,6 @@ class BaseBackup:
     homeassistant_version: str | None  # None if homeassistant_included is False
     name: str

-    def as_frontend_json(self) -> dict:
-        """Return a dict representation of this backup for sending to frontend."""
-        return {
-            key: val for key, val in asdict(self).items() if key != "extra_metadata"
-        }
-

 @dataclass(frozen=True, kw_only=True)
 class AgentBackup(BaseBackup):
@@ -83,7 +77,25 @@ class BackupError(HomeAssistantError):
     error_code = "unknown"


+class BackupAgentError(BackupError):
+    """Base class for backup agent errors."""
+
+    error_code = "backup_agent_error"
+
+
 class BackupManagerError(BackupError):
     """Backup manager error."""

     error_code = "backup_manager_error"
+
+
+class BackupReaderWriterError(BackupError):
+    """Backup reader/writer error."""
+
+    error_code = "backup_reader_writer_error"
+
+
+class BackupNotFound(BackupAgentError, BackupManagerError):
+    """Raised when a backup is not found."""
+
+    error_code = "backup_not_found"
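With this hunk the backup error classes all live in models.py, and `BackupNotFound` inherits from both `BackupAgentError` and `BackupManagerError`. A compact sketch of the resulting hierarchy and why the dual inheritance matters:

```python
# Sketch of the reshaped exception hierarchy from the models.py hunk above:
# one BackupNotFound raise is catchable by agent-level and manager-level
# handlers alike.
class BackupError(Exception):
    error_code = "unknown"


class BackupAgentError(BackupError):
    error_code = "backup_agent_error"


class BackupManagerError(BackupError):
    error_code = "backup_manager_error"


class BackupNotFound(BackupAgentError, BackupManagerError):
    error_code = "backup_not_found"


try:
    raise BackupNotFound
except BackupManagerError as err:  # also catchable as BackupAgentError
    assert err.error_code == "backup_not_found"
```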
@@ -4,6 +4,7 @@ from __future__ import annotations

 import asyncio
 from collections.abc import AsyncIterator, Callable, Coroutine
+from concurrent.futures import CancelledError, Future
 import copy
 from dataclasses import dataclass, replace
 from io import BytesIO
@@ -12,6 +13,7 @@ import os
 from pathlib import Path, PurePath
 from queue import SimpleQueue
 import tarfile
+import threading
 from typing import IO, Any, Self, cast

 import aiohttp
@@ -22,7 +24,6 @@ from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.util import dt as dt_util
 from homeassistant.util.json import JsonObjectType, json_loads_object
-from homeassistant.util.thread import ThreadWithException

 from .const import BUF_SIZE, LOGGER
 from .models import AddonInfo, AgentBackup, Folder
@@ -121,7 +122,7 @@ def read_backup(backup_path: Path) -> AgentBackup:
 def suggested_filename_from_name_date(name: str, date_str: str) -> str:
     """Suggest a filename for the backup."""
     date = dt_util.parse_datetime(date_str, raise_on_error=True)
-    return "_".join(f"{name} - {date.strftime('%Y-%m-%d %H.%M %S%f')}.tar".split())
+    return "_".join(f"{name} {date.strftime('%Y-%m-%d %H.%M %S%f')}.tar".split())


 def suggested_filename(backup: AgentBackup) -> str:
@@ -167,23 +168,38 @@ class AsyncIteratorReader:

     def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None:
         """Initialize the wrapper."""
+        self._aborted = False
         self._hass = hass
         self._stream = stream
         self._buffer: bytes | None = None
+        self._next_future: Future[bytes | None] | None = None
         self._pos: int = 0

     async def _next(self) -> bytes | None:
         """Get the next chunk from the iterator."""
         return await anext(self._stream, None)

+    def abort(self) -> None:
+        """Abort the reader."""
+        self._aborted = True
+        if self._next_future is not None:
+            self._next_future.cancel()
+
     def read(self, n: int = -1, /) -> bytes:
         """Read data from the iterator."""
         result = bytearray()
         while n < 0 or len(result) < n:
             if not self._buffer:
-                self._buffer = asyncio.run_coroutine_threadsafe(
+                self._next_future = asyncio.run_coroutine_threadsafe(
                     self._next(), self._hass.loop
-                ).result()
+                )
+                if self._aborted:
+                    self._next_future.cancel()
+                    raise AbortCipher
+                try:
+                    self._buffer = self._next_future.result()
+                except CancelledError as err:
+                    raise AbortCipher from err
                 self._pos = 0
             if not self._buffer:
                 # The stream is exhausted
@@ -205,9 +221,11 @@ class AsyncIteratorWriter:

     def __init__(self, hass: HomeAssistant) -> None:
         """Initialize the wrapper."""
+        self._aborted = False
         self._hass = hass
         self._pos: int = 0
         self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1)
+        self._write_future: Future[bytes | None] | None = None

     def __aiter__(self) -> Self:
         """Return the iterator."""
@@ -219,13 +237,28 @@ class AsyncIteratorWriter:
             return data
         raise StopAsyncIteration

+    def abort(self) -> None:
+        """Abort the writer."""
+        self._aborted = True
+        if self._write_future is not None:
+            self._write_future.cancel()
+
     def tell(self) -> int:
         """Return the current position in the iterator."""
         return self._pos

     def write(self, s: bytes, /) -> int:
         """Write data to the iterator."""
-        asyncio.run_coroutine_threadsafe(self._queue.put(s), self._hass.loop).result()
+        self._write_future = asyncio.run_coroutine_threadsafe(
+            self._queue.put(s), self._hass.loop
+        )
+        if self._aborted:
+            self._write_future.cancel()
+            raise AbortCipher
+        try:
+            self._write_future.result()
+        except CancelledError as err:
+            raise AbortCipher from err
         self._pos += len(s)
         return len(s)
@@ -415,7 +448,9 @@
 class _CipherWorkerStatus:
     done: asyncio.Event
     error: Exception | None = None
-    thread: ThreadWithException
+    reader: AsyncIteratorReader
+    thread: threading.Thread
+    writer: AsyncIteratorWriter


 class _CipherBackupStreamer:
@@ -468,11 +503,13 @@
         stream = await self._open_stream()
         reader = AsyncIteratorReader(self._hass, stream)
         writer = AsyncIteratorWriter(self._hass)
-        worker = ThreadWithException(
+        worker = threading.Thread(
             target=self._cipher_func,
             args=[reader, writer, self._password, on_done, self.size(), self._nonces],
         )
-        worker_status = _CipherWorkerStatus(done=asyncio.Event(), thread=worker)
+        worker_status = _CipherWorkerStatus(
+            done=asyncio.Event(), reader=reader, thread=worker, writer=writer
+        )
         self._workers.append(worker_status)
         worker.start()
         return writer
@@ -480,9 +517,8 @@

     async def wait(self) -> None:
         """Wait for the worker threads to finish."""
         for worker in self._workers:
-            if not worker.thread.is_alive():
-                continue
-            worker.thread.raise_exc(AbortCipher)
+            worker.reader.abort()
+            worker.writer.abort()
         await asyncio.gather(*(worker.done.wait() for worker in self._workers))
|
||||
IncorrectPasswordError,
|
||||
ManagerStateEvent,
|
||||
)
|
||||
from .models import Folder
|
||||
from .models import BackupNotFound, Folder
|
||||
|
||||
|
||||
@callback
|
||||
@@ -57,7 +57,7 @@ async def handle_info(
|
||||
"agent_errors": {
|
||||
agent_id: str(err) for agent_id, err in agent_errors.items()
|
||||
},
|
||||
"backups": [backup.as_frontend_json() for backup in backups.values()],
|
||||
"backups": list(backups.values()),
|
||||
"last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup,
|
||||
"last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup,
|
||||
"last_non_idle_event": manager.last_non_idle_event,
|
||||
@@ -91,7 +91,7 @@ async def handle_details(
|
||||
"agent_errors": {
|
||||
agent_id: str(err) for agent_id, err in agent_errors.items()
|
||||
},
|
||||
"backup": backup.as_frontend_json() if backup else None,
|
||||
"backup": backup,
|
||||
},
|
||||
)
|
||||
|
||||
@@ -151,6 +151,8 @@ async def handle_restore(
|
||||
restore_folders=msg.get("restore_folders"),
|
||||
restore_homeassistant=msg["restore_homeassistant"],
|
||||
)
|
||||
except BackupNotFound:
|
||||
connection.send_error(msg["id"], "backup_not_found", "Backup not found")
|
||||
except IncorrectPasswordError:
|
||||
connection.send_error(msg["id"], "password_incorrect", "Incorrect password")
|
||||
else:
|
||||
@@ -179,6 +181,8 @@ async def handle_can_decrypt_on_download(
|
||||
agent_id=msg["agent_id"],
|
||||
password=msg.get("password"),
|
||||
)
|
||||
except BackupNotFound:
|
||||
connection.send_error(msg["id"], "backup_not_found", "Backup not found")
|
||||
except IncorrectPasswordError:
|
||||
connection.send_error(msg["id"], "password_incorrect", "Incorrect password")
|
||||
except DecryptOnDowloadNotSupported:
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
 import datetime
 import logging
 import platform
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any

 from bleak_retry_connector import BleakSlotManager
 from bluetooth_adapters import (
@@ -80,6 +80,7 @@ from .const import (
     CONF_DETAILS,
     CONF_PASSIVE,
     CONF_SOURCE_CONFIG_ENTRY_ID,
+    CONF_SOURCE_DEVICE_ID,
     CONF_SOURCE_DOMAIN,
     CONF_SOURCE_MODEL,
     DOMAIN,
@@ -297,7 +298,11 @@ async def async_discover_adapters(


 async def async_update_device(
-    hass: HomeAssistant, entry: ConfigEntry, adapter: str, details: AdapterDetails
+    hass: HomeAssistant,
+    entry: ConfigEntry,
+    adapter: str,
+    details: AdapterDetails,
+    via_device_id: str | None = None,
 ) -> None:
     """Update device registry entry.

@@ -306,7 +311,8 @@ async def async_update_device(
     update the device with the new location so they can
     figure out where the adapter is.
     """
-    dr.async_get(hass).async_get_or_create(
+    device_registry = dr.async_get(hass)
+    device_entry = device_registry.async_get_or_create(
         config_entry_id=entry.entry_id,
         name=adapter_human_name(adapter, details[ADAPTER_ADDRESS]),
         connections={(dr.CONNECTION_BLUETOOTH, details[ADAPTER_ADDRESS])},
@@ -315,6 +321,11 @@
         sw_version=details.get(ADAPTER_SW_VERSION),
         hw_version=details.get(ADAPTER_HW_VERSION),
     )
+    if via_device_id and (via_device_entry := device_registry.async_get(via_device_id)):
+        kwargs: dict[str, Any] = {"via_device_id": via_device_id}
+        if not device_entry.area_id and via_device_entry.area_id:
+            kwargs["area_id"] = via_device_entry.area_id
+        device_registry.async_update_device(device_entry.id, **kwargs)


 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@@ -349,6 +360,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             entry,
             source_entry.title,
             details,
+            entry.data.get(CONF_SOURCE_DEVICE_ID),
         )
         return True
     manager = _get_manager(hass)
@@ -181,10 +181,16 @@
     source_domain: str | None = None,
     source_model: str | None = None,
     source_config_entry_id: str | None = None,
+    source_device_id: str | None = None,
 ) -> CALLBACK_TYPE:
     """Register a BleakScanner."""
     return _get_manager(hass).async_register_hass_scanner(
-        scanner, connection_slots, source_domain, source_model, source_config_entry_id
+        scanner,
+        connection_slots,
+        source_domain,
+        source_model,
+        source_config_entry_id,
+        source_device_id,
     )
@@ -37,6 +37,7 @@ from .const import (
     CONF_PASSIVE,
     CONF_SOURCE,
     CONF_SOURCE_CONFIG_ENTRY_ID,
+    CONF_SOURCE_DEVICE_ID,
     CONF_SOURCE_DOMAIN,
     CONF_SOURCE_MODEL,
     DOMAIN,
@@ -139,7 +140,7 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
                 title=adapter_title(adapter, details), data={}
             )

-        configured_addresses = self._async_current_ids()
+        configured_addresses = self._async_current_ids(include_ignore=False)
         bluetooth_adapters = get_adapters()
         await bluetooth_adapters.refresh()
         self._adapters = bluetooth_adapters.adapters
@@ -154,12 +155,8 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
             and not (system == "Linux" and details[ADAPTER_ADDRESS] == DEFAULT_ADDRESS)
         ]
         if not unconfigured_adapters:
-            ignored_adapters = len(
-                self._async_current_entries(include_ignore=True)
-            ) - len(self._async_current_entries(include_ignore=False))
             return self.async_abort(
                 reason="no_adapters",
-                description_placeholders={"ignored_adapters": str(ignored_adapters)},
             )
         if len(unconfigured_adapters) == 1:
             self._adapter = list(self._adapters)[0]
@@ -194,6 +191,7 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
             CONF_SOURCE_MODEL: user_input[CONF_SOURCE_MODEL],
             CONF_SOURCE_DOMAIN: user_input[CONF_SOURCE_DOMAIN],
             CONF_SOURCE_CONFIG_ENTRY_ID: user_input[CONF_SOURCE_CONFIG_ENTRY_ID],
+            CONF_SOURCE_DEVICE_ID: user_input[CONF_SOURCE_DEVICE_ID],
         }
         self._abort_if_unique_id_configured(updates=data)
         manager = get_manager()
@@ -22,7 +22,7 @@ CONF_SOURCE: Final = "source"
 CONF_SOURCE_DOMAIN: Final = "source_domain"
 CONF_SOURCE_MODEL: Final = "source_model"
 CONF_SOURCE_CONFIG_ENTRY_ID: Final = "source_config_entry_id"
-
+CONF_SOURCE_DEVICE_ID: Final = "source_device_id"

 SOURCE_LOCAL: Final = "local"
@@ -25,6 +25,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from .const import (
     CONF_SOURCE,
     CONF_SOURCE_CONFIG_ENTRY_ID,
+    CONF_SOURCE_DEVICE_ID,
     CONF_SOURCE_DOMAIN,
     CONF_SOURCE_MODEL,
     DOMAIN,
@@ -254,6 +255,7 @@ class HomeAssistantBluetoothManager(BluetoothManager):
         source_domain: str | None = None,
         source_model: str | None = None,
         source_config_entry_id: str | None = None,
+        source_device_id: str | None = None,
     ) -> CALLBACK_TYPE:
         """Register a scanner."""
         cancel = self.async_register_scanner(scanner, connection_slots)
@@ -261,9 +263,6 @@ class HomeAssistantBluetoothManager(BluetoothManager):
             isinstance(scanner, BaseHaRemoteScanner)
             and source_domain
             and source_config_entry_id
-            and not self.hass.config_entries.async_entry_for_domain_unique_id(
-                DOMAIN, scanner.source
-            )
         ):
             self.hass.async_create_task(
                 self.hass.config_entries.flow.async_init(
@@ -274,6 +273,7 @@ class HomeAssistantBluetoothManager(BluetoothManager):
                         CONF_SOURCE_DOMAIN: source_domain,
                         CONF_SOURCE_MODEL: source_model,
                         CONF_SOURCE_CONFIG_ENTRY_ID: source_config_entry_id,
+                        CONF_SOURCE_DEVICE_ID: source_device_id,
                     },
                 )
             )
@@ -16,11 +16,11 @@
   "quality_scale": "internal",
   "requirements": [
     "bleak==0.22.3",
-    "bleak-retry-connector==3.8.0",
-    "bluetooth-adapters==0.21.1",
+    "bleak-retry-connector==3.8.1",
+    "bluetooth-adapters==0.21.4",
     "bluetooth-auto-recovery==1.4.2",
-    "bluetooth-data-tools==1.22.0",
-    "dbus-fast==2.30.2",
-    "habluetooth==3.17.1"
+    "bluetooth-data-tools==1.23.4",
+    "dbus-fast==2.33.0",
+    "habluetooth==3.21.1"
   ]
 }
@@ -23,7 +23,7 @@
     },
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
-      "no_adapters": "No unconfigured Bluetooth adapters found. There are {ignored_adapters} ignored adapters."
+      "no_adapters": "No unconfigured Bluetooth adapters found."
     }
   },
   "options": {
@@ -132,7 +132,7 @@ class BTHomeConfigFlow(ConfigFlow, domain=DOMAIN):

             return self._async_get_or_create_entry()

-        current_addresses = self._async_current_ids()
+        current_addresses = self._async_current_ids(include_ignore=False)
         for discovery_info in async_discovered_service_info(self.hass, False):
             address = discovery_info.address
             if address in current_addresses or address in self._discovered_devices:
@@ -1175,12 +1175,17 @@ async def async_handle_snapshot_service(
             f"Cannot write `{snapshot_file}`, no access to path; `allowlist_external_dirs` may need to be adjusted in `configuration.yaml`"
         )

-    async with asyncio.timeout(CAMERA_IMAGE_TIMEOUT):
-        image = (
-            await _async_get_stream_image(camera, wait_for_next_keyframe=True)
-            if camera.use_stream_for_stills
-            else await camera.async_camera_image()
-        )
+    try:
+        async with asyncio.timeout(CAMERA_IMAGE_TIMEOUT):
+            image = (
+                await _async_get_stream_image(camera, wait_for_next_keyframe=True)
+                if camera.use_stream_for_stills
+                else await camera.async_camera_image()
+            )
+    except TimeoutError as err:
+        raise HomeAssistantError(
+            f"Unable to get snapshot: Timed out after {CAMERA_IMAGE_TIMEOUT} seconds"
+        ) from err

     if image is None:
         return
@@ -1194,7 +1199,7 @@
     try:
         await hass.async_add_executor_job(_write_image, snapshot_file, image)
     except OSError as err:
-        _LOGGER.error("Can't write image to file: %s", err)
+        raise HomeAssistantError(f"Can't write image to file: {err}") from err


 async def async_handle_play_stream_service(
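The snapshot service now catches the `TimeoutError` that `asyncio.timeout()` raises when the deadline passes and re-raises it as a user-facing `HomeAssistantError`. A standalone sketch of that conversion (stand-in exception class and an arbitrary timeout value; requires Python 3.11+ for `asyncio.timeout`):

```python
# Sketch: convert the TimeoutError from asyncio.timeout() into a domain error.
import asyncio

CAMERA_IMAGE_TIMEOUT = 1  # illustrative value; HA defines its own constant


class HomeAssistantError(Exception):
    """Stand-in for homeassistant.exceptions.HomeAssistantError."""


async def get_snapshot() -> bytes:
    try:
        async with asyncio.timeout(CAMERA_IMAGE_TIMEOUT):
            await asyncio.sleep(5)  # a camera that never answers
            return b""
    except TimeoutError as err:
        raise HomeAssistantError(
            f"Unable to get snapshot: Timed out after {CAMERA_IMAGE_TIMEOUT} seconds"
        ) from err


try:
    asyncio.run(get_snapshot())
except HomeAssistantError as err:
    print(err)
```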
@@ -29,6 +29,7 @@ from homeassistant.components.google_assistant import helpers as google_helpers
 from homeassistant.components.homeassistant import exposed_entities
 from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
 from homeassistant.components.http.data_validator import RequestDataValidator
+from homeassistant.components.system_health import get_info as get_system_health_info
 from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
@@ -107,6 +108,7 @@ def async_setup(hass: HomeAssistant) -> None:
     hass.http.register_view(CloudRegisterView)
     hass.http.register_view(CloudResendConfirmView)
     hass.http.register_view(CloudForgotPasswordView)
+    hass.http.register_view(DownloadSupportPackageView)

     _CLOUD_ERRORS.update(
         {
@@ -389,6 +391,59 @@ class CloudForgotPasswordView(HomeAssistantView):
         return self.json_message("ok")


+class DownloadSupportPackageView(HomeAssistantView):
+    """Download support package view."""
+
+    url = "/api/cloud/support_package"
+    name = "api:cloud:support_package"
+
+    def _generate_markdown(
+        self, hass_info: dict[str, Any], domains_info: dict[str, dict[str, str]]
+    ) -> str:
+        def get_domain_table_markdown(domain_info: dict[str, Any]) -> str:
+            if len(domain_info) == 0:
+                return "No information available\n"
+
+            markdown = ""
+            first = True
+            for key, value in domain_info.items():
+                markdown += f"{key} | {value}\n"
+                if first:
+                    markdown += "--- | ---\n"
+                    first = False
+            return markdown + "\n"
+
+        markdown = "## System Information\n\n"
+        markdown += get_domain_table_markdown(hass_info)
+
+        for domain, domain_info in domains_info.items():
+            domain_info_md = get_domain_table_markdown(domain_info)
+            markdown += (
+                f"<details><summary>{domain}</summary>\n\n"
+                f"{domain_info_md}"
+                "</details>\n\n"
+            )
+
+        return markdown
+
+    async def get(self, request: web.Request) -> web.Response:
+        """Download support package file."""
+
+        hass = request.app[KEY_HASS]
+        domain_health = await get_system_health_info(hass)
+
+        hass_info = domain_health.pop("homeassistant", {})
+        markdown = self._generate_markdown(hass_info, domain_health)
+
+        return web.Response(
+            body=markdown,
+            content_type="text/markdown",
+            headers={
+                "Content-Disposition": 'attachment; filename="support_package.md"'
+            },
+        )
+
+
 @websocket_api.require_admin
 @websocket_api.websocket_command({vol.Required("type"): "cloud/remove_data"})
 @websocket_api.async_response
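The markdown generator above emits the `--- | ---` separator after the first key/value row, so each section renders as a two-column table with the first pair acting as the header. A standalone copy of that helper with sample output (illustrative values):

```python
# Standalone copy of the table helper from DownloadSupportPackageView above,
# run on sample data to show the markdown it produces.
from typing import Any


def get_domain_table_markdown(domain_info: dict[str, Any]) -> str:
    if not domain_info:
        return "No information available\n"
    markdown = ""
    first = True
    for key, value in domain_info.items():
        markdown += f"{key} | {value}\n"
        if first:
            markdown += "--- | ---\n"
            first = False
    return markdown + "\n"


print(get_domain_table_markdown({"version": "2025.2.0", "arch": "aarch64"}))
# version | 2025.2.0
# --- | ---
# arch | aarch64
```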
@@ -302,7 +302,8 @@ def config_entries_progress(
         [
             flw
             for flw in hass.config_entries.flow.async_progress()
-            if flw["context"]["source"] != config_entries.SOURCE_USER
+            if flw["context"]["source"]
+            not in (config_entries.SOURCE_RECONFIGURE, config_entries.SOURCE_USER)
         ],
     )
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["hassil==2.2.0", "home-assistant-intents==2025.1.28"]
+  "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.2.5"]
 }
@@ -14,7 +14,7 @@
   ],
   "quality_scale": "internal",
   "requirements": [
-    "aiodhcpwatcher==1.0.2",
+    "aiodhcpwatcher==1.0.3",
     "aiodiscover==2.1.0",
     "cached-ipaddress==0.8.0"
   ]
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ecovacs",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.10", "deebot-client==11.1.0b2"]
+  "requirements": ["py-sucks==0.9.10", "deebot-client==12.0.0"]
 }
@@ -22,5 +22,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["eq3btsmart"],
-  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.6.0"]
+  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.7.0"]
 }
@@ -28,6 +28,7 @@ def async_connect_scanner(
     entry_data: RuntimeEntryData,
     cli: APIClient,
     device_info: DeviceInfo,
+    device_id: str,
 ) -> CALLBACK_TYPE:
     """Connect scanner."""
     client_data = connect_scanner(cli, device_info, entry_data.available)
@@ -45,6 +46,7 @@
             source_domain=DOMAIN,
             source_model=device_info.model,
             source_config_entry_id=entry_data.entry_id,
+            source_device_id=device_id,
         ),
         scanner.async_setup(),
     ],
@@ -425,7 +425,9 @@ class ESPHomeManager:

         if device_info.bluetooth_proxy_feature_flags_compat(api_version):
             entry_data.disconnect_callbacks.add(
-                async_connect_scanner(hass, entry_data, cli, device_info)
+                async_connect_scanner(
+                    hass, entry_data, cli, device_info, self.device_id
+                )
             )
         else:
             bluetooth.async_remove_scanner(hass, device_info.mac_address)
@@ -571,7 +573,9 @@ def _async_setup_device_registry(

     configuration_url = None
     if device_info.webserver_port > 0:
-        configuration_url = f"http://{entry.data['host']}:{device_info.webserver_port}"
+        entry_host = entry.data["host"]
+        host = f"[{entry_host}]" if ":" in entry_host else entry_host
+        configuration_url = f"http://{host}:{device_info.webserver_port}"
     elif (
         (dashboard := async_get_dashboard(hass))
         and dashboard.data
@@ -18,7 +18,7 @@
   "requirements": [
     "aioesphomeapi==29.0.0",
     "esphome-dashboard-api==1.2.3",
-    "bleak-esphome==2.6.0"
+    "bleak-esphome==2.7.0"
   ],
   "zeroconf": ["_esphomelib._tcp.local."]
 }
@@ -21,5 +21,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20250131.0"]
+  "requirements": ["home-assistant-frontend==20250205.0"]
 }
@@ -11,7 +11,7 @@ from aiohttp import ClientSession, ClientTimeout, StreamReader
 from aiohttp.client_exceptions import ClientError, ClientResponseError
 from google_drive_api.api import AbstractAuth, GoogleDriveApi

-from homeassistant.components.backup import AgentBackup
+from homeassistant.components.backup import AgentBackup, suggested_filename
 from homeassistant.config_entries import ConfigEntryState
 from homeassistant.const import CONF_ACCESS_TOKEN
 from homeassistant.exceptions import (
@@ -132,7 +132,7 @@ class DriveClient:
         """Upload a backup."""
         folder_id, _ = await self.async_create_ha_root_folder_if_not_exists()
         backup_metadata = {
-            "name": f"{backup.name} {backup.date}.tar",
+            "name": suggested_filename(backup),
             "description": json.dumps(backup.as_dict()),
             "parents": [folder_id],
             "properties": {
@@ -78,7 +78,7 @@ class GoveeConfigFlow(ConfigFlow, domain=DOMAIN):
                 title=title, data={CONF_DEVICE_TYPE: device.device_type}
             )

-        current_addresses = self._async_current_ids()
+        current_addresses = self._async_current_ids(include_ignore=False)
         for discovery_info in async_discovered_service_info(self.hass, False):
             address = discovery_info.address
             if address in current_addresses or address in self._discovered_devices:
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/habitica",
   "iot_class": "cloud_polling",
   "loggers": ["habiticalib"],
-  "requirements": ["habiticalib==0.3.3"]
+  "requirements": ["habiticalib==0.3.4"]
 }
@@ -20,6 +20,7 @@ from aiohasupervisor.models import (
     backups as supervisor_backups,
     mounts as supervisor_mounts,
 )
+from aiohasupervisor.models.backups import LOCATION_CLOUD_BACKUP, LOCATION_LOCAL_STORAGE

 from homeassistant.components.backup import (
     DATA_MANAGER,
@@ -27,15 +28,19 @@ from homeassistant.components.backup import (
     AgentBackup,
     BackupAgent,
     BackupManagerError,
+    BackupNotFound,
     BackupReaderWriter,
     BackupReaderWriterError,
     CreateBackupEvent,
+    CreateBackupStage,
     CreateBackupState,
     Folder,
     IdleEvent,
     IncorrectPasswordError,
     ManagerBackup,
     NewBackup,
     RestoreBackupEvent,
+    RestoreBackupStage,
     RestoreBackupState,
     WrittenBackup,
     async_get_manager as async_get_backup_manager,
@@ -47,12 +52,11 @@ from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.util import dt as dt_util
+from homeassistant.util.enum import try_parse_enum

 from .const import DOMAIN, EVENT_SUPERVISOR_EVENT
 from .handler import get_supervisor_client

-LOCATION_CLOUD_BACKUP = ".cloud_backup"
-LOCATION_LOCAL = ".local"
 MOUNT_JOBS = ("mount_manager_create_mount", "mount_manager_remove_mount")
 RESTORE_JOB_ID_ENV = "SUPERVISOR_RESTORE_JOB_ID"
 # Set on backups automatically created when updating an addon
@@ -67,7 +71,9 @@ async def async_get_backup_agents(
     """Return the hassio backup agents."""
     client = get_supervisor_client(hass)
     mounts = await client.mounts.info()
-    agents: list[BackupAgent] = [SupervisorBackupAgent(hass, "local", None)]
+    agents: list[BackupAgent] = [
+        SupervisorBackupAgent(hass, "local", LOCATION_LOCAL_STORAGE)
+    ]
     for mount in mounts.mounts:
         if mount.usage is not supervisor_mounts.MountUsage.BACKUP:
             continue
@@ -107,7 +113,7 @@ def async_register_backup_agents_listener(


 def _backup_details_to_agent_backup(
-    details: supervisor_backups.BackupComplete, location: str | None
+    details: supervisor_backups.BackupComplete, location: str
 ) -> AgentBackup:
     """Convert a supervisor backup details object to an agent backup."""
     homeassistant_included = details.homeassistant is not None
@@ -120,7 +126,6 @@ def _backup_details_to_agent_backup(
         for addon in details.addons
     ]
     extra_metadata = details.extra or {}
-    location = location or LOCATION_LOCAL
     return AgentBackup(
         addons=addons,
         backup_id=details.slug,
@@ -143,7 +148,7 @@ class SupervisorBackupAgent(BackupAgent):

     domain = DOMAIN

-    def __init__(self, hass: HomeAssistant, name: str, location: str | None) -> None:
+    def __init__(self, hass: HomeAssistant, name: str, location: str) -> None:
         """Initialize the backup agent."""
         super().__init__()
         self._hass = hass
@@ -158,10 +163,15 @@ class SupervisorBackupAgent(BackupAgent):
         **kwargs: Any,
     ) -> AsyncIterator[bytes]:
         """Download a backup file."""
-        return await self._client.backups.download_backup(
-            backup_id,
-            options=supervisor_backups.DownloadBackupOptions(location=self.location),
-        )
+        try:
+            return await self._client.backups.download_backup(
+                backup_id,
+                options=supervisor_backups.DownloadBackupOptions(
+                    location=self.location
+                ),
+            )
+        except SupervisorNotFoundError as err:
+            raise BackupNotFound from err

     async def async_upload_backup(
         self,
@@ -196,7 +206,7 @@ class SupervisorBackupAgent(BackupAgent):
         backup_list = await self._client.backups.list()
         result = []
         for backup in backup_list:
-            if not backup.locations or self.location not in backup.locations:
+            if self.location not in backup.location_attributes:
                 continue
             details = await self._client.backups.backup_info(backup.slug)
             result.append(_backup_details_to_agent_backup(details, self.location))
@@ -212,7 +222,7 @@ class SupervisorBackupAgent(BackupAgent):
             details = await self._client.backups.backup_info(backup_id)
         except SupervisorNotFoundError:
             return None
-        if self.location not in details.locations:
+        if self.location not in details.location_attributes:
             return None
         return _backup_details_to_agent_backup(details, self.location)
@@ -285,8 +295,8 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         # will be handled by async_upload_backup.
         # If the lists are the same length, it does not matter which one we send,
         # we send the encrypted list to have a well defined behavior.
-        encrypted_locations: list[str | None] = []
-        decrypted_locations: list[str | None] = []
+        encrypted_locations: list[str] = []
+        decrypted_locations: list[str] = []
         agents_settings = manager.config.data.agents
         for hassio_agent in hassio_agents:
             if password is not None:
@@ -336,31 +346,43 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             self._async_wait_for_backup(
                 backup,
                 locations,
                 on_progress=on_progress,
                 remove_after_upload=locations == [LOCATION_CLOUD_BACKUP],
             ),
             name="backup_manager_create_backup",
             eager_start=False,  # To ensure the task is not started before we return
         )

-        return (NewBackup(backup_job_id=backup.job_id), backup_task)
+        return (NewBackup(backup_job_id=backup.job_id.hex), backup_task)

     async def _async_wait_for_backup(
         self,
         backup: supervisor_backups.NewBackup,
-        locations: list[str | None],
+        locations: list[str],
         *,
         on_progress: Callable[[CreateBackupEvent], None],
         remove_after_upload: bool,
     ) -> WrittenBackup:
         """Wait for a backup to complete."""
         backup_complete = asyncio.Event()
         backup_id: str | None = None
+        create_errors: list[dict[str, str]] = []

         @callback
         def on_job_progress(data: Mapping[str, Any]) -> None:
             """Handle backup progress."""
             nonlocal backup_id
+            if not (stage := try_parse_enum(CreateBackupStage, data.get("stage"))):
+                _LOGGER.debug("Unknown create stage: %s", data.get("stage"))
+            else:
+                on_progress(
+                    CreateBackupEvent(
+                        reason=None, stage=stage, state=CreateBackupState.IN_PROGRESS
+                    )
+                )
             if data.get("done") is True:
                 backup_id = data.get("reference")
+                create_errors.extend(data.get("errors", []))
                 backup_complete.set()

         unsub = self._async_listen_job_events(backup.job_id, on_job_progress)
@@ -369,8 +391,11 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             await backup_complete.wait()
         finally:
             unsub()
-        if not backup_id:
-            raise BackupReaderWriterError("Backup failed")
+        if not backup_id or create_errors:
+            # We should add more specific error handling here in the future
+            raise BackupReaderWriterError(
+                f"Backup failed: {create_errors or 'no backup_id'}"
+            )

         async def open_backup() -> AsyncIterator[bytes]:
             try:
@@ -483,7 +508,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             else None
         )

-        restore_location: str | None
+        restore_location: str
         if manager.backup_agents[agent_id].domain != DOMAIN:
             # Download the backup to the supervisor. Supervisor will clean up the backup
             # two days after the restore is done.
@@ -509,6 +534,8 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
                     location=restore_location,
                 ),
             )
+        except SupervisorNotFoundError as err:
+            raise BackupNotFound from err
         except SupervisorBadRequestError as err:
             # Supervisor currently does not transmit machine parsable error types
             message = err.args[0]
@@ -517,17 +544,30 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             raise HomeAssistantError(message) from err

         restore_complete = asyncio.Event()
+        restore_errors: list[dict[str, str]] = []

         @callback
         def on_job_progress(data: Mapping[str, Any]) -> None:
             """Handle backup restore progress."""
+            if not (stage := try_parse_enum(RestoreBackupStage, data.get("stage"))):
+                _LOGGER.debug("Unknown restore stage: %s", data.get("stage"))
+            else:
+                on_progress(
+                    RestoreBackupEvent(
+                        reason=None, stage=stage, state=RestoreBackupState.IN_PROGRESS
+                    )
+                )
             if data.get("done") is True:
                 restore_complete.set()
+                restore_errors.extend(data.get("errors", []))

         unsub = self._async_listen_job_events(job.job_id, on_job_progress)
         try:
             await self._get_job_state(job.job_id, on_job_progress)
             await restore_complete.wait()
+            if restore_errors:
+                # We should add more specific error handling here in the future
+                raise BackupReaderWriterError(f"Restore failed: {restore_errors}")
         finally:
             unsub()
@@ -537,28 +577,52 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         on_progress: Callable[[RestoreBackupEvent | IdleEvent], None],
     ) -> None:
         """Check restore status after core restart."""
-        if not (restore_job_id := os.environ.get(RESTORE_JOB_ID_ENV)):
+        if not (restore_job_str := os.environ.get(RESTORE_JOB_ID_ENV)):
             _LOGGER.debug("No restore job ID found in environment")
             return

+        restore_job_id = UUID(restore_job_str)
         _LOGGER.debug("Found restore job ID %s in environment", restore_job_id)

+        sent_event = False
+
         @callback
         def on_job_progress(data: Mapping[str, Any]) -> None:
             """Handle backup restore progress."""
+            nonlocal sent_event
+
+            if not (stage := try_parse_enum(RestoreBackupStage, data.get("stage"))):
+                _LOGGER.debug("Unknown restore stage: %s", data.get("stage"))
+
             if data.get("done") is not True:
-                on_progress(
-                    RestoreBackupEvent(
-                        reason="", stage=None, state=RestoreBackupState.IN_PROGRESS
+                if stage or not sent_event:
+                    sent_event = True
+                    on_progress(
+                        RestoreBackupEvent(
+                            reason=None,
+                            stage=stage,
+                            state=RestoreBackupState.IN_PROGRESS,
+                        )
                     )
-                )
                 return

-            on_progress(
-                RestoreBackupEvent(
-                    reason="", stage=None, state=RestoreBackupState.COMPLETED
+            restore_errors = data.get("errors", [])
+            if restore_errors:
+                _LOGGER.warning("Restore backup failed: %s", restore_errors)
+                # We should add more specific error handling here in the future
+                on_progress(
+                    RestoreBackupEvent(
+                        reason="unknown_error",
+                        stage=stage,
+                        state=RestoreBackupState.FAILED,
+                    )
                 )
-            )
+            else:
+                on_progress(
+                    RestoreBackupEvent(
+                        reason=None, stage=stage, state=RestoreBackupState.COMPLETED
+                    )
+                )
             on_progress(IdleEvent())
             unsub()
@@ -571,7 +635,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):

     @callback
     def _async_listen_job_events(
-        self, job_id: str, on_event: Callable[[Mapping[str, Any]], None]
+        self, job_id: UUID, on_event: Callable[[Mapping[str, Any]], None]
     ) -> Callable[[], None]:
         """Listen for job events."""
@@ -586,7 +650,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             if (
                 data.get("event") != "job"
                 or not (event_data := data.get("data"))
-                or event_data.get("uuid") != job_id
+                or event_data.get("uuid") != job_id.hex
             ):
                 return
             on_event(event_data)
@@ -597,10 +661,10 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         return unsub

     async def _get_job_state(
-        self, job_id: str, on_event: Callable[[Mapping[str, Any]], None]
+        self, job_id: UUID, on_event: Callable[[Mapping[str, Any]], None]
     ) -> None:
         """Poll a job for its state."""
-        job = await self._client.jobs.get_job(UUID(job_id))
+        job = await self._client.jobs.get_job(job_id)
         _LOGGER.debug("Job state: %s", job)
         on_event(job.to_dict())
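Several of these hunks follow from aiohasupervisor 0.3.0 typing Supervisor job IDs as `uuid.UUID`, while the `job` events on the WebSocket still carry the ID as a bare hex string, hence the explicit `.hex` in comparisons and in the `NewBackup` payload. A small sketch of the mismatch (the ID value below is hypothetical):

```python
# Sketch: a UUID never compares equal to its string form, so event payloads
# must be matched against job_id.hex (or parsed back into a UUID).
from uuid import UUID

job_id = UUID("b94e1f4e1f4e4e0f9a1db94e1f4e4e0f")  # hypothetical job ID
event_uuid = "b94e1f4e1f4e4e0f9a1db94e1f4e4e0f"  # as sent in the "job" event

assert event_uuid != job_id          # str never equals UUID
assert event_uuid == job_id.hex      # compare on the hex form instead
assert UUID(event_uuid) == job_id    # or parse the event value
```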
@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/hassio",
   "iot_class": "local_polling",
   "quality_scale": "internal",
-  "requirements": ["aiohasupervisor==0.2.2b6"],
+  "requirements": ["aiohasupervisor==0.3.0"],
   "single_config_entry": true
 }
@@ -1,6 +1,7 @@
 """Constants for the homee integration."""

 from homeassistant.const import (
+    DEGREE,
     LIGHT_LUX,
     PERCENTAGE,
     REVOLUTIONS_PER_MINUTE,
@@ -32,6 +33,7 @@ HOMEE_UNIT_TO_HA_UNIT = {
     "W": UnitOfPower.WATT,
     "m/s": UnitOfSpeed.METERS_PER_SECOND,
     "km/h": UnitOfSpeed.KILOMETERS_PER_HOUR,
+    "°": DEGREE,
     "°F": UnitOfTemperature.FAHRENHEIT,
     "°C": UnitOfTemperature.CELSIUS,
     "K": UnitOfTemperature.KELVIN,
@@ -51,7 +53,7 @@ OPEN_CLOSE_MAP_REVERSED = {
     0.0: "closed",
     1.0: "open",
     2.0: "partial",
-    3.0: "cosing",
+    3.0: "closing",
     4.0: "opening",
 }
 WINDOW_MAP = {
@@ -78,6 +78,7 @@ from .const import (
     CONF_VIDEO_CODEC,
     CONF_VIDEO_MAP,
     CONF_VIDEO_PACKET_SIZE,
+    CONF_VIDEO_PROFILE_NAMES,
     DEFAULT_AUDIO_CODEC,
     DEFAULT_AUDIO_MAP,
     DEFAULT_AUDIO_PACKET_SIZE,
@@ -90,6 +91,7 @@ from .const import (
     DEFAULT_VIDEO_CODEC,
     DEFAULT_VIDEO_MAP,
     DEFAULT_VIDEO_PACKET_SIZE,
+    DEFAULT_VIDEO_PROFILE_NAMES,
     DOMAIN,
     FEATURE_ON_OFF,
     FEATURE_PLAY_PAUSE,
@@ -163,6 +165,9 @@ CAMERA_SCHEMA = BASIC_INFO_SCHEMA.extend(
         vol.Optional(CONF_VIDEO_CODEC, default=DEFAULT_VIDEO_CODEC): vol.In(
             VALID_VIDEO_CODECS
         ),
+        vol.Optional(CONF_VIDEO_PROFILE_NAMES, default=DEFAULT_VIDEO_PROFILE_NAMES): [
+            cv.string
+        ],
         vol.Optional(
             CONF_AUDIO_PACKET_SIZE, default=DEFAULT_AUDIO_PACKET_SIZE
         ): cv.positive_int,
@@ -25,7 +25,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeWizardConfigEntry) -

    api: HomeWizardEnergy

    if token := entry.data.get(CONF_TOKEN):
    is_battery = entry.unique_id.startswith("HWE-BAT") if entry.unique_id else False

    if (token := entry.data.get(CONF_TOKEN)) and is_battery:
        api = HomeWizardEnergyV2(
            entry.data[CONF_IP_ADDRESS],
            token=token,
@@ -37,7 +39,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeWizardConfigEntry) -
            clientsession=async_get_clientsession(hass),
        )

    await async_check_v2_support_and_create_issue(hass, entry)
    if is_battery:
        await async_check_v2_support_and_create_issue(hass, entry)

    coordinator = HWEnergyDeviceUpdateCoordinator(hass, api)
    try:

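Note: the net effect of this hunk is that the v2 API is only used when a token is stored and the device identifies as a battery. A simplified sketch of that selection rule (function and return values are illustrative, not the real classes):

def pick_api_version(unique_id: str | None, token: str | None) -> str:
    """Mirror the gating shown above: v2 needs a token AND an HWE-BAT device."""
    is_battery = unique_id.startswith("HWE-BAT") if unique_id else False
    return "v2" if token and is_battery else "v1"

assert pick_api_version("HWE-BAT/abc123", "secret") == "v2"
assert pick_api_version("HWE-P1/abc123", "secret") == "v1"  # token alone is not enough
assert pick_api_version("HWE-BAT/abc123", None) == "v1"     # battery without token
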
@@ -272,9 +272,14 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
    ) -> ConfigFlowResult:
        """Handle reconfiguration of the integration."""
        errors: dict[str, str] = {}
        reconfigure_entry = self._get_reconfigure_entry()

        if user_input:
            try:
                device_info = await async_try_connect(user_input[CONF_IP_ADDRESS])
                device_info = await async_try_connect(
                    user_input[CONF_IP_ADDRESS],
                    token=reconfigure_entry.data.get(CONF_TOKEN),
                )

            except RecoverableError as ex:
                LOGGER.error(ex)
@@ -288,7 +293,6 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
                    self._get_reconfigure_entry(),
                    data_updates=user_input,
                )
        reconfigure_entry = self._get_reconfigure_entry()
        return self.async_show_form(
            step_id="reconfigure",
            data_schema=vol.Schema(
@@ -306,7 +310,7 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
    )


async def async_try_connect(ip_address: str) -> Device:
async def async_try_connect(ip_address: str, token: str | None = None) -> Device:
    """Try to connect.

    Make connection with device to test the connection
@@ -317,7 +321,7 @@ async def async_try_connect(ip_address: str) -> Device:

    # Determine if device is v1 or v2 capable
    if await has_v2_api(ip_address):
        energy_api = HomeWizardEnergyV2(ip_address)
        energy_api = HomeWizardEnergyV2(ip_address, token=token)
    else:
        energy_api = HomeWizardEnergyV1(ip_address)

@@ -87,7 +87,7 @@ class IdasenDeskConfigFlow(ConfigFlow, domain=DOMAIN):
        if discovery := self._discovery_info:
            self._discovered_devices[discovery.address] = discovery
        else:
            current_addresses = self._async_current_ids()
            current_addresses = self._async_current_ids(include_ignore=False)
            for discovery in async_discovered_service_info(self.hass):
                if (
                    discovery.address in current_addresses

@@ -72,7 +72,7 @@ class INKBIRDConfigFlow(ConfigFlow, domain=DOMAIN):
                title=self._discovered_devices[address], data={}
            )

        current_addresses = self._async_current_ids()
        current_addresses = self._async_current_ids(include_ignore=False)
        for discovery_info in async_discovered_service_info(self.hass, False):
            address = discovery_info.address
            if address in current_addresses or address in self._discovered_devices:

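Note: the same one-line change recurs in the Bluetooth-discovery config flows above and in several more below (Mopeka, Oral-B, Qingping, ScreenLogic, SensorPush, Switchbot). Passing include_ignore=False drops ignored config entries from the "already configured" set, so a device the user previously ignored can be offered in discovery again. A rough sketch of the effect, with stand-in data structures:

from dataclasses import dataclass

@dataclass
class Entry:
    unique_id: str
    source: str  # e.g. "user", "bluetooth", or "ignore"

entries = [Entry("AA:BB", "user"), Entry("CC:DD", "ignore")]

def current_ids(include_ignore: bool = True) -> set[str]:
    return {e.unique_id for e in entries if include_ignore or e.source != "ignore"}

discovered = ["AA:BB", "CC:DD", "EE:FF"]
# Old behavior: the ignored CC:DD never shows up again.
assert [a for a in discovered if a not in current_ids()] == ["EE:FF"]
# New behavior: CC:DD is offered again alongside the truly new EE:FF.
assert [a for a in discovered if a not in current_ids(include_ignore=False)] == ["CC:DD", "EE:FF"]
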
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/lacrosse_view",
  "iot_class": "cloud_polling",
  "loggers": ["lacrosse_view"],
  "requirements": ["lacrosse-view==1.0.3"]
  "requirements": ["lacrosse-view==1.0.4"]
}

@@ -8,5 +8,5 @@
  "documentation": "https://www.home-assistant.io/integrations/lcn",
  "iot_class": "local_push",
  "loggers": ["pypck"],
  "requirements": ["pypck==0.8.3", "lcn-frontend==0.2.3"]
  "requirements": ["pypck==0.8.5", "lcn-frontend==0.2.3"]
}

@@ -20,5 +20,5 @@
  "documentation": "https://www.home-assistant.io/integrations/ld2410_ble",
  "integration_type": "device",
  "iot_class": "local_push",
  "requirements": ["bluetooth-data-tools==1.22.0", "ld2410-ble==0.1.1"]
  "requirements": ["bluetooth-data-tools==1.23.4", "ld2410-ble==0.1.1"]
}

@@ -35,5 +35,5 @@
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/led_ble",
  "iot_class": "local_polling",
  "requirements": ["bluetooth-data-tools==1.22.0", "led-ble==1.1.4"]
  "requirements": ["bluetooth-data-tools==1.23.4", "led-ble==1.1.6"]
}

@@ -6,7 +6,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType

from . import http
from . import http, llm_api
from .const import DOMAIN
from .session import SessionManager
from .types import MCPServerConfigEntry
@@ -25,6 +25,7 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Model Context Protocol component."""
    http.async_register(hass)
    llm_api.async_register_api(hass)
    return True

@@ -16,7 +16,7 @@ from homeassistant.helpers.selector import (
    SelectSelectorConfig,
)

from .const import DOMAIN
from .const import DOMAIN, LLM_API, LLM_API_NAME

_LOGGER = logging.getLogger(__name__)

@@ -33,6 +33,12 @@ class ModelContextServerProtocolConfigFlow(ConfigFlow, domain=DOMAIN):
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        llm_apis = {api.id: api.name for api in llm.async_get_apis(self.hass)}
        if LLM_API not in llm_apis:
            # MCP server component is not loaded yet, so make the LLM API a choice.
            llm_apis = {
                LLM_API: LLM_API_NAME,
                **llm_apis,
            }

        if user_input is not None:
            return self.async_create_entry(

@@ -2,3 +2,5 @@

DOMAIN = "mcp_server"
TITLE = "Model Context Protocol Server"
LLM_API = "stateless_assist"
LLM_API_NAME = "Stateless Assist"

homeassistant/components/mcp_server/llm_api.py (new file, 48 lines)
@@ -0,0 +1,48 @@
"""LLM API for MCP Server."""

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import llm
from homeassistant.util import yaml as yaml_util

from .const import LLM_API, LLM_API_NAME

EXPOSED_ENTITY_FIELDS = {"name", "domain", "description", "areas", "names"}


def async_register_api(hass: HomeAssistant) -> None:
    """Register the LLM API."""
    llm.async_register_api(hass, StatelessAssistAPI(hass))


class StatelessAssistAPI(llm.AssistAPI):
    """LLM API for MCP Server that provides the Assist API without state information in the prompt.

    Syncing the state information is possible, but may put unnecessary load on
    the system so we are instead providing the prompt without entity state. Since
    actions don't care about the current state, there is little quality loss.
    """

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the StatelessAssistAPI."""
        super().__init__(hass)
        self.id = LLM_API
        self.name = LLM_API_NAME

    @callback
    def _async_get_exposed_entities_prompt(
        self, llm_context: llm.LLMContext, exposed_entities: dict | None
    ) -> list[str]:
        """Return the prompt for the exposed entities."""
        prompt = []

        if exposed_entities and exposed_entities["entities"]:
            prompt.append(
                "An overview of the areas and the devices in this smart home:"
            )
            entities = [
                {k: v for k, v in entity_info.items() if k in EXPOSED_ENTITY_FIELDS}
                for entity_info in exposed_entities["entities"].values()
            ]
            prompt.append(yaml_util.dump(list(entities)))

        return prompt

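Note: the filtering step above is the heart of the stateless prompt: each exposed entity is trimmed down to EXPOSED_ENTITY_FIELDS before being dumped as YAML, so entity state never reaches the prompt. A standalone sketch of that step (using PyYAML as a stand-in for homeassistant.util.yaml):

import yaml  # stand-in for homeassistant.util.yaml

EXPOSED_ENTITY_FIELDS = {"name", "domain", "description", "areas", "names"}

exposed = {
    "entities": {
        "light.kitchen": {
            "name": "Kitchen",
            "domain": "light",
            "areas": "Kitchen",
            "state": "on",  # deliberately dropped by the filter below
        }
    }
}

entities = [
    {k: v for k, v in info.items() if k in EXPOSED_ENTITY_FIELDS}
    for info in exposed["entities"].values()
]
print(yaml.dump(entities))  # no "state" key in the resulting prompt text
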
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/mill",
  "iot_class": "local_polling",
  "loggers": ["mill", "mill_local"],
  "requirements": ["millheater==0.12.2", "mill-local==0.3.0"]
  "requirements": ["millheater==0.12.3", "mill-local==0.3.0"]
}

@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/monarchmoney",
  "iot_class": "cloud_polling",
  "requirements": ["typedmonarchmoney==0.3.1"]
  "requirements": ["typedmonarchmoney==0.4.4"]
}

@@ -111,7 +111,7 @@ class MopekaConfigFlow(ConfigFlow, domain=DOMAIN):
                data={CONF_MEDIUM_TYPE: user_input[CONF_MEDIUM_TYPE]},
            )

        current_addresses = self._async_current_ids()
        current_addresses = self._async_current_ids(include_ignore=False)
        for discovery_info in async_discovered_service_info(self.hass, False):
            address = discovery_info.address
            if address in current_addresses or address in self._discovered_devices:

@@ -28,7 +28,7 @@ class MotionMountEntity(Entity):
        self.config_entry = config_entry

        # We store the pin, as we might need it during reconnect
        self.pin = config_entry.data[CONF_PIN]
        self.pin = config_entry.data.get(CONF_PIN)

        mac = format_mac(mm.mac.hex())

@@ -485,7 +485,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
            errors,
        ):
            if is_reconfigure:
                update_password_from_user_input(
                validated_user_input = update_password_from_user_input(
                    reconfigure_entry.data.get(CONF_PASSWORD), validated_user_input
                )

@@ -378,7 +378,7 @@ class BackupInfoView(BackupOnboardingView):
        backups, _ = await manager.async_get_backups()
        return self.json(
            {
                "backups": [backup.as_frontend_json() for backup in backups.values()],
                "backups": list(backups.values()),
                "state": manager.state,
                "last_non_idle_event": manager.last_non_idle_event,
            }

@@ -2,37 +2,38 @@

from __future__ import annotations

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
import logging
from typing import cast

from kiota_abstractions.api_error import APIError
from kiota_abstractions.authentication import BaseBearerTokenAuthenticationProvider
from msgraph import GraphRequestAdapter, GraphServiceClient
from msgraph.generated.drives.item.items.items_request_builder import (
    ItemsRequestBuilder,
from onedrive_personal_sdk import OneDriveClient
from onedrive_personal_sdk.exceptions import (
    AuthenticationError,
    HttpRequestException,
    OneDriveException,
)
from msgraph.generated.models.drive_item import DriveItem
from msgraph.generated.models.folder import Folder

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import (
    OAuth2Session,
    async_get_config_entry_implementation,
)
from homeassistant.helpers.httpx_client import create_async_httpx_client
from homeassistant.helpers.instance_id import async_get as async_get_instance_id

from .api import OneDriveConfigEntryAccessTokenProvider
from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN, OAUTH_SCOPES
from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN


@dataclass
class OneDriveRuntimeData:
    """Runtime data for the OneDrive integration."""

    items: ItemsRequestBuilder
    client: OneDriveClient
    token_function: Callable[[], Awaitable[str]]
    backup_folder_id: str


@@ -47,29 +48,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) ->

    session = OAuth2Session(hass, entry, implementation)

    auth_provider = BaseBearerTokenAuthenticationProvider(
        access_token_provider=OneDriveConfigEntryAccessTokenProvider(session)
    )
    adapter = GraphRequestAdapter(
        auth_provider=auth_provider,
        client=create_async_httpx_client(hass, follow_redirects=True),
    )
    async def get_access_token() -> str:
        await session.async_ensure_token_valid()
        return cast(str, session.token[CONF_ACCESS_TOKEN])

    graph_client = GraphServiceClient(
        request_adapter=adapter,
        scopes=OAUTH_SCOPES,
    )
    assert entry.unique_id
    drive_item = graph_client.drives.by_drive_id(entry.unique_id)
    client = OneDriveClient(get_access_token, async_get_clientsession(hass))

    # get approot, will be created automatically if it does not exist
    try:
        approot = await drive_item.special.by_drive_item_id("approot").get()
    except APIError as err:
        if err.response_status_code == 403:
            raise ConfigEntryAuthFailed(
                translation_domain=DOMAIN, translation_key="authentication_failed"
            ) from err
        approot = await client.get_approot()
    except AuthenticationError as err:
        raise ConfigEntryAuthFailed(
            translation_domain=DOMAIN, translation_key="authentication_failed"
        ) from err
    except (HttpRequestException, OneDriveException, TimeoutError) as err:
        _LOGGER.debug("Failed to get approot", exc_info=True)
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
@@ -77,24 +69,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) ->
            translation_placeholders={"folder": "approot"},
        ) from err

    if approot is None or not approot.id:
        _LOGGER.debug("Failed to get approot, was None")
    instance_id = await async_get_instance_id(hass)
    backup_folder_name = f"backups_{instance_id[:8]}"
    try:
        backup_folder = await client.create_folder(
            parent_id=approot.id, name=backup_folder_name
        )
    except (HttpRequestException, OneDriveException, TimeoutError) as err:
        _LOGGER.debug("Failed to create backup folder", exc_info=True)
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="failed_to_get_folder",
            translation_placeholders={"folder": "approot"},
        )

    instance_id = await async_get_instance_id(hass)
    backup_folder_id = await _async_create_folder_if_not_exists(
        items=drive_item.items,
        base_folder_id=approot.id,
        folder=f"backups_{instance_id[:8]}",
    )
            translation_placeholders={"folder": backup_folder_name},
        ) from err

    entry.runtime_data = OneDriveRuntimeData(
        items=drive_item.items,
        backup_folder_id=backup_folder_id,
        client=client,
        token_function=get_access_token,
        backup_folder_id=backup_folder.id,
    )

    _async_notify_backup_listeners_soon(hass)
@@ -116,54 +108,3 @@ def _async_notify_backup_listeners(hass: HomeAssistant) -> None:
@callback
def _async_notify_backup_listeners_soon(hass: HomeAssistant) -> None:
    hass.loop.call_soon(_async_notify_backup_listeners, hass)


async def _async_create_folder_if_not_exists(
    items: ItemsRequestBuilder,
    base_folder_id: str,
    folder: str,
) -> str:
    """Check if a folder exists and create it if it does not exist."""
    folder_item: DriveItem | None = None

    try:
        folder_item = await items.by_drive_item_id(f"{base_folder_id}:/{folder}:").get()
    except APIError as err:
        if err.response_status_code != 404:
            _LOGGER.debug("Failed to get folder %s", folder, exc_info=True)
            raise ConfigEntryNotReady(
                translation_domain=DOMAIN,
                translation_key="failed_to_get_folder",
                translation_placeholders={"folder": folder},
            ) from err
        # is 404 not found, create folder
        _LOGGER.debug("Creating folder %s", folder)
        request_body = DriveItem(
            name=folder,
            folder=Folder(),
            additional_data={
                "@microsoft_graph_conflict_behavior": "fail",
            },
        )
        try:
            folder_item = await items.by_drive_item_id(base_folder_id).children.post(
                request_body
            )
        except APIError as create_err:
            _LOGGER.debug("Failed to create folder %s", folder, exc_info=True)
            raise ConfigEntryNotReady(
                translation_domain=DOMAIN,
                translation_key="failed_to_create_folder",
                translation_placeholders={"folder": folder},
            ) from create_err
        _LOGGER.debug("Created folder %s", folder)
    else:
        _LOGGER.debug("Found folder %s", folder)
    if folder_item is None or not folder_item.id:
        _LOGGER.debug("Failed to get folder %s, was None", folder)
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="failed_to_get_folder",
            translation_placeholders={"folder": folder},
        )
    return folder_item.id

@@ -1,53 +0,0 @@
"""API for OneDrive bound to Home Assistant OAuth."""

from typing import Any, cast

from kiota_abstractions.authentication import AccessTokenProvider, AllowedHostsValidator

from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.helpers import config_entry_oauth2_flow


class OneDriveAccessTokenProvider(AccessTokenProvider):
    """Provide OneDrive authentication tied to an OAuth2 based config entry."""

    def __init__(self) -> None:
        """Initialize OneDrive auth."""
        super().__init__()
        # currently allowing all hosts
        self._allowed_hosts_validator = AllowedHostsValidator(allowed_hosts=[])

    def get_allowed_hosts_validator(self) -> AllowedHostsValidator:
        """Retrieve the allowed hosts validator."""
        return self._allowed_hosts_validator


class OneDriveConfigFlowAccessTokenProvider(OneDriveAccessTokenProvider):
    """Provide OneDrive authentication tied to an OAuth2 based config entry."""

    def __init__(self, token: str) -> None:
        """Initialize OneDrive auth."""
        super().__init__()
        self._token = token

    async def get_authorization_token(  # pylint: disable=dangerous-default-value
        self, uri: str, additional_authentication_context: dict[str, Any] = {}
    ) -> str:
        """Return a valid authorization token."""
        return self._token


class OneDriveConfigEntryAccessTokenProvider(OneDriveAccessTokenProvider):
    """Provide OneDrive authentication tied to an OAuth2 based config entry."""

    def __init__(self, oauth_session: config_entry_oauth2_flow.OAuth2Session) -> None:
        """Initialize OneDrive auth."""
        super().__init__()
        self._oauth_session = oauth_session

    async def get_authorization_token(  # pylint: disable=dangerous-default-value
        self, uri: str, additional_authentication_context: dict[str, Any] = {}
    ) -> str:
        """Return a valid authorization token."""
        await self._oauth_session.async_ensure_token_valid()
        return cast(str, self._oauth_session.token[CONF_ACCESS_TOKEN])

@@ -2,37 +2,22 @@

from __future__ import annotations

import asyncio
from collections.abc import AsyncIterator, Callable, Coroutine
from functools import wraps
import html
import json
import logging
from typing import Any, Concatenate, cast
from typing import Any, Concatenate

from httpx import Response, TimeoutException
from kiota_abstractions.api_error import APIError
from kiota_abstractions.authentication import AnonymousAuthenticationProvider
from kiota_abstractions.headers_collection import HeadersCollection
from kiota_abstractions.method import Method
from kiota_abstractions.native_response_handler import NativeResponseHandler
from kiota_abstractions.request_information import RequestInformation
from kiota_http.middleware.options import ResponseHandlerOption
from msgraph import GraphRequestAdapter
from msgraph.generated.drives.item.items.item.content.content_request_builder import (
    ContentRequestBuilder,
from aiohttp import ClientTimeout
from onedrive_personal_sdk.clients.large_file_upload import LargeFileUploadClient
from onedrive_personal_sdk.exceptions import (
    AuthenticationError,
    HashMismatchError,
    OneDriveException,
)
from msgraph.generated.drives.item.items.item.create_upload_session.create_upload_session_post_request_body import (
    CreateUploadSessionPostRequestBody,
)
from msgraph.generated.drives.item.items.item.drive_item_item_request_builder import (
    DriveItemItemRequestBuilder,
)
from msgraph.generated.models.drive_item import DriveItem
from msgraph.generated.models.drive_item_uploadable_properties import (
    DriveItemUploadableProperties,
)
from msgraph_core.models import LargeFileUploadSession
from onedrive_personal_sdk.models.items import File, Folder, ItemUpdate
from onedrive_personal_sdk.models.upload import FileInfo

from homeassistant.components.backup import (
    AgentBackup,
@@ -41,14 +26,14 @@ from homeassistant.components.backup import (
    suggested_filename,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from . import OneDriveConfigEntry
from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN

_LOGGER = logging.getLogger(__name__)
UPLOAD_CHUNK_SIZE = 16 * 320 * 1024  # 5.2MB
MAX_RETRIES = 5
TIMEOUT = ClientTimeout(connect=10, total=43200)  # 12 hours


async def async_get_backup_agents(
@@ -92,18 +77,18 @@ def handle_backup_errors[_R, **P](
    ) -> _R:
        try:
            return await func(self, *args, **kwargs)
        except APIError as err:
            if err.response_status_code == 403:
                self._entry.async_start_reauth(self._hass)
        except AuthenticationError as err:
            self._entry.async_start_reauth(self._hass)
            raise BackupAgentError("Authentication error") from err
        except OneDriveException as err:
            _LOGGER.error(
                "Error during backup in %s: Status %s, message %s",
                "Error during backup in %s:, message %s",
                func.__name__,
                err.response_status_code,
                err.message,
                err,
            )
            _LOGGER.debug("Full error: %s", err, exc_info=True)
            raise BackupAgentError("Backup operation failed") from err
        except TimeoutException as err:
        except TimeoutError as err:
            _LOGGER.error(
                "Error during backup in %s: Timeout",
                func.__name__,
@@ -123,7 +108,8 @@ class OneDriveBackupAgent(BackupAgent):
        super().__init__()
        self._hass = hass
        self._entry = entry
        self._items = entry.runtime_data.items
        self._client = entry.runtime_data.client
        self._token_function = entry.runtime_data.token_function
        self._folder_id = entry.runtime_data.backup_folder_id
        self.name = entry.title
        assert entry.unique_id
@@ -134,24 +120,12 @@ class OneDriveBackupAgent(BackupAgent):
        self, backup_id: str, **kwargs: Any
    ) -> AsyncIterator[bytes]:
        """Download a backup file."""
        # this forces the query to return a raw httpx response, but breaks typing
        backup = await self._find_item_by_backup_id(backup_id)
        if backup is None or backup.id is None:
        item = await self._find_item_by_backup_id(backup_id)
        if item is None:
            raise BackupAgentError("Backup not found")

        request_config = (
            ContentRequestBuilder.ContentRequestBuilderGetRequestConfiguration(
                options=[ResponseHandlerOption(NativeResponseHandler())],
            )
        )
        response = cast(
            Response,
            await self._items.by_drive_item_id(backup.id).content.get(
                request_configuration=request_config
            ),
        )

        return response.aiter_bytes(chunk_size=1024)
        stream = await self._client.download_drive_item(item.id, timeout=TIMEOUT)
        return stream.iter_chunked(1024)

    @handle_backup_errors
    async def async_upload_backup(
@@ -163,27 +137,20 @@ class OneDriveBackupAgent(BackupAgent):
    ) -> None:
        """Upload a backup."""

        # upload file in chunks to support large files
        upload_session_request_body = CreateUploadSessionPostRequestBody(
            item=DriveItemUploadableProperties(
                additional_data={
                    "@microsoft.graph.conflictBehavior": "fail",
                },
        file = FileInfo(
            suggested_filename(backup),
            backup.size,
            self._folder_id,
            await open_stream(),
        )
        try:
            item = await LargeFileUploadClient.upload(
                self._token_function, file, session=async_get_clientsession(self._hass)
            )
        )
        file_item = self._get_backup_file_item(suggested_filename(backup))
        upload_session = await file_item.create_upload_session.post(
            upload_session_request_body
        )

        if upload_session is None or upload_session.upload_url is None:
        except HashMismatchError as err:
            raise BackupAgentError(
                translation_domain=DOMAIN, translation_key="backup_no_upload_session"
            )

        await self._upload_file(
            upload_session.upload_url, await open_stream(), backup.size
        )
                "Hash validation failed, backup file might be corrupt"
            ) from err

        # store metadata in description
        backup_dict = backup.as_dict()
@@ -191,7 +158,10 @@ class OneDriveBackupAgent(BackupAgent):
        description = json.dumps(backup_dict)
        _LOGGER.debug("Creating metadata: %s", description)

        await file_item.patch(DriveItem(description=description))
        await self._client.update_drive_item(
            path_or_id=item.id,
            data=ItemUpdate(description=description),
        )

    @handle_backup_errors
    async def async_delete_backup(
@@ -200,35 +170,31 @@ class OneDriveBackupAgent(BackupAgent):
        **kwargs: Any,
    ) -> None:
        """Delete a backup file."""
        backup = await self._find_item_by_backup_id(backup_id)
        if backup is None or backup.id is None:
        item = await self._find_item_by_backup_id(backup_id)
        if item is None:
            return
        await self._items.by_drive_item_id(backup.id).delete()
        await self._client.delete_drive_item(item.id)

    @handle_backup_errors
    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List backups."""
        backups: list[AgentBackup] = []
        items = await self._items.by_drive_item_id(f"{self._folder_id}").children.get()
        if items and (values := items.value):
            for item in values:
                if (description := item.description) is None:
                    continue
                if "homeassistant_version" in description:
                    backups.append(self._backup_from_description(description))
        return backups
        return [
            self._backup_from_description(item.description)
            for item in await self._client.list_drive_items(self._folder_id)
            if item.description and "homeassistant_version" in item.description
        ]

    @handle_backup_errors
    async def async_get_backup(
        self, backup_id: str, **kwargs: Any
    ) -> AgentBackup | None:
        """Return a backup."""
        backup = await self._find_item_by_backup_id(backup_id)
        if backup is None:
            return None

        assert backup.description  # already checked in _find_item_by_backup_id
        return self._backup_from_description(backup.description)
        item = await self._find_item_by_backup_id(backup_id)
        return (
            self._backup_from_description(item.description)
            if item and item.description
            else None
        )

    def _backup_from_description(self, description: str) -> AgentBackup:
        """Create a backup object from a description."""
@@ -237,91 +203,13 @@ class OneDriveBackupAgent(BackupAgent):
        )  # OneDrive encodes the description on save automatically
        return AgentBackup.from_dict(json.loads(description))

    async def _find_item_by_backup_id(self, backup_id: str) -> DriveItem | None:
        """Find a backup item by its backup ID."""

        items = await self._items.by_drive_item_id(f"{self._folder_id}").children.get()
        if items and (values := items.value):
            for item in values:
                if (description := item.description) is None:
                    continue
                if backup_id in description:
                    return item
        return None

    def _get_backup_file_item(self, backup_id: str) -> DriveItemItemRequestBuilder:
        return self._items.by_drive_item_id(f"{self._folder_id}:/{backup_id}:")

    async def _upload_file(
        self, upload_url: str, stream: AsyncIterator[bytes], total_size: int
    ) -> None:
        """Use custom large file upload; SDK does not support stream."""

        adapter = GraphRequestAdapter(
            auth_provider=AnonymousAuthenticationProvider(),
            client=get_async_client(self._hass),
    async def _find_item_by_backup_id(self, backup_id: str) -> File | Folder | None:
        """Find an item by backup ID."""
        return next(
            (
                item
                for item in await self._client.list_drive_items(self._folder_id)
                if item.description and backup_id in item.description
            ),
            None,
        )

        async def async_upload(
            start: int, end: int, chunk_data: bytes
        ) -> LargeFileUploadSession:
            info = RequestInformation()
            info.url = upload_url
            info.http_method = Method.PUT
            info.headers = HeadersCollection()
            info.headers.try_add("Content-Range", f"bytes {start}-{end}/{total_size}")
            info.headers.try_add("Content-Length", str(len(chunk_data)))
            info.headers.try_add("Content-Type", "application/octet-stream")
            _LOGGER.debug(info.headers.get_all())
            info.set_stream_content(chunk_data)
            result = await adapter.send_async(info, LargeFileUploadSession, {})
            _LOGGER.debug("Next expected range: %s", result.next_expected_ranges)
            return result

        start = 0
        buffer: list[bytes] = []
        buffer_size = 0
        retries = 0

        async for chunk in stream:
            buffer.append(chunk)
            buffer_size += len(chunk)
            if buffer_size >= UPLOAD_CHUNK_SIZE:
                chunk_data = b"".join(buffer)
                uploaded_chunks = 0
                while (
                    buffer_size > UPLOAD_CHUNK_SIZE
                ):  # Loop in case the buffer is >= UPLOAD_CHUNK_SIZE * 2
                    slice_start = uploaded_chunks * UPLOAD_CHUNK_SIZE
                    try:
                        await async_upload(
                            start,
                            start + UPLOAD_CHUNK_SIZE - 1,
                            chunk_data[slice_start : slice_start + UPLOAD_CHUNK_SIZE],
                        )
                    except APIError as err:
                        if (
                            err.response_status_code and err.response_status_code < 500
                        ):  # no retry on 4xx errors
                            raise
                        if retries < MAX_RETRIES:
                            await asyncio.sleep(2**retries)
                            retries += 1
                            continue
                        raise
                    except TimeoutException:
                        if retries < MAX_RETRIES:
                            retries += 1
                            continue
                        raise
                    retries = 0
                    start += UPLOAD_CHUNK_SIZE
                    uploaded_chunks += 1
                    buffer_size -= UPLOAD_CHUNK_SIZE
                buffer = [chunk_data[UPLOAD_CHUNK_SIZE * uploaded_chunks :]]

        # upload the remaining bytes
        if buffer:
            _LOGGER.debug("Last chunk")
            chunk_data = b"".join(buffer)
            await async_upload(start, start + len(chunk_data) - 1, chunk_data)

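Note: this rewrite replaces roughly ninety lines of hand-rolled chunked upload with the SDK's LargeFileUploadClient, which takes over the chunking the old helper did by hand. A condensed sketch of the new upload path, reusing only names that appear in the diff (onedrive_personal_sdk's FileInfo, LargeFileUploadClient, and HashMismatchError); the surrounding function is illustrative:

from aiohttp import ClientSession
from onedrive_personal_sdk.clients.large_file_upload import LargeFileUploadClient
from onedrive_personal_sdk.exceptions import HashMismatchError
from onedrive_personal_sdk.models.upload import FileInfo

async def upload_stream(token_function, name, size, folder_id, stream, session: ClientSession):
    """Hand a whole byte stream to the SDK and surface hash failures clearly."""
    file = FileInfo(name, size, folder_id, stream)
    try:
        return await LargeFileUploadClient.upload(token_function, file, session=session)
    except HashMismatchError as err:
        raise RuntimeError("Hash validation failed, backup file might be corrupt") from err
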
@@ -4,18 +4,14 @@ from collections.abc import Mapping
import logging
from typing import Any, cast

from kiota_abstractions.api_error import APIError
from kiota_abstractions.authentication import BaseBearerTokenAuthenticationProvider
from kiota_abstractions.method import Method
from kiota_abstractions.request_information import RequestInformation
from msgraph import GraphRequestAdapter, GraphServiceClient
from onedrive_personal_sdk.clients.client import OneDriveClient
from onedrive_personal_sdk.exceptions import OneDriveException

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler
from homeassistant.helpers.httpx_client import get_async_client

from .api import OneDriveConfigFlowAccessTokenProvider
from .const import DOMAIN, OAUTH_SCOPES


@@ -39,48 +35,24 @@ class OneDriveConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
        data: dict[str, Any],
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        auth_provider = BaseBearerTokenAuthenticationProvider(
            access_token_provider=OneDriveConfigFlowAccessTokenProvider(
                cast(str, data[CONF_TOKEN][CONF_ACCESS_TOKEN])
            )
        )
        adapter = GraphRequestAdapter(
            auth_provider=auth_provider,
            client=get_async_client(self.hass),

        async def get_access_token() -> str:
            return cast(str, data[CONF_TOKEN][CONF_ACCESS_TOKEN])

        graph_client = OneDriveClient(
            get_access_token, async_get_clientsession(self.hass)
        )

        graph_client = GraphServiceClient(
            request_adapter=adapter,
            scopes=OAUTH_SCOPES,
        )

        # need to get adapter from client, as client changes it
        request_adapter = cast(GraphRequestAdapter, graph_client.request_adapter)

        request_info = RequestInformation(
            method=Method.GET,
            url_template="{+baseurl}/me/drive/special/approot",
            path_parameters={},
        )
        parent_span = request_adapter.start_tracing_span(request_info, "get_approot")

        # get the OneDrive id
        # use low level methods, to avoid files.read permissions
        # which would be required by drives.me.get()
        try:
            response = await request_adapter.get_http_response_message(
                request_info=request_info, parent_span=parent_span
            )
        except APIError:
            approot = await graph_client.get_approot()
        except OneDriveException:
            self.logger.exception("Failed to connect to OneDrive")
            return self.async_abort(reason="connection_error")
        except Exception:
            self.logger.exception("Unknown error")
            return self.async_abort(reason="unknown")

        drive: dict = response.json()

        await self.async_set_unique_id(drive["parentReference"]["driveId"])
        await self.async_set_unique_id(approot.parent_reference.drive_id)

        if self.source == SOURCE_REAUTH:
            reauth_entry = self._get_reauth_entry()
@@ -94,10 +66,11 @@ class OneDriveConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):

        self._abort_if_unique_id_configured()

        user = drive.get("createdBy", {}).get("user", {}).get("displayName")

        title = f"{user}'s OneDrive" if user else "OneDrive"

        title = (
            f"{approot.created_by.user.display_name}'s OneDrive"
            if approot.created_by.user and approot.created_by.user.display_name
            else "OneDrive"
        )
        return self.async_create_entry(title=title, data=data)

    async def async_step_reauth(

@@ -7,7 +7,7 @@
  "documentation": "https://www.home-assistant.io/integrations/onedrive",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["msgraph", "msgraph-core", "kiota"],
  "loggers": ["onedrive_personal_sdk"],
  "quality_scale": "bronze",
  "requirements": ["msgraph-sdk==1.16.0"]
  "requirements": ["onedrive-personal-sdk==0.0.8"]
}

@@ -23,31 +23,18 @@
      "connection_error": "Failed to connect to OneDrive.",
      "wrong_drive": "New account does not contain previously configured OneDrive.",
      "unknown": "[%key:common::config_flow::error::unknown%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "failed_to_create_folder": "Failed to create backup folder"
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
    },
    "create_entry": {
      "default": "[%key:common::config_flow::create_entry::authenticated%]"
    }
  },
  "exceptions": {
    "backup_not_found": {
      "message": "Backup not found"
    },
    "backup_no_content": {
      "message": "Backup has no content"
    },
    "backup_no_upload_session": {
      "message": "Failed to start backup upload"
    },
    "authentication_failed": {
      "message": "Authentication failed"
    },
    "failed_to_get_folder": {
      "message": "Failed to get {folder} folder"
    },
    "failed_to_create_folder": {
      "message": "Failed to create {folder} folder"
    }
  }
}

@@ -72,7 +72,7 @@ class OralBConfigFlow(ConfigFlow, domain=DOMAIN):
                title=self._discovered_devices[address], data={}
            )

        current_addresses = self._async_current_ids()
        current_addresses = self._async_current_ids(include_ignore=False)
        for discovery_info in async_discovered_service_info(self.hass, False):
            address = discovery_info.address
            if address in current_addresses or address in self._discovered_devices:

@@ -6,5 +6,5 @@
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/private_ble_device",
  "iot_class": "local_push",
  "requirements": ["bluetooth-data-tools==1.22.0"]
  "requirements": ["bluetooth-data-tools==1.23.4"]
}

@@ -5,5 +5,5 @@
  "documentation": "https://www.home-assistant.io/integrations/python_script",
  "loggers": ["RestrictedPython"],
  "quality_scale": "internal",
  "requirements": ["RestrictedPython==7.4"]
  "requirements": ["RestrictedPython==8.0"]
}

@@ -98,7 +98,7 @@ class QingpingConfigFlow(ConfigFlow, domain=DOMAIN):
                title=self._discovered_devices[address], data={}
            )

        current_addresses = self._async_current_ids()
        current_addresses = self._async_current_ids(include_ignore=False)
        for discovery_info in async_discovered_service_info(self.hass, False):
            address = discovery_info.address
            if address in current_addresses or address in self._discovered_devices:

@@ -19,5 +19,5 @@
  "iot_class": "local_push",
  "loggers": ["reolink_aio"],
  "quality_scale": "platinum",
  "requirements": ["reolink-aio==0.11.8"]
  "requirements": ["reolink-aio==0.11.9"]
}

@@ -424,6 +424,7 @@ NUMBER_ENTITIES = (
    ReolinkNumberEntityDescription(
        key="image_brightness",
        cmd_key="GetImage",
        cmd_id=26,
        translation_key="image_brightness",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
@@ -437,6 +438,7 @@ NUMBER_ENTITIES = (
    ReolinkNumberEntityDescription(
        key="image_contrast",
        cmd_key="GetImage",
        cmd_id=26,
        translation_key="image_contrast",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
@@ -450,6 +452,7 @@ NUMBER_ENTITIES = (
    ReolinkNumberEntityDescription(
        key="image_saturation",
        cmd_key="GetImage",
        cmd_id=26,
        translation_key="image_saturation",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
@@ -463,6 +466,7 @@ NUMBER_ENTITIES = (
    ReolinkNumberEntityDescription(
        key="image_sharpness",
        cmd_key="GetImage",
        cmd_id=26,
        translation_key="image_sharpness",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
@@ -476,6 +480,7 @@ NUMBER_ENTITIES = (
    ReolinkNumberEntityDescription(
        key="image_hue",
        cmd_key="GetImage",
        cmd_id=26,
        translation_key="image_hue",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,

@@ -80,6 +80,7 @@ SELECT_ENTITIES = (
    ReolinkSelectEntityDescription(
        key="day_night_mode",
        cmd_key="GetIsp",
        cmd_id=26,
        translation_key="day_night_mode",
        entity_category=EntityCategory.CONFIG,
        get_options=[mode.name for mode in DayNightEnum],

@@ -7,7 +7,7 @@
  "iot_class": "local_polling",
  "loggers": ["roborock"],
  "requirements": [
    "python-roborock==2.9.7",
    "python-roborock==2.11.1",
    "vacuum-map-parser-roborock==0.1.2"
  ]
}

@@ -105,7 +105,7 @@ class ScreenlogicConfigFlow(ConfigFlow, domain=DOMAIN):

    async def async_step_gateway_select(self, user_input=None) -> ConfigFlowResult:
        """Handle the selection of a discovered ScreenLogic gateway."""
        existing = self._async_current_ids()
        existing = self._async_current_ids(include_ignore=False)
        unconfigured_gateways = {
            mac: gateway[SL_GATEWAY_NAME]
            for mac, gateway in self.discovered_gateways.items()

@@ -72,7 +72,7 @@ class SensorPushConfigFlow(ConfigFlow, domain=DOMAIN):
                title=self._discovered_devices[address], data={}
            )

        current_addresses = self._async_current_ids()
        current_addresses = self._async_current_ids(include_ignore=False)
        for discovery_info in async_discovered_service_info(self.hass, False):
            address = discovery_info.address
            if address in current_addresses or address in self._discovered_devices:

@@ -21,6 +21,7 @@ async def async_connect_scanner(
    hass: HomeAssistant,
    coordinator: ShellyRpcCoordinator,
    scanner_mode: BLEScannerMode,
    device_id: str,
) -> CALLBACK_TYPE:
    """Connect scanner."""
    device = coordinator.device
@@ -34,6 +35,7 @@ async def async_connect_scanner(
            source_domain=entry.domain,
            source_model=coordinator.model,
            source_config_entry_id=entry.entry_id,
            source_device_id=device_id,
        ),
        scanner.async_setup(),
        coordinator.async_subscribe_events(scanner.async_on_event),

@@ -704,8 +704,11 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]):
            # BLE enable required a reboot, don't bother connecting
            # the scanner since it will be disconnected anyway
            return
        assert self.device_id is not None
        self._disconnected_callbacks.append(
            await async_connect_scanner(self.hass, self, ble_scanner_mode)
            await async_connect_scanner(
                self.hass, self, ble_scanner_mode, self.device_id
            )
        )

    @callback

@@ -10,10 +10,11 @@ from pysmlight.const import Devices
from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import SOURCE_USER, ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo

from .const import DOMAIN
@@ -35,11 +36,9 @@ STEP_AUTH_DATA_SCHEMA = vol.Schema(
class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for SMLIGHT Zigbee."""

    host: str

    def __init__(self) -> None:
        """Initialize the config flow."""
        self.client: Api2
    _host: str
    _device_name: str
    client: Api2

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
@@ -48,11 +47,13 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
        errors: dict[str, str] = {}

        if user_input is not None:
            self.host = user_input[CONF_HOST]
            self.client = Api2(self.host, session=async_get_clientsession(self.hass))
            self._host = user_input[CONF_HOST]
            self.client = Api2(self._host, session=async_get_clientsession(self.hass))

            try:
                info = await self.client.get_info()
                self._host = str(info.device_ip)
                self._device_name = str(info.hostname)

                if info.model not in Devices:
                    return self.async_abort(reason="unsupported_device")
@@ -96,15 +97,14 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
        self, discovery_info: ZeroconfServiceInfo
    ) -> ConfigFlowResult:
        """Handle a discovered Lan coordinator."""
        local_name = discovery_info.hostname[:-1]
        node_name = local_name.removesuffix(".local")
        mac: str | None = discovery_info.properties.get("mac")
        self._device_name = discovery_info.hostname.removesuffix(".local.")
        self._host = discovery_info.host

        self.host = local_name
        self.context["title_placeholders"] = {CONF_NAME: node_name}
        self.client = Api2(self.host, session=async_get_clientsession(self.hass))
        self.context["title_placeholders"] = {CONF_NAME: self._device_name}
        self.client = Api2(self._host, session=async_get_clientsession(self.hass))

        mac = discovery_info.properties.get("mac")
        # fallback for legacy firmware
        # fallback for legacy firmware older than v2.3.x
        if mac is None:
            try:
                info = await self.client.get_info()
@@ -114,7 +114,7 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
                mac = info.MAC

        await self.async_set_unique_id(format_mac(mac))
        self._abort_if_unique_id_configured()
        self._abort_if_unique_id_configured(updates={CONF_HOST: self._host})

        return await self.async_step_confirm_discovery()

@@ -125,7 +125,6 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
        errors: dict[str, str] = {}

        if user_input is not None:
            user_input[CONF_HOST] = self.host
            try:
                info = await self.client.get_info()

@@ -145,7 +144,7 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):

        return self.async_show_form(
            step_id="confirm_discovery",
            description_placeholders={"host": self.host},
            description_placeholders={"host": self._device_name},
            errors=errors,
        )

@@ -154,8 +153,8 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
    ) -> ConfigFlowResult:
        """Handle reauth when API Authentication failed."""

        self.host = entry_data[CONF_HOST]
        self.client = Api2(self.host, session=async_get_clientsession(self.hass))
        self._host = entry_data[CONF_HOST]
        self.client = Api2(self._host, session=async_get_clientsession(self.hass))

        return await self.async_step_reauth_confirm()

@@ -185,6 +184,16 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
            errors=errors,
        )

    async def async_step_dhcp(
        self, discovery_info: DhcpServiceInfo
    ) -> ConfigFlowResult:
        """Handle DHCP discovery."""
        await self.async_set_unique_id(format_mac(discovery_info.macaddress))
        self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.ip})
        # This should never happen since we only listen to DHCP requests
        # for configured devices.
        return self.async_abort(reason="already_configured")

    async def _async_check_auth_required(self, user_input: dict[str, Any]) -> bool:
        """Check if auth required and attempt to authenticate."""
        if await self.client.check_auth_needed():
@@ -199,12 +208,14 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
        self, user_input: dict[str, Any]
    ) -> ConfigFlowResult:
        info = await self.client.get_info()
        await self.async_set_unique_id(format_mac(info.MAC))
        self._abort_if_unique_id_configured()

        if user_input.get(CONF_HOST) is None:
            user_input[CONF_HOST] = self.host
        await self.async_set_unique_id(
            format_mac(info.MAC), raise_on_progress=self.source != SOURCE_USER
        )
        self._abort_if_unique_id_configured(updates={CONF_HOST: self._host})

        user_input[CONF_HOST] = self._host

        assert info.model is not None
        title = self.context.get("title_placeholders", {}).get(CONF_NAME) or info.model
        title = self._device_name or info.model
        return self.async_create_entry(title=title, data=user_input)

@@ -3,10 +3,15 @@
  "name": "SMLIGHT SLZB",
  "codeowners": ["@tl-sl"],
  "config_flow": true,
  "dhcp": [
    {
      "registered_devices": true
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/smlight",
  "integration_type": "device",
  "iot_class": "local_push",
  "requirements": ["pysmlight==0.1.6"],
  "requirements": ["pysmlight==0.1.7"],
  "zeroconf": [
    {
      "type": "_slzb-06._tcp.local."

@@ -272,7 +272,7 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN):

    @callback
    def _async_discover_devices(self) -> None:
        current_addresses = self._async_current_ids()
        current_addresses = self._async_current_ids(include_ignore=False)
        for connectable in (True, False):
            for discovery_info in async_discovered_service_info(self.hass, connectable):
                address = discovery_info.address

@@ -10,7 +10,12 @@ from aiohttp import StreamReader
from synology_dsm.api.file_station import SynoFileStation
from synology_dsm.exceptions import SynologyDSMAPIErrorException

from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
from homeassistant.components.backup import (
    AgentBackup,
    BackupAgent,
    BackupAgentError,
    suggested_filename,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import ChunkAsyncStreamIterator
@@ -28,6 +33,15 @@ from .models import SynologyDSMData
LOGGER = logging.getLogger(__name__)


def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
    """Suggest filenames for the backup.

    returns a tuple of tar_filename and meta_filename
    """
    base_name = suggested_filename(backup).rsplit(".", 1)[0]
    return (f"{base_name}.tar", f"{base_name}_meta.json")


async def async_get_backup_agents(
    hass: HomeAssistant,
) -> list[BackupAgent]:

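Note: both names are derived from the backup's single suggested filename by stripping its extension, so the .tar and the _meta.json always share a stem. A quick check of that derivation in isolation (the filename is illustrative):

def suggested_filenames_from(base: str) -> tuple[str, str]:
    """Same derivation as the helper above, starting from the suggested filename."""
    stem = base.rsplit(".", 1)[0]
    return (f"{stem}.tar", f"{stem}_meta.json")

assert suggested_filenames_from("Core_2025.2.0_2025-02-01_12.00_00000000.tar") == (
    "Core_2025.2.0_2025-02-01_12.00_00000000.tar",
    "Core_2025.2.0_2025-02-01_12.00_00000000_meta.json",
)
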
@@ -95,6 +109,19 @@ class SynologyDSMBackupAgent(BackupAgent):
        assert self.api.file_station
        return self.api.file_station

    async def _async_suggested_filenames(
        self,
        backup_id: str,
    ) -> tuple[str, str]:
        """Suggest filenames for the backup.

        :param backup_id: The ID of the backup that was returned in async_list_backups.
        :return: A tuple of tar_filename and meta_filename
        """
        if (backup := await self.async_get_backup(backup_id)) is None:
            raise BackupAgentError("Backup not found")
        return suggested_filenames(backup)

    async def async_download_backup(
        self,
        backup_id: str,
@@ -105,10 +132,12 @@ class SynologyDSMBackupAgent(BackupAgent):
        :param backup_id: The ID of the backup that was returned in async_list_backups.
        :return: An async iterator that yields bytes.
        """
        (filename_tar, _) = await self._async_suggested_filenames(backup_id)

        try:
            resp = await self._file_station.download_file(
                path=self.path,
                filename=f"{backup_id}.tar",
                filename=filename_tar,
            )
        except SynologyDSMAPIErrorException as err:
            raise BackupAgentError("Failed to download backup") from err
@@ -131,11 +160,13 @@ class SynologyDSMBackupAgent(BackupAgent):
        :param backup: Metadata about the backup that should be uploaded.
        """

        (filename_tar, filename_meta) = suggested_filenames(backup)

        # upload backup.tar file first
        try:
            await self._file_station.upload_file(
                path=self.path,
                filename=f"{backup.backup_id}.tar",
                filename=filename_tar,
                source=await open_stream(),
                create_parents=True,
            )
@@ -146,7 +177,7 @@ class SynologyDSMBackupAgent(BackupAgent):
        try:
            await self._file_station.upload_file(
                path=self.path,
                filename=f"{backup.backup_id}_meta.json",
                filename=filename_meta,
                source=json_dumps(backup.as_dict()).encode(),
            )
        except SynologyDSMAPIErrorException as err:
@@ -162,14 +193,27 @@ class SynologyDSMBackupAgent(BackupAgent):
        :param backup_id: The ID of the backup that was returned in async_list_backups.
        """
        try:
            await self._file_station.delete_file(
                path=self.path, filename=f"{backup_id}.tar"
            (filename_tar, filename_meta) = await self._async_suggested_filenames(
                backup_id
            )
            await self._file_station.delete_file(
                path=self.path, filename=f"{backup_id}_meta.json"
            )
        except SynologyDSMAPIErrorException as err:
            raise BackupAgentError("Failed to delete the backup") from err
        except BackupAgentError:
            # backup meta data could not be found, so we can't delete the backup
            return

        for filename in (filename_tar, filename_meta):
            try:
                await self._file_station.delete_file(path=self.path, filename=filename)
            except SynologyDSMAPIErrorException as err:
                err_args: dict = err.args[0]
                if int(err_args.get("code", 0)) != 900 or (
                    (err_details := err_args.get("details")) is not None
                    and isinstance(err_details, list)
                    and isinstance(err_details[0], dict)
                    and int(err_details[0].get("code", 0))
                    != 408  # No such file or directory
                ):
                    LOGGER.error("Failed to delete backup: %s", err)
                    raise BackupAgentError("Failed to delete backup") from err

    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List backups."""

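Note: the delete path above tolerates exactly one failure mode: DSM error code 900 whose nested detail code is 408 ("no such file or directory"), meaning the file is already gone. A standalone restatement of that predicate, with the payload shape inferred from the code above:

def should_raise(err_args: dict) -> bool:
    """Mirror the condition used in async_delete_backup."""
    details = err_args.get("details")
    return int(err_args.get("code", 0)) != 900 or (
        details is not None
        and isinstance(details, list)
        and isinstance(details[0], dict)
        and int(details[0].get("code", 0)) != 408  # 408: no such file or directory
    )

assert not should_raise({"code": 900, "details": [{"code": 408}]})  # already deleted
assert should_raise({"code": 900, "details": [{"code": 500}]})      # other detail code
assert should_raise({"code": 119})                                  # any other error
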
@@ -3,7 +3,7 @@

from __future__ import annotations

import asyncio
from collections.abc import Awaitable, Callable
from collections.abc import AsyncGenerator, Awaitable, Callable
import dataclasses
from datetime import datetime
import logging
@@ -101,6 +101,57 @@ async def get_integration_info(
    return result


async def _registered_domain_data(
    hass: HomeAssistant,
) -> AsyncGenerator[tuple[str, dict[str, Any]]]:
    registrations: dict[str, SystemHealthRegistration] = hass.data[DOMAIN]
    for domain, domain_data in zip(
        registrations,
        await asyncio.gather(
            *(
                get_integration_info(hass, registration)
                for registration in registrations.values()
            )
        ),
        strict=False,
    ):
        yield domain, domain_data


async def get_info(hass: HomeAssistant) -> dict[str, dict[str, str]]:
    """Get the full set of system health information."""
    domains: dict[str, dict[str, Any]] = {}

    async def _get_info_value(value: Any) -> Any:
        if not asyncio.iscoroutine(value):
            return value
        try:
            return await value
        except Exception as exception:
            _LOGGER.exception("Error fetching system info for %s - %s", domain, key)
            return f"Exception: {exception}"

    async for domain, domain_data in _registered_domain_data(hass):
        domain_info: dict[str, Any] = {}
        for key, value in domain_data["info"].items():
            info_value = await _get_info_value(value)

            if isinstance(info_value, datetime):
                domain_info[key] = info_value.isoformat()
            elif (
                isinstance(info_value, dict)
                and "type" in info_value
                and info_value["type"] == "failed"
            ):
                domain_info[key] = f"Failed: {info_value.get('error', 'unknown')}"
            else:
                domain_info[key] = info_value

        domains[domain] = domain_info

    return domains


@callback
def _format_value(val: Any) -> Any:
    """Format a system health value."""

@@ -115,20 +166,10 @@ async def handle_info(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle an info request via a subscription."""
|
||||
registrations: dict[str, SystemHealthRegistration] = hass.data[DOMAIN]
|
||||
data = {}
|
||||
pending_info: dict[tuple[str, str], asyncio.Task] = {}
|
||||
|
||||
for domain, domain_data in zip(
|
||||
registrations,
|
||||
await asyncio.gather(
|
||||
*(
|
||||
get_integration_info(hass, registration)
|
||||
for registration in registrations.values()
|
||||
)
|
||||
),
|
||||
strict=False,
|
||||
):
|
||||
async for domain, domain_data in _registered_domain_data(hass):
|
||||
for key, value in domain_data["info"].items():
|
||||
if asyncio.iscoroutine(value):
|
||||
value = asyncio.create_task(value)
|
||||
|
||||
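A runnable sketch of the pattern this refactor extracts into `_registered_domain_data`: fan out one coroutine per registered domain with asyncio.gather, then zip the results back to their domain keys and yield the pairs from an async generator. The fetch_info stub and REGISTRATIONS mapping are illustrative stand-ins, not Home Assistant APIs:

import asyncio
from collections.abc import AsyncGenerator

REGISTRATIONS = {"recorder": None, "cloud": None}  # stand-in registry


async def fetch_info(domain: str) -> dict:
    # Stand-in for get_integration_info(hass, registration).
    return {"info": {"status": f"{domain} ok"}}


async def registered_domain_data() -> AsyncGenerator[tuple[str, dict], None]:
    results = await asyncio.gather(*(fetch_info(d) for d in REGISTRATIONS))
    # dicts iterate in insertion order, so keys line up with the gather results
    for domain, domain_data in zip(REGISTRATIONS, results, strict=False):
        yield domain, domain_data


async def main() -> None:
    async for domain, data in registered_domain_data():
        print(domain, data["info"])


asyncio.run(main())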
@@ -506,7 +506,7 @@ class TadoClimate(TadoZoneEntity, ClimateEntity):
            offset,
        )

        self._tado.set_temperature_offset(self._device_id, offset)
        await self._tado.set_temperature_offset(self._device_id, offset)
        await self.coordinator.async_request_refresh()

    async def async_set_temperature(self, **kwargs: Any) -> None:
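The one-word fix above illustrates a whole bug class: calling a coroutine function without await only creates a coroutine object, so the request is never sent (Python emits a "coroutine was never awaited" RuntimeWarning). A minimal reproduction with a stand-in coroutine:

import asyncio


async def set_temperature_offset(device_id: str, offset: float) -> None:
    print(f"offset for {device_id} set to {offset}")


async def main() -> None:
    set_temperature_offset("zone-1", 1.5)        # bug: never runs, warns instead
    await set_temperature_offset("zone-1", 1.5)  # fix: actually runs


asyncio.run(main())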
@@ -14,7 +14,7 @@
    },
    "reconfigure": {
      "title": "Reconfigure your Tado",
      "description": "Reconfigure the entry, for your account: `{username}`.",
      "description": "Reconfigure the entry for your account: `{username}`.",
      "data": {
        "password": "[%key:common::config_flow::data::password%]"
      },

@@ -25,7 +25,7 @@
    },
    "error": {
      "unknown": "[%key:common::config_flow::error::unknown%]",
      "no_homes": "There are no homes linked to this tado account.",
      "no_homes": "There are no homes linked to this Tado account.",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
    }

@@ -33,7 +33,7 @@
  "options": {
    "step": {
      "init": {
        "description": "Fallback mode lets you choose when to fallback to Smart Schedule from your manual zone overlay. (NEXT_TIME_BLOCK:= Change at next Smart Schedule change; MANUAL:= Dont change until you cancel; TADO_DEFAULT:= Change based on your setting in Tado App).",
        "description": "Fallback mode lets you choose when to fallback to Smart Schedule from your manual zone overlay. (NEXT_TIME_BLOCK:= Change at next Smart Schedule change; MANUAL:= Don't change until you cancel; TADO_DEFAULT:= Change based on your setting in the Tado app).",
        "data": {
          "fallback": "Choose fallback mode."
        },

@@ -102,11 +102,11 @@
      },
      "time_period": {
        "name": "Time period",
        "description": "Choose this or Overlay. Set the time period for the change if you want to be specific. Alternatively use Overlay."
        "description": "Choose this or 'Overlay'. Set the time period for the change if you want to be specific."
      },
      "requested_overlay": {
        "name": "Overlay",
        "description": "Choose this or Time Period. Allows you to choose an overlay. MANUAL:=Overlay until user removes; NEXT_TIME_BLOCK:=Overlay until next timeblock; TADO_DEFAULT:=Overlay based on tado app setting."
        "description": "Choose this or 'Time period'. Allows you to choose an overlay. MANUAL:=Overlay until user removes; NEXT_TIME_BLOCK:=Overlay until next timeblock; TADO_DEFAULT:=Overlay based on Tado app setting."
      }
    }
  },

@@ -151,8 +151,8 @@
    },
  "issues": {
    "water_heater_fallback": {
      "title": "Tado Water Heater entities now support fallback options",
      "description": "Due to added support for water heaters entities, these entities may use different overlay. Please configure integration entity and tado app water heater zone overlay options.",
      "title": "Tado water heater entities now support fallback options",
      "description": "Due to added support for water heaters entities, these entities may use a different overlay. Please configure the integration entity and Tado app water heater zone overlay options (under Settings -> Rooms & Devices -> Hot Water)."
    }
  }
}
@@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/tesla_fleet",
  "iot_class": "cloud_polling",
  "loggers": ["tesla-fleet-api"],
  "requirements": ["tesla-fleet-api==0.9.2"]
  "requirements": ["tesla-fleet-api==0.9.8"]
}
@@ -303,8 +303,8 @@ VEHICLE_TIME_DESCRIPTIONS: tuple[TeslaFleetTimeEntityDescription, ...] = (
    ),
)

ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
    SensorEntityDescription(
ENERGY_LIVE_DESCRIPTIONS: tuple[TeslaFleetSensorEntityDescription, ...] = (
    TeslaFleetSensorEntityDescription(
        key="solar_power",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,

@@ -312,7 +312,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
        suggested_display_precision=2,
        device_class=SensorDeviceClass.POWER,
    ),
    SensorEntityDescription(
    TeslaFleetSensorEntityDescription(
        key="energy_left",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,

@@ -321,7 +321,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
        device_class=SensorDeviceClass.ENERGY_STORAGE,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    SensorEntityDescription(
    TeslaFleetSensorEntityDescription(
        key="total_pack_energy",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,

@@ -331,14 +331,15 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
    TeslaFleetSensorEntityDescription(
        key="percentage_charged",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=PERCENTAGE,
        device_class=SensorDeviceClass.BATTERY,
        suggested_display_precision=2,
        value_fn=lambda value: value or 0,
    ),
    SensorEntityDescription(
    TeslaFleetSensorEntityDescription(
        key="battery_power",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,

@@ -346,7 +347,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
        suggested_display_precision=2,
        device_class=SensorDeviceClass.POWER,
    ),
    SensorEntityDescription(
    TeslaFleetSensorEntityDescription(
        key="load_power",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,

@@ -354,7 +355,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
        suggested_display_precision=2,
        device_class=SensorDeviceClass.POWER,
    ),
    SensorEntityDescription(
    TeslaFleetSensorEntityDescription(
        key="grid_power",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,

@@ -362,7 +363,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
        suggested_display_precision=2,
        device_class=SensorDeviceClass.POWER,
    ),
    SensorEntityDescription(
    TeslaFleetSensorEntityDescription(
        key="grid_services_power",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,

@@ -370,7 +371,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
        suggested_display_precision=2,
        device_class=SensorDeviceClass.POWER,
    ),
    SensorEntityDescription(
    TeslaFleetSensorEntityDescription(
        key="generator_power",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,

@@ -379,7 +380,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
        device_class=SensorDeviceClass.POWER,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
    TeslaFleetSensorEntityDescription(
        key="island_status",
        options=[
            "island_status_unknown",

@@ -550,12 +551,12 @@ class TeslaFleetVehicleTimeSensorEntity(TeslaFleetVehicleEntity, SensorEntity):
class TeslaFleetEnergyLiveSensorEntity(TeslaFleetEnergyLiveEntity, SensorEntity):
    """Base class for Tesla Fleet energy site metric sensors."""

    entity_description: SensorEntityDescription
    entity_description: TeslaFleetSensorEntityDescription

    def __init__(
        self,
        data: TeslaFleetEnergyData,
        description: SensorEntityDescription,
        description: TeslaFleetSensorEntityDescription,
    ) -> None:
        """Initialize the sensor."""
        self.entity_description = description

@@ -563,7 +564,7 @@ class TeslaFleetEnergyLiveSensorEntity(TeslaFleetEnergyLiveEntity, SensorEntity)
    def _async_update_attrs(self) -> None:
        """Update the attributes of the sensor."""
        self._attr_native_value = self._value
        self._attr_native_value = self.entity_description.value_fn(self._value)


class TeslaFleetEnergyHistorySensorEntity(TeslaFleetEnergyHistoryEntity, SensorEntity):
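A simplified sketch of the description pattern introduced above: a frozen dataclass adds a value_fn hook so each sensor can normalize its raw reading, and percentage_charged uses `value or 0` to coerce None. Class and field names here are pared down from the real entity descriptions:

from collections.abc import Callable
from dataclasses import dataclass


@dataclass(frozen=True, kw_only=True)
class Description:
    key: str
    value_fn: Callable[[float | None], float | None] = lambda value: value


DESCRIPTIONS = (
    Description(key="solar_power"),
    Description(key="percentage_charged", value_fn=lambda value: value or 0),
)

for desc in DESCRIPTIONS:
    raw = None  # e.g. the API omitted the field
    print(desc.key, desc.value_fn(raw))  # solar_power -> None, percentage_charged -> 0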
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/teslemetry",
  "iot_class": "cloud_polling",
  "loggers": ["tesla-fleet-api"],
  "requirements": ["tesla-fleet-api==0.9.2", "teslemetry-stream==0.6.6"]
  "requirements": ["tesla-fleet-api==0.9.8", "teslemetry-stream==0.6.6"]
}
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/tessie",
  "iot_class": "cloud_polling",
  "loggers": ["tessie", "tesla-fleet-api"],
  "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.9.2"]
  "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.9.8"]
}
@@ -258,6 +258,7 @@ DESCRIPTIONS: tuple[TessieSensorEntityDescription, ...] = (
    ),
)


ENERGY_LIVE_DESCRIPTIONS: tuple[TessieSensorEntityDescription, ...] = (
    TessieSensorEntityDescription(
        key="solar_power",

@@ -292,6 +293,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[TessieSensorEntityDescription, ...] = (
        native_unit_of_measurement=PERCENTAGE,
        device_class=SensorDeviceClass.BATTERY,
        suggested_display_precision=2,
        value_fn=lambda value: value or 0,
    ),
    TessieSensorEntityDescription(
        key="battery_power",
@@ -72,7 +72,7 @@ class ThermoProConfigFlow(ConfigFlow, domain=DOMAIN):
                title=self._discovered_devices[address], data={}
            )

        current_addresses = self._async_current_ids()
        current_addresses = self._async_current_ids(include_ignore=False)
        for discovery_info in async_discovered_service_info(self.hass, False):
            address = discovery_info.address
            if address in current_addresses or address in self._discovered_devices:
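Not the Home Assistant API itself, but a sketch of what the repeated `include_ignore=False` change does to the dedupe step in these discovery flows: entries the user ignored no longer count as "already configured", so an ignored device can show up again in a manual discovery list:

from dataclasses import dataclass

SOURCE_IGNORE = "ignore"


@dataclass
class Entry:
    unique_id: str
    source: str


def current_ids(entries: list[Entry], include_ignore: bool) -> set[str]:
    return {
        e.unique_id for e in entries if include_ignore or e.source != SOURCE_IGNORE
    }


entries = [Entry("aa:bb:cc", "user"), Entry("dd:ee:ff", SOURCE_IGNORE)]
print(current_ids(entries, include_ignore=True))   # contains both addresses
print(current_ids(entries, include_ignore=False))  # the ignored address is absent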
@@ -72,7 +72,7 @@ class TiltConfigFlow(ConfigFlow, domain=DOMAIN):
                title=self._discovered_devices[address], data={}
            )

        current_addresses = self._async_current_ids()
        current_addresses = self._async_current_ids(include_ignore=False)
        for discovery_info in async_discovered_service_info(self.hass, False):
            address = discovery_info.address
            if address in current_addresses or address in self._discovered_devices:
@@ -541,9 +541,8 @@ class TodoistProjectData:
            return None

        # All task Labels (optional parameter).
        task[LABELS] = [
            label.name for label in self._labels if label.name in data.labels
        ]
        labels = data.labels or []
        task[LABELS] = [label.name for label in self._labels if label.name in labels]
        if self._label_whitelist and (
            not any(label in task[LABELS] for label in self._label_whitelist)
        ):
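The two-line guard above fixes a None-propagation bug; a tiny illustration with made-up data: a membership test against None raises TypeError, while coalescing to an empty list keeps the comprehension total:

api_labels = None  # what the API can return for a task with no labels

try:
    "errand" in api_labels  # the old code path
except TypeError as err:
    print(err)  # argument of type 'NoneType' is not iterable

labels = api_labels or []
print([name for name in ("errand", "home") if name in labels])  # []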
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/todoist",
  "iot_class": "cloud_polling",
  "loggers": ["todoist"],
  "requirements": ["todoist-api-python==2.1.2"]
  "requirements": ["todoist-api-python==2.1.7"]
}
@@ -11,5 +11,5 @@
  "documentation": "https://www.home-assistant.io/integrations/tolo",
  "iot_class": "local_polling",
  "loggers": ["tololib"],
  "requirements": ["tololib==1.1.0"]
  "requirements": ["tololib==1.2.2"]
}
@@ -301,5 +301,5 @@
  "iot_class": "local_polling",
  "loggers": ["kasa"],
  "quality_scale": "platinum",
  "requirements": ["python-kasa[speedups]==0.10.0"]
  "requirements": ["python-kasa[speedups]==0.10.1"]
}
@@ -135,13 +135,17 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
    TPLinkSensorEntityDescription(
        key="clean_area",
        device_class=SensorDeviceClass.AREA,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    TPLinkSensorEntityDescription(
        entity_registry_enabled_default=False,
        key="clean_progress",
        state_class=SensorStateClass.MEASUREMENT,
    ),
    TPLinkSensorEntityDescription(
        key="last_clean_time",
        device_class=SensorDeviceClass.DURATION,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfTime.SECONDS,
        suggested_unit_of_measurement=UnitOfTime.MINUTES,
        convert_fn=_TOTAL_SECONDS_METHOD_CALLER,

@@ -155,20 +159,26 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
        device_class=SensorDeviceClass.TIMESTAMP,
    ),
    TPLinkSensorEntityDescription(
        entity_registry_enabled_default=False,
        key="total_clean_time",
        device_class=SensorDeviceClass.DURATION,
        state_class=SensorStateClass.TOTAL_INCREASING,
        native_unit_of_measurement=UnitOfTime.SECONDS,
        suggested_unit_of_measurement=UnitOfTime.MINUTES,
        convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
    ),
    TPLinkSensorEntityDescription(
        entity_registry_enabled_default=False,
        key="total_clean_area",
        device_class=SensorDeviceClass.AREA,
        state_class=SensorStateClass.TOTAL_INCREASING,
    ),
    TPLinkSensorEntityDescription(
        key="total_clean_count",
        state_class=SensorStateClass.TOTAL_INCREASING,
    ),
    TPLinkSensorEntityDescription(
        entity_registry_enabled_default=False,
        key="main_brush_remaining",
        device_class=SensorDeviceClass.DURATION,
        native_unit_of_measurement=UnitOfTime.SECONDS,

@@ -176,6 +186,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
        convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
    ),
    TPLinkSensorEntityDescription(
        entity_registry_enabled_default=False,
        key="main_brush_used",
        device_class=SensorDeviceClass.DURATION,
        native_unit_of_measurement=UnitOfTime.SECONDS,

@@ -183,6 +194,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
        convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
    ),
    TPLinkSensorEntityDescription(
        entity_registry_enabled_default=False,
        key="side_brush_remaining",
        device_class=SensorDeviceClass.DURATION,
        native_unit_of_measurement=UnitOfTime.SECONDS,

@@ -190,6 +202,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
        convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
    ),
    TPLinkSensorEntityDescription(
        entity_registry_enabled_default=False,
        key="side_brush_used",
        device_class=SensorDeviceClass.DURATION,
        native_unit_of_measurement=UnitOfTime.SECONDS,

@@ -197,6 +210,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
        convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
    ),
    TPLinkSensorEntityDescription(
        entity_registry_enabled_default=False,
        key="filter_remaining",
        device_class=SensorDeviceClass.DURATION,
        native_unit_of_measurement=UnitOfTime.SECONDS,

@@ -204,6 +218,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
        convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
    ),
    TPLinkSensorEntityDescription(
        entity_registry_enabled_default=False,
        key="filter_used",
        device_class=SensorDeviceClass.DURATION,
        native_unit_of_measurement=UnitOfTime.SECONDS,

@@ -211,6 +226,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
        convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
    ),
    TPLinkSensorEntityDescription(
        entity_registry_enabled_default=False,
        key="sensor_remaining",
        device_class=SensorDeviceClass.DURATION,
        native_unit_of_measurement=UnitOfTime.SECONDS,

@@ -218,6 +234,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
        convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
    ),
    TPLinkSensorEntityDescription(
        entity_registry_enabled_default=False,
        key="sensor_used",
        device_class=SensorDeviceClass.DURATION,
        native_unit_of_measurement=UnitOfTime.SECONDS,

@@ -225,6 +242,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
        convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
    ),
    TPLinkSensorEntityDescription(
        entity_registry_enabled_default=False,
        key="charging_contacts_remaining",
        device_class=SensorDeviceClass.DURATION,
        native_unit_of_measurement=UnitOfTime.SECONDS,

@@ -232,6 +250,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
        convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
    ),
    TPLinkSensorEntityDescription(
        entity_registry_enabled_default=False,
        key="charging_contacts_used",
        device_class=SensorDeviceClass.DURATION,
        native_unit_of_measurement=UnitOfTime.SECONDS,
@@ -40,7 +40,7 @@
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["uiprotect", "unifi_discovery"],
  "requirements": ["uiprotect==7.5.0", "unifi-discovery==1.2.0"],
  "requirements": ["uiprotect==7.5.1", "unifi-discovery==1.2.0"],
  "ssdp": [
    {
      "manufacturer": "Ubiquiti Networks",
@@ -49,7 +49,7 @@ link_deactivate:
  target:
    entity:
      integration: upb
      domain: light
      domain: scene

link_goto:
  target:
@@ -155,11 +155,15 @@ class VeSyncHumidifierHA(VeSyncBaseEntity, HumidifierEntity):
        """Set the mode of the device."""
        if mode not in self.available_modes:
            raise HomeAssistantError(
                "{mode} is not one of the valid available modes: {self.available_modes}"
                f"{mode} is not one of the valid available modes: {self.available_modes}"
            )
        if not self.device.set_humidity_mode(self._get_vs_mode(mode)):
            raise HomeAssistantError(f"An error occurred while setting mode {mode}.")

        if mode == MODE_SLEEP:
            # We successfully changed the mode. Consider it a success even if display operation fails.
            self.device.set_display(False)

        # Changing mode while humidifier is off actually turns it on, as per the app. But
        # the library does not seem to update the device_status. It is also possible that
        # other attributes get updated. Scheduling a forced refresh to get device status.
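A stand-alone sketch of the control flow in this hunk, with a fake device object in place of the pyvesync one: validate the requested mode, fail hard if the mode change itself fails, treat a failed display toggle after a successful switch to sleep mode as best-effort, and leave the forced refresh to the caller:

MODE_SLEEP = "sleep"


class FakeHumidifier:
    """Stand-in for the pyvesync device object."""

    def set_humidity_mode(self, mode: str) -> bool:
        return True

    def set_display(self, on: bool) -> bool:
        return False  # a failure here should not fail the whole call


def set_mode(device: FakeHumidifier, mode: str, available_modes: list[str]) -> None:
    if mode not in available_modes:
        raise ValueError(
            f"{mode} is not one of the valid available modes: {available_modes}"
        )
    if not device.set_humidity_mode(mode):
        raise RuntimeError(f"An error occurred while setting mode {mode}.")
    if mode == MODE_SLEEP:
        # Mode change already succeeded; ignore a display failure.
        device.set_display(False)
    # The caller would now schedule a forced refresh, since changing mode
    # while the humidifier is off can turn it on without updating state.


set_mode(FakeHumidifier(), MODE_SLEEP, ["auto", MODE_SLEEP, "manual"])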
@@ -12,5 +12,5 @@
  "documentation": "https://www.home-assistant.io/integrations/vesync",
  "iot_class": "cloud_polling",
  "loggers": ["pyvesync"],
  "requirements": ["pyvesync==2.1.16"]
  "requirements": ["pyvesync==2.1.17"]
}
@@ -306,7 +306,7 @@ class XiaomiConfigFlow(ConfigFlow, domain=DOMAIN):

            return self._async_get_or_create_entry()

        current_addresses = self._async_current_ids()
        current_addresses = self._async_current_ids(include_ignore=False)
        for discovery_info in async_discovered_service_info(self.hass, False):
            address = discovery_info.address
            if address in current_addresses or address in self._discovered_devices:
@@ -267,7 +267,7 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN):
        if discovery := self._discovery_info:
            self._discovered_devices[discovery.address] = discovery
        else:
            current_addresses = self._async_current_ids()
            current_addresses = self._async_current_ids(include_ignore=False)
            current_unique_names = {
                entry.data.get(CONF_LOCAL_NAME)
                for entry in self._async_current_entries()
@@ -25,7 +25,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2025
MINOR_VERSION: Final = 2
PATCH_VERSION: Final = "0b4"
PATCH_VERSION: Final = "0"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 0)