forked from home-assistant/core
Compare commits: 2025.1.0b5 ... 2025.1.2 (110 commits)
| SHA1 |
|---|
| bceccd85ee |
| 0027d907a4 |
| 5d201406cb |
| 30924b561a |
| 1eddb4a21b |
| 42cdd25d90 |
| b8b7daff5a |
| 7f3f550b7b |
| 3c14e2f0a8 |
| 9601455d9f |
| 902bd57b4b |
| ab071d1c1b |
| 2c02eefa11 |
| 44808c02f9 |
| d59a91a905 |
| 298f059488 |
| 7a5525951d |
| 9a9514d53b |
| 5337ab2e72 |
| b815899fdc |
| 81a669c163 |
| 188def51c6 |
| eb345971b4 |
| 9288dce7ed |
| 4867d3a187 |
| c40771ba6a |
| 2fc489d17d |
| 279785b22e |
| e5c986171b |
| 58805f721c |
| 29989e9034 |
| fbd031a03d |
| fe1ce39831 |
| 914c6459dc |
| 43ffdd0eef |
| 39d16ed5ce |
| 07f3d939e3 |
| eda60073ee |
| 09ffa38ddf |
| b32a791ea4 |
| a4ea25631a |
| bd8ea646a9 |
| 538a2ea057 |
| b461bc2fb5 |
| 103960e0a7 |
| 1c4273ce91 |
| 0f0209d4bb |
| 27b8b8458b |
| c022d91baa |
| 0daac09008 |
| ca8416fe50 |
| a14f6faaaf |
| a9a14381d3 |
| a4d0794fe4 |
| 9ead6fe362 |
| 017679abe1 |
| 0bd7b793fe |
| c46a70fdcf |
| 8c2ec5e7c8 |
| 3063f0b565 |
| aafc1ff074 |
| 45142b0cc0 |
| a412acec0e |
| ac4bd32137 |
| 7e1e63374f |
| 03fd6a901b |
| 46b2830699 |
| b416ae1387 |
| 962b880146 |
| 9c98125d20 |
| c9f1fee6bb |
| 9b8ed9643f |
| 7ea7178aa9 |
| c5746291cc |
| 1af384bc0a |
| ea82c1b73e |
| 96936f5f4a |
| 316f93f208 |
| f719a14537 |
| a830a14342 |
| 1b67d51e24 |
| e1f6475623 |
| 59a3fe857b |
| f364e29148 |
| 47190e4ac1 |
| 7fa1983da0 |
| 9b906e94c7 |
| 5ac4d5bef7 |
| 995e222959 |
| 61ac8e7e8c |
| 67ec71031d |
| 59f866bcf7 |
| d75d970fc7 |
| 0a13516ddd |
| 21aca3c146 |
| faf9c2ee40 |
| e89a1da462 |
| 8ace126d9f |
| ca6bae6b15 |
| c9ba267fec |
| 0e79c17cb8 |
| 4cb413521d |
| f97439eaab |
| 568b637dc5 |
| 3a8f71a64a |
| fea3dfda94 |
| 554cdd1784 |
| ce7a0650e4 |
| 5895aa4cde |
| bd5477729a |
@@ -89,7 +89,7 @@ from .helpers import (
 )
 from .helpers.dispatcher import async_dispatcher_send_internal
 from .helpers.storage import get_internal_store_manager
-from .helpers.system_info import async_get_system_info, is_official_image
+from .helpers.system_info import async_get_system_info
 from .helpers.typing import ConfigType
 from .setup import (
     # _setup_started is marked as protected to make it clear

@@ -106,6 +106,7 @@ from .util.async_ import create_eager_task
 from .util.hass_dict import HassKey
 from .util.logging import async_activate_log_queue_handler
 from .util.package import async_get_user_site, is_docker_env, is_virtual_env
+from .util.system_info import is_official_image

 with contextlib.suppress(ImportError):
     # Ensure anyio backend is imported to avoid it being imported in the event loop
@@ -26,5 +26,5 @@
   "iot_class": "local_push",
   "loggers": ["aioacaia"],
   "quality_scale": "platinum",
-  "requirements": ["aioacaia==0.1.11"]
+  "requirements": ["aioacaia==0.1.13"]
 }
@@ -44,12 +44,12 @@
       }
     },
     "apps": {
-      "title": "Configure Android Apps",
-      "description": "Configure application id {app_id}",
+      "title": "Configure Android apps",
+      "description": "Configure application ID {app_id}",
       "data": {
-        "app_name": "Application Name",
+        "app_name": "Application name",
         "app_id": "Application ID",
-        "app_icon": "Application Icon",
+        "app_icon": "Application icon",
         "app_delete": "Check to delete this application"
       }
     }
@@ -98,7 +98,6 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
     VERSION = 1

     scan_filter: str | None = None
-    all_identifiers: set[str]
     atv: BaseConfig | None = None
     atv_identifiers: list[str] | None = None
     _host: str  # host in zeroconf discovery info, should not be accessed by other flows

@@ -118,6 +117,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
     def __init__(self) -> None:
         """Initialize a new AppleTVConfigFlow."""
         self.credentials: dict[int, str | None] = {}  # Protocol -> credentials
+        self.all_identifiers: set[str] = set()

     @property
     def device_identifier(self) -> str | None:
@@ -120,6 +120,8 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
         """Wait for the client to be ready."""

+        if not self.data or Attribute.MAC_ADDRESS not in self.data:
+            await self.client.read_mac_address()

         data = await self.client.wait_for_response(
             FunctionalDomain.IDENTIFICATION, 2, WAIT_TIMEOUT
         )

@@ -130,12 +132,9 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):

             return False

-        await self.client.wait_for_response(
-            FunctionalDomain.IDENTIFICATION, 4, WAIT_TIMEOUT
-        )
-
-        await self.client.read_thermostat_iaq_available()
+        if not self.data or Attribute.NAME not in self.data:
+            await self.client.wait_for_response(
+                FunctionalDomain.IDENTIFICATION, 4, WAIT_TIMEOUT
+            )

+        if not self.data or Attribute.THERMOSTAT_MODES not in self.data:
+            await self.client.read_thermostat_iaq_available()

         await self.client.wait_for_response(
             FunctionalDomain.CONTROL, 7, WAIT_TIMEOUT
         )

@@ -144,10 +143,16 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
             not self.data
             or Attribute.INDOOR_TEMPERATURE_CONTROLLING_SENSOR_STATUS not in self.data
         ):
+            await self.client.read_sensors()
+
             await self.client.wait_for_response(
                 FunctionalDomain.SENSORS, 2, WAIT_TIMEOUT
             )

+        await self.client.read_thermostat_status()
+
+        await self.client.read_iaq_status()
+
         await ready_callback(True)

         return True
@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["pyaprilaire"],
-  "requirements": ["pyaprilaire==0.7.4"]
+  "requirements": ["pyaprilaire==0.7.7"]
 }
@@ -31,8 +31,8 @@
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "abort": {
-      "invalid_unique_id": "Impossible to determine a valid unique id for the device",
-      "no_unique_id": "A device without a valid unique id is already configured. Configuration of multiple instance is not possible"
+      "invalid_unique_id": "Impossible to determine a valid unique ID for the device",
+      "no_unique_id": "A device without a valid unique ID is already configured. Configuration of multiple instances is not possible"
     }
   },
   "options": {

@@ -42,7 +42,7 @@
       "consider_home": "Seconds to wait before considering a device away",
       "track_unknown": "Track unknown / unnamed devices",
       "interface": "The interface that you want statistics from (e.g. eth0, eth1 etc)",
-      "dnsmasq": "The location in the router of the dnsmasq.leases files",
+      "dnsmasq": "The location of the dnsmasq.leases file in the router",
       "require_ip": "Devices must have IP (for access point mode)"
     }
   }
@@ -21,8 +21,10 @@ from .manager import (
     BackupManager,
     BackupPlatformProtocol,
     BackupReaderWriter,
+    BackupReaderWriterError,
     CoreBackupReaderWriter,
+    CreateBackupEvent,
     IncorrectPasswordError,
     ManagerBackup,
     NewBackup,
     WrittenBackup,

@@ -39,8 +41,10 @@ __all__ = [
     "BackupAgentPlatformProtocol",
     "BackupPlatformProtocol",
     "BackupReaderWriter",
+    "BackupReaderWriterError",
+    "CreateBackupEvent",
     "Folder",
     "IncorrectPasswordError",
     "LocalBackupAgent",
     "NewBackup",
     "WrittenBackup",
@@ -7,6 +7,7 @@ from collections.abc import Callable
 from dataclasses import dataclass, field, replace
 from datetime import datetime, timedelta
 from enum import StrEnum
+import random
 from typing import TYPE_CHECKING, Self, TypedDict

 from cronsim import CronSim

@@ -17,7 +18,7 @@ from homeassistant.helpers.typing import UNDEFINED, UndefinedType
 from homeassistant.util import dt as dt_util

 from .const import LOGGER
-from .models import Folder
+from .models import BackupManagerError, Folder

 if TYPE_CHECKING:
     from .manager import BackupManager, ManagerBackup

@@ -28,6 +29,10 @@ if TYPE_CHECKING:
 CRON_PATTERN_DAILY = "45 4 * * *"
 CRON_PATTERN_WEEKLY = "45 4 * * {}"

+# Randomize the start time of the backup by up to 60 minutes to avoid
+# all backups running at the same time.
+BACKUP_START_TIME_JITTER = 60 * 60
+

 class StoredBackupConfig(TypedDict):
     """Represent the stored backup config."""

@@ -124,6 +129,7 @@ class BackupConfig:
     def load(self, stored_config: StoredBackupConfig) -> None:
         """Load config."""
         self.data = BackupConfigData.from_dict(stored_config)
+        self.data.retention.apply(self._manager)
         self.data.schedule.apply(self._manager)

     async def update(

@@ -160,8 +166,13 @@ class RetentionConfig:
     def apply(self, manager: BackupManager) -> None:
         """Apply backup retention configuration."""
         if self.days is not None:
+            LOGGER.debug(
+                "Scheduling next automatic delete of backups older than %s in 1 day",
+                self.days,
+            )
             self._schedule_next(manager)
         else:
+            LOGGER.debug("Unscheduling next automatic delete")
             self._unschedule_next(manager)

     def to_dict(self) -> StoredRetentionConfig:

@@ -318,11 +329,13 @@ class BackupSchedule:
                     password=config_data.create_backup.password,
                     with_automatic_settings=True,
                 )
+            except BackupManagerError as err:
+                LOGGER.error("Error creating backup: %s", err)
            except Exception:  # noqa: BLE001
-                # another more specific exception will be added
-                # and handled in the future
                 LOGGER.exception("Unexpected error creating automatic backup")

+        next_time += timedelta(seconds=random.randint(0, BACKUP_START_TIME_JITTER))
+        LOGGER.debug("Scheduling next automatic backup at %s", next_time)
         manager.remove_next_backup_event = async_track_point_in_time(
             manager.hass, _create_backup, next_time
         )
@@ -46,15 +46,11 @@ from .const import (
     EXCLUDE_FROM_BACKUP,
     LOGGER,
 )
-from .models import AgentBackup, Folder
+from .models import AgentBackup, BackupManagerError, Folder
 from .store import BackupStore
 from .util import make_backup_dir, read_backup, validate_password


-class IncorrectPasswordError(HomeAssistantError):
-    """Raised when the password is incorrect."""
-
-
 @dataclass(frozen=True, kw_only=True, slots=True)
 class NewBackup:
     """New backup class."""

@@ -245,6 +241,14 @@ class BackupReaderWriter(abc.ABC):
         """Restore a backup."""


+class BackupReaderWriterError(HomeAssistantError):
+    """Backup reader/writer error."""
+
+
+class IncorrectPasswordError(BackupReaderWriterError):
+    """Raised when the password is incorrect."""
+
+
 class BackupManager:
     """Define the format that backup managers can have."""
@@ -373,7 +377,9 @@ class BackupManager:
         )
         for result in pre_backup_results:
             if isinstance(result, Exception):
-                raise result
+                raise BackupManagerError(
+                    f"Error during pre-backup: {result}"
+                ) from result

     async def async_post_backup_actions(self) -> None:
         """Perform post backup actions."""

@@ -386,7 +392,9 @@ class BackupManager:
         )
         for result in post_backup_results:
             if isinstance(result, Exception):
-                raise result
+                raise BackupManagerError(
+                    f"Error during post-backup: {result}"
+                ) from result

     async def load_platforms(self) -> None:
         """Load backup platforms."""
@@ -422,11 +430,22 @@ class BackupManager:
             return_exceptions=True,
         )
         for idx, result in enumerate(sync_backup_results):
-            if isinstance(result, Exception):
-                agent_errors[agent_ids[idx]] = result
-                LOGGER.exception(
-                    "Error during backup upload - %s", result, exc_info=result
-                )
+            if isinstance(result, BackupReaderWriterError):
+                # writer errors will affect all agents
+                # no point in continuing
+                raise BackupManagerError(str(result)) from result
+            if isinstance(result, BackupAgentError):
+                LOGGER.error("Error uploading to %s: %s", agent_ids[idx], result)
+                agent_errors[agent_ids[idx]] = result
+                continue
+            if isinstance(result, Exception):
+                # trap bugs from agents
+                agent_errors[agent_ids[idx]] = result
+                LOGGER.error("Unexpected error: %s", result, exc_info=result)
+                continue
+            if isinstance(result, BaseException):
+                raise result

         return agent_errors

     async def async_get_backups(
@@ -449,7 +468,7 @@ class BackupManager:
                 agent_errors[agent_ids[idx]] = result
                 continue
             if isinstance(result, BaseException):
-                raise result
+                raise result  # unexpected error
             for agent_backup in result:
                 if (backup_id := agent_backup.backup_id) not in backups:
                     if known_backup := self.known_backups.get(backup_id):

@@ -499,7 +518,7 @@ class BackupManager:
                 agent_errors[agent_ids[idx]] = result
                 continue
             if isinstance(result, BaseException):
-                raise result
+                raise result  # unexpected error
             if not result:
                 continue
             if backup is None:

@@ -563,7 +582,7 @@ class BackupManager:
                 agent_errors[agent_ids[idx]] = result
                 continue
             if isinstance(result, BaseException):
-                raise result
+                raise result  # unexpected error

         if not agent_errors:
             self.known_backups.remove(backup_id)

@@ -578,7 +597,7 @@ class BackupManager:
     ) -> None:
         """Receive and store a backup file from upload."""
         if self.state is not BackupManagerState.IDLE:
-            raise HomeAssistantError(f"Backup manager busy: {self.state}")
+            raise BackupManagerError(f"Backup manager busy: {self.state}")
         self.async_on_backup_event(
             ReceiveBackupEvent(stage=None, state=ReceiveBackupState.IN_PROGRESS)
         )
@@ -652,6 +671,7 @@ class BackupManager:
             include_homeassistant=include_homeassistant,
             name=name,
             password=password,
+            raise_task_error=True,
             with_automatic_settings=with_automatic_settings,
         )
         assert self._backup_finish_task

@@ -669,11 +689,12 @@ class BackupManager:
         include_homeassistant: bool,
         name: str | None,
         password: str | None,
+        raise_task_error: bool = False,
         with_automatic_settings: bool = False,
     ) -> NewBackup:
         """Initiate generating a backup."""
         if self.state is not BackupManagerState.IDLE:
-            raise HomeAssistantError(f"Backup manager busy: {self.state}")
+            raise BackupManagerError(f"Backup manager busy: {self.state}")

         if with_automatic_settings:
             self.config.data.last_attempted_automatic_backup = dt_util.now()

@@ -692,6 +713,7 @@ class BackupManager:
                 include_homeassistant=include_homeassistant,
                 name=name,
                 password=password,
+                raise_task_error=raise_task_error,
                 with_automatic_settings=with_automatic_settings,
             )
         except Exception:
@@ -714,57 +736,81 @@ class BackupManager:
         include_homeassistant: bool,
         name: str | None,
         password: str | None,
+        raise_task_error: bool,
         with_automatic_settings: bool,
     ) -> NewBackup:
         """Initiate generating a backup."""
         if not agent_ids:
-            raise HomeAssistantError("At least one agent must be selected")
-        if any(agent_id not in self.backup_agents for agent_id in agent_ids):
-            raise HomeAssistantError("Invalid agent selected")
+            raise BackupManagerError("At least one agent must be selected")
+        if invalid_agents := [
+            agent_id for agent_id in agent_ids if agent_id not in self.backup_agents
+        ]:
+            raise BackupManagerError(f"Invalid agents selected: {invalid_agents}")
         if include_all_addons and include_addons:
-            raise HomeAssistantError(
+            raise BackupManagerError(
                 "Cannot include all addons and specify specific addons"
             )

         backup_name = (
             name
-            or f"{"Automatic" if with_automatic_settings else "Custom"} {HAVERSION}"
+            or f"{"Automatic" if with_automatic_settings else "Custom"} backup {HAVERSION}"
         )
-        new_backup, self._backup_task = await self._reader_writer.async_create_backup(
-            agent_ids=agent_ids,
-            backup_name=backup_name,
-            extra_metadata={
-                "instance_id": await instance_id.async_get(self.hass),
-                "with_automatic_settings": with_automatic_settings,
-            },
-            include_addons=include_addons,
-            include_all_addons=include_all_addons,
-            include_database=include_database,
-            include_folders=include_folders,
-            include_homeassistant=include_homeassistant,
-            on_progress=self.async_on_backup_event,
-            password=password,
-        )
-        self._backup_finish_task = self.hass.async_create_task(
+
+        try:
+            (
+                new_backup,
+                self._backup_task,
+            ) = await self._reader_writer.async_create_backup(
+                agent_ids=agent_ids,
+                backup_name=backup_name,
+                extra_metadata={
+                    "instance_id": await instance_id.async_get(self.hass),
+                    "with_automatic_settings": with_automatic_settings,
+                },
+                include_addons=include_addons,
+                include_all_addons=include_all_addons,
+                include_database=include_database,
+                include_folders=include_folders,
+                include_homeassistant=include_homeassistant,
+                on_progress=self.async_on_backup_event,
+                password=password,
+            )
+        except BackupReaderWriterError as err:
+            raise BackupManagerError(str(err)) from err
+
+        backup_finish_task = self._backup_finish_task = self.hass.async_create_task(
             self._async_finish_backup(agent_ids, with_automatic_settings),
             name="backup_manager_finish_backup",
         )
+        if not raise_task_error:
+
+            def log_finish_task_error(task: asyncio.Task[None]) -> None:
+                if task.done() and not task.cancelled() and (err := task.exception()):
+                    if isinstance(err, BackupManagerError):
+                        LOGGER.error("Error creating backup: %s", err)
+                    else:
+                        LOGGER.error("Unexpected error: %s", err, exc_info=err)
+
+            backup_finish_task.add_done_callback(log_finish_task_error)

         return new_backup

     async def _async_finish_backup(
         self, agent_ids: list[str], with_automatic_settings: bool
     ) -> None:
         """Finish a backup."""
         if TYPE_CHECKING:
             assert self._backup_task is not None
+        backup_success = False
         try:
             written_backup = await self._backup_task
-        except Exception as err:  # noqa: BLE001
-            LOGGER.debug("Generating backup failed", exc_info=err)
-            self.async_on_backup_event(
-                CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
-            )
+        except Exception as err:
             if with_automatic_settings:
                 self._update_issue_backup_failed()

+            if isinstance(err, BackupReaderWriterError):
+                raise BackupManagerError(str(err)) from err
+            raise  # unexpected error
         else:
             LOGGER.debug(
                 "Generated new backup with backup_id %s, uploading to agents %s",
@@ -777,28 +823,40 @@ class BackupManager:
                     state=CreateBackupState.IN_PROGRESS,
                 )
             )
-            agent_errors = await self._async_upload_backup(
-                backup=written_backup.backup,
-                agent_ids=agent_ids,
-                open_stream=written_backup.open_stream,
-            )
-            await written_backup.release_stream()
-            if with_automatic_settings:
-                # create backup was successful, update last_completed_automatic_backup
-                self.config.data.last_completed_automatic_backup = dt_util.now()
-                self.store.save()
-                self._update_issue_after_agent_upload(agent_errors)
-            self.known_backups.add(written_backup.backup, agent_errors)
-
-            # delete old backups more numerous than copies
-            await delete_backups_exceeding_configured_count(self)
-
-            self.async_on_backup_event(
-                CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED)
-            )
+            try:
+                agent_errors = await self._async_upload_backup(
+                    backup=written_backup.backup,
+                    agent_ids=agent_ids,
+                    open_stream=written_backup.open_stream,
+                )
+            finally:
+                await written_backup.release_stream()
+            self.known_backups.add(written_backup.backup, agent_errors)
+            if not agent_errors:
+                if with_automatic_settings:
+                    # create backup was successful, update last_completed_automatic_backup
+                    self.config.data.last_completed_automatic_backup = dt_util.now()
+                    self.store.save()
+                backup_success = True
+
+            if with_automatic_settings:
+                self._update_issue_after_agent_upload(agent_errors)
+                # delete old backups more numerous than copies
+                # try this regardless of agent errors above
+                await delete_backups_exceeding_configured_count(self)
         finally:
             self._backup_task = None
             self._backup_finish_task = None
+            self.async_on_backup_event(
+                CreateBackupEvent(
+                    stage=None,
+                    state=CreateBackupState.COMPLETED
+                    if backup_success
+                    else CreateBackupState.FAILED,
+                )
+            )
             self.async_on_backup_event(IdleEvent())

     async def async_restore_backup(
@@ -814,7 +872,7 @@ class BackupManager:
     ) -> None:
         """Initiate restoring a backup."""
         if self.state is not BackupManagerState.IDLE:
-            raise HomeAssistantError(f"Backup manager busy: {self.state}")
+            raise BackupManagerError(f"Backup manager busy: {self.state}")

         self.async_on_backup_event(
             RestoreBackupEvent(stage=None, state=RestoreBackupState.IN_PROGRESS)

@@ -854,7 +912,7 @@ class BackupManager:
         """Initiate restoring a backup."""
         agent = self.backup_agents[agent_id]
         if not await agent.async_get_backup(backup_id):
-            raise HomeAssistantError(
+            raise BackupManagerError(
                 f"Backup {backup_id} not found in agent {agent_id}"
             )
@@ -1027,11 +1085,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         backup_id = _generate_backup_id(date_str, backup_name)

         if include_addons or include_all_addons or include_folders:
-            raise HomeAssistantError(
+            raise BackupReaderWriterError(
                 "Addons and folders are not supported by core backup"
             )
         if not include_homeassistant:
-            raise HomeAssistantError("Home Assistant must be included in backup")
+            raise BackupReaderWriterError("Home Assistant must be included in backup")

         backup_task = self._hass.async_create_task(
             self._async_create_backup(

@@ -1102,6 +1160,13 @@ class CoreBackupReaderWriter(BackupReaderWriter):
                 password,
                 local_agent_tar_file_path,
             )
+        except (BackupManagerError, OSError, tarfile.TarError, ValueError) as err:
+            # BackupManagerError from async_pre_backup_actions
+            # OSError from file operations
+            # TarError from tarfile
+            # ValueError from json_bytes
+            raise BackupReaderWriterError(str(err)) from err
+        else:
             backup = AgentBackup(
                 addons=[],
                 backup_id=backup_id,
@@ -1119,12 +1184,15 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         async_add_executor_job = self._hass.async_add_executor_job

         async def send_backup() -> AsyncIterator[bytes]:
-            f = await async_add_executor_job(tar_file_path.open, "rb")
-            try:
-                while chunk := await async_add_executor_job(f.read, 2**20):
-                    yield chunk
-            finally:
-                await async_add_executor_job(f.close)
+            try:
+                f = await async_add_executor_job(tar_file_path.open, "rb")
+                try:
+                    while chunk := await async_add_executor_job(f.read, 2**20):
+                        yield chunk
+                finally:
+                    await async_add_executor_job(f.close)
+            except OSError as err:
+                raise BackupReaderWriterError(str(err)) from err

         async def open_backup() -> AsyncIterator[bytes]:
             return send_backup()

@@ -1132,14 +1200,20 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         async def remove_backup() -> None:
             if local_agent_tar_file_path:
                 return
-            await async_add_executor_job(tar_file_path.unlink, True)
+            try:
+                await async_add_executor_job(tar_file_path.unlink, True)
+            except OSError as err:
+                raise BackupReaderWriterError(str(err)) from err

             return WrittenBackup(
                 backup=backup, open_stream=open_backup, release_stream=remove_backup
             )
         finally:
             # Inform integrations the backup is done
-            await manager.async_post_backup_actions()
+            try:
+                await manager.async_post_backup_actions()
+            except BackupManagerError as err:
+                raise BackupReaderWriterError(str(err)) from err

     def _mkdir_and_generate_backup_contents(
         self,
@@ -1209,6 +1283,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         if self._local_agent_id in agent_ids:
             local_agent = manager.local_backup_agents[self._local_agent_id]
             tar_file_path = local_agent.get_backup_path(backup.backup_id)
+            await async_add_executor_job(make_backup_dir, tar_file_path.parent)
             await async_add_executor_job(shutil.move, temp_file, tar_file_path)
         else:
             tar_file_path = temp_file

@@ -1252,11 +1327,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         """

         if restore_addons or restore_folders:
-            raise HomeAssistantError(
+            raise BackupReaderWriterError(
                 "Addons and folders are not supported in core restore"
             )
         if not restore_homeassistant and not restore_database:
-            raise HomeAssistantError(
+            raise BackupReaderWriterError(
                 "Home Assistant or database must be included in restore"
             )

@@ -1301,7 +1376,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         )

         await self._hass.async_add_executor_job(_write_restore_file)
-        await self._hass.services.async_call("homeassistant", "restart", {})
+        await self._hass.services.async_call("homeassistant", "restart", blocking=True)


 def _generate_backup_id(date: str, name: str) -> str:
@@ -6,6 +6,8 @@ from dataclasses import asdict, dataclass
 from enum import StrEnum
 from typing import Any, Self

+from homeassistant.exceptions import HomeAssistantError
+

 @dataclass(frozen=True, kw_only=True)
 class AddonInfo:

@@ -67,3 +69,7 @@ class AgentBackup:
             protected=data["protected"],
             size=data["size"],
         )
+
+
+class BackupManagerError(HomeAssistantError):
+    """Backup manager error."""
@@ -5,8 +5,8 @@
       "description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
     },
     "automatic_backup_failed_upload_agents": {
-      "title": "Automatic backup could not be uploaded to agents",
-      "description": "The automatic backup could not be uploaded to agents {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
+      "title": "Automatic backup could not be uploaded to the configured locations",
+      "description": "The automatic backup could not be uploaded to the configured locations {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
     }
   },
   "services": {
@@ -20,6 +20,6 @@
     "bluetooth-auto-recovery==1.4.2",
     "bluetooth-data-tools==1.20.0",
     "dbus-fast==2.24.3",
-    "habluetooth==3.6.0"
+    "habluetooth==3.7.0"
   ]
 }
@@ -6,5 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/bring",
   "integration_type": "service",
   "iot_class": "cloud_polling",
+  "loggers": ["bring_api"],
   "requirements": ["bring-api==0.9.1"]
 }
@@ -12,7 +12,7 @@
       }
     },
     "discovery_confirm": {
-      "description": "Do you want to setup {name}?"
+      "description": "Do you want to set up {name}?"
     },
     "reconfigure": {
       "description": "Reconfigure your Cambridge Audio Streamer.",

@@ -28,7 +28,7 @@
       "cannot_connect": "Failed to connect to Cambridge Audio device. Please make sure the device is powered up and connected to the network. Try power-cycling the device if it does not connect."
     },
     "abort": {
-      "wrong_device": "This Cambridge Audio device does not match the existing device id. Please make sure you entered the correct IP address.",
+      "wrong_device": "This Cambridge Audio device does not match the existing device ID. Please make sure you entered the correct IP address.",
       "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
@@ -516,6 +516,19 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
         """Flag supported features."""
         return self._attr_supported_features

+    @property
+    def supported_features_compat(self) -> CameraEntityFeature:
+        """Return the supported features as CameraEntityFeature.
+
+        Remove this compatibility shim in 2025.1 or later.
+        """
+        features = self.supported_features
+        if type(features) is int:  # noqa: E721
+            new_features = CameraEntityFeature(features)
+            self._report_deprecated_supported_features_values(new_features)
+            return new_features
+        return features
+
     @cached_property
     def is_recording(self) -> bool:
         """Return true if the device is recording."""

@@ -569,7 +582,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):

             self._deprecate_attr_frontend_stream_type_logged = True
             return self._attr_frontend_stream_type
-        if CameraEntityFeature.STREAM not in self.supported_features:
+        if CameraEntityFeature.STREAM not in self.supported_features_compat:
             return None
         if (
             self._webrtc_provider

@@ -798,7 +811,9 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     async def async_internal_added_to_hass(self) -> None:
         """Run when entity about to be added to hass."""
         await super().async_internal_added_to_hass()
-        self.__supports_stream = self.supported_features & CameraEntityFeature.STREAM
+        self.__supports_stream = (
+            self.supported_features_compat & CameraEntityFeature.STREAM
+        )
         await self.async_refresh_providers(write_state=False)

     async def async_refresh_providers(self, *, write_state: bool = True) -> None:

@@ -838,7 +853,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
         self, fn: Callable[[HomeAssistant, Camera], Coroutine[None, None, _T | None]]
     ) -> _T | None:
         """Get first provider that supports this camera."""
-        if CameraEntityFeature.STREAM not in self.supported_features:
+        if CameraEntityFeature.STREAM not in self.supported_features_compat:
             return None

         return await fn(self.hass, self)

@@ -896,7 +911,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     def camera_capabilities(self) -> CameraCapabilities:
         """Return the camera capabilities."""
         frontend_stream_types = set()
-        if CameraEntityFeature.STREAM in self.supported_features:
+        if CameraEntityFeature.STREAM in self.supported_features_compat:
             if self._supports_native_sync_webrtc or self._supports_native_async_webrtc:
                 # The camera has a native WebRTC implementation
                 frontend_stream_types.add(StreamType.WEB_RTC)

@@ -916,7 +931,8 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
         """
         super().async_write_ha_state()
         if self.__supports_stream != (
-            supports_stream := self.supported_features & CameraEntityFeature.STREAM
+            supports_stream := self.supported_features_compat
+            & CameraEntityFeature.STREAM
         ):
             self.__supports_stream = supports_stream
             self._invalidate_camera_capabilities_cache()
@@ -2,9 +2,12 @@

 from __future__ import annotations

+import asyncio
 import base64
 from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
 import hashlib
+import logging
+import random
 from typing import Any, Self

 from aiohttp import ClientError, ClientTimeout, StreamReader

@@ -23,7 +26,11 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from .client import CloudClient
 from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT

+_LOGGER = logging.getLogger(__name__)
 _STORAGE_BACKUP = "backup"
+_RETRY_LIMIT = 5
+_RETRY_SECONDS_MIN = 60
+_RETRY_SECONDS_MAX = 600


 async def _b64md5(stream: AsyncIterator[bytes]) -> str:
@@ -136,13 +143,55 @@ class CloudBackupAgent(BackupAgent):
             raise BackupAgentError("Failed to get download details") from err

         try:
-            resp = await self._cloud.websession.get(details["url"])
+            resp = await self._cloud.websession.get(
+                details["url"],
+                timeout=ClientTimeout(connect=10.0, total=43200.0),  # 43200s == 12h
+            )
+
             resp.raise_for_status()
         except ClientError as err:
             raise BackupAgentError("Failed to download backup") from err

         return ChunkAsyncStreamIterator(resp.content)

+    async def _async_do_upload_backup(
+        self,
+        *,
+        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
+        filename: str,
+        base64md5hash: str,
+        metadata: dict[str, Any],
+        size: int,
+    ) -> None:
+        """Upload a backup."""
+        try:
+            details = await async_files_upload_details(
+                self._cloud,
+                storage_type=_STORAGE_BACKUP,
+                filename=filename,
+                metadata=metadata,
+                size=size,
+                base64md5hash=base64md5hash,
+            )
+        except (ClientError, CloudError) as err:
+            raise BackupAgentError("Failed to get upload details") from err
+
+        try:
+            upload_status = await self._cloud.websession.put(
+                details["url"],
+                data=await open_stream(),
+                headers=details["headers"] | {"content-length": str(size)},
+                timeout=ClientTimeout(connect=10.0, total=43200.0),  # 43200s == 12h
+            )
+            _LOGGER.log(
+                logging.DEBUG if upload_status.status < 400 else logging.WARNING,
+                "Backup upload status: %s",
+                upload_status.status,
+            )
+            upload_status.raise_for_status()
+        except (TimeoutError, ClientError) as err:
+            raise BackupAgentError("Failed to upload backup") from err
+
     async def async_upload_backup(
         self,
         *,
@@ -159,29 +208,34 @@ class CloudBackupAgent(BackupAgent):
             raise BackupAgentError("Cloud backups must be protected")

         base64md5hash = await _b64md5(await open_stream())
+        filename = self._get_backup_filename()
+        metadata = backup.as_dict()
+        size = backup.size

-        try:
-            details = await async_files_upload_details(
-                self._cloud,
-                storage_type=_STORAGE_BACKUP,
-                filename=self._get_backup_filename(),
-                metadata=backup.as_dict(),
-                size=backup.size,
-                base64md5hash=base64md5hash,
-            )
-        except (ClientError, CloudError) as err:
-            raise BackupAgentError("Failed to get upload details") from err
-
-        try:
-            upload_status = await self._cloud.websession.put(
-                details["url"],
-                data=await open_stream(),
-                headers=details["headers"] | {"content-length": str(backup.size)},
-                timeout=ClientTimeout(connect=10.0, total=43200.0),  # 43200s == 12h
-            )
-            upload_status.raise_for_status()
-        except (TimeoutError, ClientError) as err:
-            raise BackupAgentError("Failed to upload backup") from err
+        tries = 1
+        while tries <= _RETRY_LIMIT:
+            try:
+                await self._async_do_upload_backup(
+                    open_stream=open_stream,
+                    filename=filename,
+                    base64md5hash=base64md5hash,
+                    metadata=metadata,
+                    size=size,
+                )
+                break
+            except BackupAgentError as err:
+                if tries == _RETRY_LIMIT:
+                    raise
+                tries += 1
+                retry_timer = random.randint(_RETRY_SECONDS_MIN, _RETRY_SECONDS_MAX)
+                _LOGGER.info(
+                    "Failed to upload backup, retrying (%s/%s) in %ss: %s",
+                    tries,
+                    _RETRY_LIMIT,
+                    retry_timer,
+                    err,
+                )
+                await asyncio.sleep(retry_timer)

     async def async_delete_backup(
         self,
@@ -208,6 +262,7 @@ class CloudBackupAgent(BackupAgent):
         """List backups."""
         try:
             backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP)
+            _LOGGER.debug("Cloud backups: %s", backups)
         except (ClientError, CloudError) as err:
             raise BackupAgentError("Failed to list backups") from err
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["hassil==2.1.0", "home-assistant-intents==2024.12.20"]
+  "requirements": ["hassil==2.1.0", "home-assistant-intents==2025.1.1"]
 }
@@ -2,7 +2,7 @@

 from __future__ import annotations

-from cookidoo_api import Cookidoo, CookidooConfig, CookidooLocalizationConfig
+from cookidoo_api import Cookidoo, CookidooConfig, get_localization_options

 from homeassistant.const import (
     CONF_COUNTRY,

@@ -22,15 +22,17 @@ PLATFORMS: list[Platform] = [Platform.TODO]
 async def async_setup_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool:
     """Set up Cookidoo from a config entry."""

+    localizations = await get_localization_options(
+        country=entry.data[CONF_COUNTRY].lower(),
+        language=entry.data[CONF_LANGUAGE],
+    )
+
     cookidoo = Cookidoo(
         async_get_clientsession(hass),
         CookidooConfig(
             email=entry.data[CONF_EMAIL],
             password=entry.data[CONF_PASSWORD],
-            localization=CookidooLocalizationConfig(
-                country_code=entry.data[CONF_COUNTRY].lower(),
-                language=entry.data[CONF_LANGUAGE],
-            ),
+            localization=localizations[0],
         ),
     )
@@ -10,7 +10,6 @@ from cookidoo_api import (
     Cookidoo,
     CookidooAuthException,
     CookidooConfig,
-    CookidooLocalizationConfig,
     CookidooRequestException,
     get_country_options,
     get_localization_options,

@@ -219,18 +218,19 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN):
         else:
             data_input[CONF_LANGUAGE] = (
                 await get_localization_options(country=data_input[CONF_COUNTRY].lower())
-            )[0]  # Pick any language to test login
+            )[0].language  # Pick any language to test login

-        session = async_get_clientsession(self.hass)
+        localizations = await get_localization_options(
+            country=data_input[CONF_COUNTRY].lower(),
+            language=data_input[CONF_LANGUAGE],
+        )
+
         cookidoo = Cookidoo(
-            session,
+            async_get_clientsession(self.hass),
             CookidooConfig(
                 email=data_input[CONF_EMAIL],
                 password=data_input[CONF_PASSWORD],
-                localization=CookidooLocalizationConfig(
-                    country_code=data_input[CONF_COUNTRY].lower(),
-                    language=data_input[CONF_LANGUAGE],
-                ),
+                localization=localizations[0],
             ),
         )
         try:
@@ -6,6 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/cookidoo",
   "integration_type": "service",
   "iot_class": "cloud_polling",
+  "loggers": ["cookidoo_api"],
   "quality_scale": "silver",
-  "requirements": ["cookidoo-api==0.10.0"]
+  "requirements": ["cookidoo-api==0.12.2"]
 }
@@ -300,6 +300,10 @@ class CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     def supported_features(self) -> CoverEntityFeature:
         """Flag supported features."""
         if (features := self._attr_supported_features) is not None:
+            if type(features) is int:  # noqa: E721
+                new_features = CoverEntityFeature(features)
+                self._report_deprecated_supported_features_values(new_features)
+                return new_features
             return features

         supported_features = (
@@ -266,7 +266,7 @@ class DeconzBaseLight[_LightDeviceT: Group | Light](
     @property
     def color_temp_kelvin(self) -> int | None:
         """Return the CT color value."""
-        if self._device.color_temp is None:
+        if self._device.color_temp is None or self._device.color_temp == 0:
             return None
         return color_temperature_mired_to_kelvin(self._device.color_temp)
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ecovacs",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.10", "deebot-client==10.0.1"]
+  "requirements": ["py-sucks==0.9.10", "deebot-client==10.1.0"]
 }
@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["openwebif"],
-  "requirements": ["openwebifpy==4.3.0"]
+  "requirements": ["openwebifpy==4.3.1"]
 }
@@ -22,5 +22,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["eq3btsmart"],
-  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==1.1.0"]
+  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.0.0"]
 }
@@ -7,7 +7,6 @@ from typing import TYPE_CHECKING

 from aioesphomeapi import APIClient, DeviceInfo
 from bleak_esphome import connect_scanner
-from bleak_esphome.backend.cache import ESPHomeBluetoothCache

 from homeassistant.components.bluetooth import async_register_scanner
 from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback as hass_callback

@@ -28,10 +27,9 @@ def async_connect_scanner(
     entry_data: RuntimeEntryData,
     cli: APIClient,
     device_info: DeviceInfo,
-    cache: ESPHomeBluetoothCache,
 ) -> CALLBACK_TYPE:
     """Connect scanner."""
-    client_data = connect_scanner(cli, device_info, cache, entry_data.available)
+    client_data = connect_scanner(cli, device_info, entry_data.available)
     entry_data.bluetooth_device = client_data.bluetooth_device
     client_data.disconnect_callbacks = entry_data.disconnect_callbacks
     scanner = client_data.scanner
@@ -6,8 +6,6 @@ from dataclasses import dataclass, field
 from functools import cache
 from typing import Self

-from bleak_esphome.backend.cache import ESPHomeBluetoothCache
-
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.json import JSONEncoder

@@ -22,9 +20,6 @@ class DomainData:
     """Define a class that stores global esphome data in hass.data[DOMAIN]."""

     _stores: dict[str, ESPHomeStorage] = field(default_factory=dict)
-    bluetooth_cache: ESPHomeBluetoothCache = field(
-        default_factory=ESPHomeBluetoothCache
-    )

     def get_entry_data(self, entry: ESPHomeConfigEntry) -> RuntimeEntryData:
         """Return the runtime entry data associated with this config entry.
@@ -423,9 +423,7 @@ class ESPHomeManager:

         if device_info.bluetooth_proxy_feature_flags_compat(api_version):
             entry_data.disconnect_callbacks.add(
-                async_connect_scanner(
-                    hass, entry_data, cli, device_info, self.domain_data.bluetooth_cache
-                )
+                async_connect_scanner(hass, entry_data, cli, device_info)
             )

         if device_info.voice_assistant_feature_flags_compat(api_version) and (
@@ -18,7 +18,7 @@
   "requirements": [
     "aioesphomeapi==28.0.0",
     "esphome-dashboard-api==1.2.3",
-    "bleak-esphome==1.1.0"
+    "bleak-esphome==2.0.0"
   ],
   "zeroconf": ["_esphomelib._tcp.local."]
 }
@@ -23,10 +23,10 @@ from homeassistant.helpers.dispatcher import (
     async_dispatcher_send,
 )
 from homeassistant.helpers.entity import Entity
-from homeassistant.helpers.system_info import is_official_image
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.loader import bind_hass
 from homeassistant.util.signal_type import SignalType
+from homeassistant.util.system_info import is_official_image

 DOMAIN = "ffmpeg"
@@ -2,10 +2,11 @@

 from datetime import datetime as dt
 import logging
+from typing import Any

 import jwt
 from pyflick import FlickAPI
-from pyflick.authentication import AbstractFlickAuth
+from pyflick.authentication import SimpleFlickAuth
 from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET

 from homeassistant.config_entries import ConfigEntry

@@ -93,16 +94,22 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
     return True


-class HassFlickAuth(AbstractFlickAuth):
+class HassFlickAuth(SimpleFlickAuth):
     """Implementation of AbstractFlickAuth based on a Home Assistant entity config."""

-    def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
+    def __init__(self, hass: HomeAssistant, entry: FlickConfigEntry) -> None:
         """Flick authentication based on a Home Assistant entity config."""
-        super().__init__(aiohttp_client.async_get_clientsession(hass))
+        super().__init__(
+            username=entry.data[CONF_USERNAME],
+            password=entry.data[CONF_PASSWORD],
+            client_id=entry.data.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
+            client_secret=entry.data.get(CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET),
+            websession=aiohttp_client.async_get_clientsession(hass),
+        )
         self._entry = entry
         self._hass = hass

-    async def _get_entry_token(self):
+    async def _get_entry_token(self) -> dict[str, Any]:
         # No token saved, generate one
         if (
             CONF_TOKEN_EXPIRY not in self._entry.data

@@ -119,13 +126,8 @@ class HassFlickAuth(SimpleFlickAuth):
     async def _update_token(self):
         _LOGGER.debug("Fetching new access token")

-        token = await self.get_new_token(
-            username=self._entry.data[CONF_USERNAME],
-            password=self._entry.data[CONF_PASSWORD],
-            client_id=self._entry.data.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
-            client_secret=self._entry.data.get(
-                CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET
-            ),
+        token = await super().get_new_token(
+            self._username, self._password, self._client_id, self._client_secret
         )

         _LOGGER.debug("New token: %s", token)
@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["pyflick"],
-  "requirements": ["PyFlick==1.1.2"]
+  "requirements": ["PyFlick==1.1.3"]
 }
@@ -51,19 +51,19 @@ class FlickPricingSensor(CoordinatorEntity[FlickElectricDataCoordinator], Sensor
             _LOGGER.warning(
                 "Unexpected quantity for unit price: %s", self.coordinator.data
             )
-        return self.coordinator.data.cost
+        return self.coordinator.data.cost * 100

     @property
     def extra_state_attributes(self) -> dict[str, Any] | None:
         """Return the state attributes."""
-        components: dict[str, Decimal] = {}
+        components: dict[str, float] = {}

         for component in self.coordinator.data.components:
             if component.charge_setter not in ATTR_COMPONENTS:
                 _LOGGER.warning("Found unknown component: %s", component.charge_setter)
                 continue

-            components[component.charge_setter] = component.value
+            components[component.charge_setter] = float(component.value * 100)

         return {
             ATTR_START_AT: self.coordinator.data.start_at,
@@ -214,6 +214,18 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
         self._options = options
         await self.hass.async_add_executor_job(self.setup)

+        device_registry = dr.async_get(self.hass)
+        device_registry.async_get_or_create(
+            config_entry_id=self.config_entry.entry_id,
+            configuration_url=f"http://{self.host}",
+            connections={(dr.CONNECTION_NETWORK_MAC, self.mac)},
+            identifiers={(DOMAIN, self.unique_id)},
+            manufacturer="AVM",
+            model=self.model,
+            name=self.config_entry.title,
+            sw_version=self.current_firmware,
+        )
+
     def setup(self) -> None:
         """Set up FritzboxTools class."""
@@ -68,23 +68,14 @@ class FritzBoxBaseEntity:
         """Init device info class."""
         self._avm_wrapper = avm_wrapper
         self._device_name = device_name
-
-    @property
-    def mac_address(self) -> str:
-        """Return the mac address of the main device."""
-        return self._avm_wrapper.mac
+        self.mac_address = self._avm_wrapper.mac

     @property
     def device_info(self) -> DeviceInfo:
         """Return the device information."""
         return DeviceInfo(
-            configuration_url=f"http://{self._avm_wrapper.host}",
             connections={(dr.CONNECTION_NETWORK_MAC, self.mac_address)},
-            identifiers={(DOMAIN, self._avm_wrapper.unique_id)},
-            manufacturer="AVM",
-            model=self._avm_wrapper.model,
-            name=self._device_name,
-            sw_version=self._avm_wrapper.current_firmware,
         )
@@ -1,6 +1,7 @@
 {
   "domain": "frontend",
   "name": "Home Assistant Frontend",
+  "after_dependencies": ["backup"],
   "codeowners": ["@home-assistant/frontend"],
   "dependencies": [
     "api",

@@ -20,5 +21,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20241231.0"]
+  "requirements": ["home-assistant-frontend==20250109.0"]
 }
@@ -34,6 +34,18 @@
             "moderate": "Moderate",
             "good": "Good",
             "very_good": "Very good"
-          }
+          },
+          "state_attributes": {
+            "options": {
+              "state": {
+                "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
+                "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
+                "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
+                "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
+                "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
+                "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
+              }
+            }
+          }
         },
         "c6h6": {

@@ -51,6 +63,18 @@
             "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
             "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
             "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
-          }
+          },
+          "state_attributes": {
+            "options": {
+              "state": {
+                "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
+                "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
+                "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
+                "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
+                "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
+                "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
+              }
+            }
+          }
         },
         "o3_index": {

@@ -62,6 +86,18 @@
             "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
             "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
             "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
-          }
+          },
+          "state_attributes": {
+            "options": {
+              "state": {
+                "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
+                "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
+                "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
+                "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
+                "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
+                "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
+              }
+            }
+          }
         },
         "pm10_index": {

@@ -73,6 +109,18 @@
             "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
             "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
             "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
-          }
+          },
+          "state_attributes": {
+            "options": {
+              "state": {
+                "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
+                "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
+                "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
+                "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
+                "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
+                "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
+              }
+            }
+          }
         },
         "pm25_index": {

@@ -84,6 +132,18 @@
             "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
             "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
             "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
-          }
+          },
+          "state_attributes": {
+            "options": {
+              "state": {
+                "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
+                "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
+                "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
+                "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
+                "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
+                "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
+              }
+            }
+          }
         },
         "so2_index": {

@@ -95,6 +155,18 @@
             "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
             "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
             "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
-          }
+          },
+          "state_attributes": {
+            "options": {
+              "state": {
+                "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
+                "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
+                "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
+                "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
+                "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
+                "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
+              }
+            }
+          }
         }
       }
@@ -10,6 +10,7 @@ from typing import Any, cast

from aiohasupervisor.exceptions import (
SupervisorBadRequestError,
SupervisorError,
SupervisorNotFoundError,
)
from aiohasupervisor.models import (
@@ -23,8 +24,10 @@ from homeassistant.components.backup import (
AgentBackup,
BackupAgent,
BackupReaderWriter,
BackupReaderWriterError,
CreateBackupEvent,
Folder,
IncorrectPasswordError,
NewBackup,
WrittenBackup,
)
@@ -213,6 +216,10 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
password: str | None,
) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]:
"""Create a backup."""
if not include_homeassistant and include_database:
raise HomeAssistantError(
"Cannot create a backup with database but without Home Assistant"
)
manager = self._hass.data[DATA_MANAGER]

include_addons_set: supervisor_backups.AddonSet | set[str] | None = None
@@ -233,20 +240,23 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
]
locations = [agent.location for agent in hassio_agents]

backup = await self._client.backups.partial_backup(
supervisor_backups.PartialBackupOptions(
addons=include_addons_set,
folders=include_folders_set,
homeassistant=include_homeassistant,
name=backup_name,
password=password,
compressed=True,
location=locations or LOCATION_CLOUD_BACKUP,
homeassistant_exclude_database=not include_database,
background=True,
extra=extra_metadata,
try:
backup = await self._client.backups.partial_backup(
supervisor_backups.PartialBackupOptions(
addons=include_addons_set,
folders=include_folders_set,
homeassistant=include_homeassistant,
name=backup_name,
password=password,
compressed=True,
location=locations or LOCATION_CLOUD_BACKUP,
homeassistant_exclude_database=not include_database,
background=True,
extra=extra_metadata,
)
)
)
except SupervisorError as err:
raise BackupReaderWriterError(f"Error creating backup: {err}") from err
backup_task = self._hass.async_create_task(
self._async_wait_for_backup(
backup, remove_after_upload=not bool(locations)
@@ -278,22 +288,35 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
finally:
unsub()
if not backup_id:
raise HomeAssistantError("Backup failed")
raise BackupReaderWriterError("Backup failed")

async def open_backup() -> AsyncIterator[bytes]:
return await self._client.backups.download_backup(backup_id)
try:
return await self._client.backups.download_backup(backup_id)
except SupervisorError as err:
raise BackupReaderWriterError(
f"Error downloading backup: {err}"
) from err

async def remove_backup() -> None:
if not remove_after_upload:
return
await self._client.backups.remove_backup(
backup_id,
options=supervisor_backups.RemoveBackupOptions(
location={LOCATION_CLOUD_BACKUP}
),
)
try:
await self._client.backups.remove_backup(
backup_id,
options=supervisor_backups.RemoveBackupOptions(
location={LOCATION_CLOUD_BACKUP}
),
)
except SupervisorError as err:
raise BackupReaderWriterError(f"Error removing backup: {err}") from err

details = await self._client.backups.backup_info(backup_id)
try:
details = await self._client.backups.backup_info(backup_id)
except SupervisorError as err:
raise BackupReaderWriterError(
f"Error getting backup details: {err}"
) from err

return WrittenBackup(
backup=_backup_details_to_agent_backup(details),
@@ -359,8 +382,16 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
restore_homeassistant: bool,
) -> None:
"""Restore a backup."""
if restore_homeassistant and not restore_database:
raise HomeAssistantError("Cannot restore Home Assistant without database")
manager = self._hass.data[DATA_MANAGER]
# The backup manager has already checked that the backup exists so we don't need to
# check that here.
backup = await manager.backup_agents[agent_id].async_get_backup(backup_id)
if (
backup
and restore_homeassistant
and restore_database != backup.database_included
):
raise HomeAssistantError("Restore database must match backup")
if not restore_homeassistant and restore_database:
raise HomeAssistantError("Cannot restore database without Home Assistant")
restore_addons_set = set(restore_addons) if restore_addons else None
@@ -370,7 +401,6 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
else None
)

manager = self._hass.data[DATA_MANAGER]
restore_location: str | None
if manager.backup_agents[agent_id].domain != DOMAIN:
# Download the backup to the supervisor. Supervisor will clean up the backup
@@ -385,17 +415,24 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
agent = cast(SupervisorBackupAgent, manager.backup_agents[agent_id])
restore_location = agent.location

job = await self._client.backups.partial_restore(
backup_id,
supervisor_backups.PartialRestoreOptions(
addons=restore_addons_set,
folders=restore_folders_set,
homeassistant=restore_homeassistant,
password=password,
background=True,
location=restore_location,
),
)
try:
job = await self._client.backups.partial_restore(
backup_id,
supervisor_backups.PartialRestoreOptions(
addons=restore_addons_set,
folders=restore_folders_set,
homeassistant=restore_homeassistant,
password=password,
background=True,
location=restore_location,
),
)
except SupervisorBadRequestError as err:
# Supervisor currently does not transmit machine parsable error types
message = err.args[0]
if message.startswith("Invalid password for backup"):
raise IncorrectPasswordError(message) from err
raise HomeAssistantError(message) from err

restore_complete = asyncio.Event()
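
The shape of this change is uniform: every Supervisor client call is wrapped in try/except so that aiohasupervisor's SupervisorError surfaces as a BackupReaderWriterError the backup manager can report. A minimal, self-contained sketch of that pattern, using stand-in exception and client types rather than the real aiohasupervisor API:

class SupervisorError(Exception):
    """Stand-in for aiohasupervisor's base error."""

class BackupReaderWriterError(Exception):
    """Stand-in for the backup integration's error type."""

async def open_backup_wrapped(client, backup_id: str) -> bytes:
    # Same structure as open_backup() above: wrap the call, add context,
    # and chain the original cause with `from err`.
    try:
        return await client.backups.download_backup(backup_id)
    except SupervisorError as err:
        raise BackupReaderWriterError(f"Error downloading backup: {err}") from err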
@@ -114,6 +114,7 @@ class HiveDeviceLight(HiveEntity, LightEntity):
self._attr_hs_color = color_util.color_RGB_to_hs(*rgb)
self._attr_color_mode = ColorMode.HS
else:
color_temp = self.device["status"].get("color_temp")
self._attr_color_temp_kelvin = (
None
if color_temp is None

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
"requirements": ["holidays==0.63", "babel==2.15.0"]
"requirements": ["holidays==0.64", "babel==2.15.0"]
}

@@ -168,7 +168,7 @@ async def _run_appliance_service[*_Ts](
error_translation_placeholders: dict[str, str],
) -> None:
try:
await hass.async_add_executor_job(getattr(appliance, method), args)
await hass.async_add_executor_job(getattr(appliance, method), *args)
except api.HomeConnectError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
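
The one-character fix above (args to *args) matters because the executor forwards its positional arguments directly to the callable; passing the list unexpanded hands the target a single list argument. A hedged sketch using the stdlib run_in_executor, which forwards arguments the same way (the set_program function here is made up for the demo):

import asyncio

def set_program(name: str, delay: int) -> str:
    return f"{name} in {delay}s"

async def main() -> None:
    loop = asyncio.get_running_loop()
    args = ["Eco50", 30]
    # Buggy form: set_program would receive one list instead of two arguments.
    # await loop.run_in_executor(None, set_program, args)  # raises TypeError
    # Fixed form, mirroring hass.async_add_executor_job(fn, *args):
    print(await loop.run_in_executor(None, set_program, *args))

asyncio.run(main())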
@@ -220,7 +220,7 @@ async def async_setup_entry(
with contextlib.suppress(HomeConnectError):
programs = device.appliance.get_programs_available()
if programs:
for program in programs:
for program in programs.copy():
if program not in PROGRAMS_TRANSLATION_KEYS_MAP:
programs.remove(program)
if program not in programs_not_found:
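
Iterating over programs.copy() avoids the classic pitfall of removing items from a list while iterating it, which silently skips elements. A minimal reproduction of why the snapshot is needed:

programs = ["Eco50", "Intensiv70", "Unknown1", "Unknown2"]
known = {"Eco50", "Intensiv70"}

for program in programs.copy():  # iterate a snapshot
    if program not in known:
        programs.remove(program)  # safe: only the original list is mutated

assert programs == ["Eco50", "Intensiv70"]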
@@ -12,6 +12,6 @@
"iot_class": "local_polling",
"loggers": ["homewizard_energy"],
"quality_scale": "platinum",
"requirements": ["python-homewizard-energy==v7.0.0"],
"requirements": ["python-homewizard-energy==v7.0.1"],
"zeroconf": ["_hwenergy._tcp.local."]
}

@@ -8,5 +8,5 @@
"iot_class": "cloud_push",
"loggers": ["aioautomower"],
"quality_scale": "silver",
"requirements": ["aioautomower==2024.12.0"]
"requirements": ["aioautomower==2025.1.0"]
}

@@ -385,7 +385,7 @@ class InputDatetime(collection.CollectionEntity, RestoreEntity):
@callback
def async_set_datetime(self, date=None, time=None, datetime=None, timestamp=None):
"""Set a new date / time."""
if timestamp:
if timestamp is not None:
datetime = dt_util.as_local(dt_util.utc_from_timestamp(timestamp))

if datetime:
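
The guard change above (`if timestamp:` to `if timestamp is not None:`) keeps the Unix epoch from being ignored, since 0 is falsy but is a valid timestamp. A small sketch of the difference:

from datetime import datetime, timezone

def resolve(timestamp: float | None) -> datetime | None:
    if timestamp is not None:  # 0, the epoch, is a valid value
        return datetime.fromtimestamp(timestamp, tz=timezone.utc)
    return None

assert resolve(0) == datetime(1970, 1, 1, tzinfo=timezone.utc)
assert resolve(None) is None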
@@ -188,8 +188,8 @@ PINECIL_NUMBER_DESCRIPTIONS: tuple[IronOSNumberEntityDescription, ...] = (
characteristic=CharSetting.POWER_LIMIT,
mode=NumberMode.BOX,
native_min_value=0,
native_max_value=12,
native_step=0.1,
native_max_value=120,
native_step=5,
entity_category=EntityCategory.CONFIG,
native_unit_of_measurement=UnitOfPower.WATT,
entity_registry_enabled_default=False,

@@ -128,8 +128,8 @@
"temp_unit": {
"name": "Temperature display unit",
"state": {
"celsius": "Celsius (C°)",
"fahrenheit": "Fahrenheit (F°)"
"celsius": "Celsius (°C)",
"fahrenheit": "Fahrenheit (°F)"
}
},
"desc_scroll_speed": {

@@ -6,5 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/ituran",
"integration_type": "hub",
"iot_class": "cloud_polling",
"quality_scale": "silver",
"requirements": ["pyituran==0.1.4"]
}

@@ -13,7 +13,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["demetriek"],
"requirements": ["demetriek==1.1.0"],
"requirements": ["demetriek==1.1.1"],
"ssdp": [
{
"deviceType": "urn:schemas-upnp-org:device:LaMetric:1"

@@ -50,7 +50,7 @@ NUMBERS = [
native_step=1,
native_min_value=0,
native_max_value=100,
has_fn=lambda device: bool(device.audio),
has_fn=lambda device: bool(device.audio and device.audio.available),
value_fn=lambda device: device.audio.volume if device.audio else 0,
set_value_fn=lambda api, volume: api.audio(volume=int(volume)),
),

@@ -53,6 +53,6 @@
"requirements": [
"aiolifx==1.1.2",
"aiolifx-effects==0.3.2",
"aiolifx-themes==0.5.5"
"aiolifx-themes==0.6.0"
]
}

@@ -354,7 +354,7 @@ def filter_turn_off_params(
if not params:
return params

supported_features = light.supported_features
supported_features = light.supported_features_compat

if LightEntityFeature.FLASH not in supported_features:
params.pop(ATTR_FLASH, None)
@@ -366,7 +366,7 @@ def filter_turn_off_params(

def filter_turn_on_params(light: LightEntity, params: dict[str, Any]) -> dict[str, Any]:
"""Filter out params not supported by the light."""
supported_features = light.supported_features
supported_features = light.supported_features_compat

if LightEntityFeature.EFFECT not in supported_features:
params.pop(ATTR_EFFECT, None)
@@ -1093,7 +1093,7 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
def capability_attributes(self) -> dict[str, Any]:
"""Return capability attributes."""
data: dict[str, Any] = {}
supported_features = self.supported_features
supported_features = self.supported_features_compat
supported_color_modes = self._light_internal_supported_color_modes

if ColorMode.COLOR_TEMP in supported_color_modes:
@@ -1255,11 +1255,12 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
def state_attributes(self) -> dict[str, Any] | None:
"""Return state attributes."""
data: dict[str, Any] = {}
supported_features = self.supported_features
supported_features = self.supported_features_compat
supported_color_modes = self.supported_color_modes
legacy_supported_color_modes = (
supported_color_modes or self._light_internal_supported_color_modes
)
supported_features_value = supported_features.value
_is_on = self.is_on
color_mode = self._light_internal_color_mode if _is_on else None

@@ -1278,6 +1279,13 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
data[ATTR_BRIGHTNESS] = self.brightness
else:
data[ATTR_BRIGHTNESS] = None
elif supported_features_value & _DEPRECATED_SUPPORT_BRIGHTNESS.value:
# Backwards compatibility for ambiguous / incomplete states
# Warning is printed by supported_features_compat, remove in 2025.1
if _is_on:
data[ATTR_BRIGHTNESS] = self.brightness
else:
data[ATTR_BRIGHTNESS] = None

if color_temp_supported(supported_color_modes):
if color_mode == ColorMode.COLOR_TEMP:
@@ -1292,6 +1300,21 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
else:
data[ATTR_COLOR_TEMP_KELVIN] = None
data[_DEPRECATED_ATTR_COLOR_TEMP.value] = None
elif supported_features_value & _DEPRECATED_SUPPORT_COLOR_TEMP.value:
# Backwards compatibility
# Warning is printed by supported_features_compat, remove in 2025.1
if _is_on:
color_temp_kelvin = self.color_temp_kelvin
data[ATTR_COLOR_TEMP_KELVIN] = color_temp_kelvin
if color_temp_kelvin:
data[_DEPRECATED_ATTR_COLOR_TEMP.value] = (
color_util.color_temperature_kelvin_to_mired(color_temp_kelvin)
)
else:
data[_DEPRECATED_ATTR_COLOR_TEMP.value] = None
else:
data[ATTR_COLOR_TEMP_KELVIN] = None
data[_DEPRECATED_ATTR_COLOR_TEMP.value] = None

if color_supported(legacy_supported_color_modes) or color_temp_supported(
legacy_supported_color_modes
@@ -1329,7 +1352,24 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
type(self),
report_issue,
)
return {ColorMode.ONOFF}
supported_features = self.supported_features_compat
supported_features_value = supported_features.value
supported_color_modes: set[ColorMode] = set()

if supported_features_value & _DEPRECATED_SUPPORT_COLOR_TEMP.value:
supported_color_modes.add(ColorMode.COLOR_TEMP)
if supported_features_value & _DEPRECATED_SUPPORT_COLOR.value:
supported_color_modes.add(ColorMode.HS)
if (
not supported_color_modes
and supported_features_value & _DEPRECATED_SUPPORT_BRIGHTNESS.value
):
supported_color_modes = {ColorMode.BRIGHTNESS}

if not supported_color_modes:
supported_color_modes = {ColorMode.ONOFF}

return supported_color_modes

@cached_property
def supported_color_modes(self) -> set[ColorMode] | set[str] | None:
@@ -1341,6 +1381,37 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
"""Flag supported features."""
return self._attr_supported_features

@property
def supported_features_compat(self) -> LightEntityFeature:
"""Return the supported features as LightEntityFeature.

Remove this compatibility shim in 2025.1 or later.
"""
features = self.supported_features
if type(features) is not int:  # noqa: E721
return features
new_features = LightEntityFeature(features)
if self._deprecated_supported_features_reported is True:
return new_features
self._deprecated_supported_features_reported = True
report_issue = self._suggest_report_issue()
report_issue += (
" and reference "
"https://developers.home-assistant.io/blog/2023/12/28/support-feature-magic-numbers-deprecation"
)
_LOGGER.warning(
(
"Entity %s (%s) is using deprecated supported features"
" values which will be removed in HA Core 2025.1. Instead it should use"
" %s and color modes, please %s"
),
self.entity_id,
type(self),
repr(new_features),
report_issue,
)
return new_features

def __should_report_light_issue(self) -> bool:
"""Return if light color mode issues should be reported."""
if not self.platform:
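
The core idea of the supported_features_compat shim above, reduced to a standalone sketch: accept a raw int from a legacy integration, convert it to the IntFlag enum, and warn only once. The enum values below mirror LightEntityFeature, but the one-shot reporting is simplified to a module-level flag and a print:

from enum import IntFlag

class LightEntityFeature(IntFlag):
    EFFECT = 4
    FLASH = 8
    TRANSITION = 32

_reported = False

def supported_features_compat(features: LightEntityFeature | int) -> LightEntityFeature:
    global _reported
    if type(features) is not int:  # already the enum type, pass through
        return features
    new_features = LightEntityFeature(features)
    if not _reported:
        _reported = True
        print(f"deprecated int supported features, use {new_features!r}")
    return new_features

assert supported_features_compat(12) == LightEntityFeature.EFFECT | LightEntityFeature.FLASH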
@@ -57,6 +57,9 @@
},
"valve_position": {
"default": "mdi:valve"
},
"battery_replacement_description": {
"default": "mdi:battery-sync-outline"
}
}
}

@@ -773,6 +773,19 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
"""Flag media player features that are supported."""
return self._attr_supported_features

@property
def supported_features_compat(self) -> MediaPlayerEntityFeature:
"""Return the supported features as MediaPlayerEntityFeature.

Remove this compatibility shim in 2025.1 or later.
"""
features = self.supported_features
if type(features) is int:  # noqa: E721
new_features = MediaPlayerEntityFeature(features)
self._report_deprecated_supported_features_values(new_features)
return new_features
return features

def turn_on(self) -> None:
"""Turn the media player on."""
raise NotImplementedError
@@ -912,85 +925,87 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
@property
def support_play(self) -> bool:
"""Boolean if play is supported."""
return MediaPlayerEntityFeature.PLAY in self.supported_features
return MediaPlayerEntityFeature.PLAY in self.supported_features_compat

@final
@property
def support_pause(self) -> bool:
"""Boolean if pause is supported."""
return MediaPlayerEntityFeature.PAUSE in self.supported_features
return MediaPlayerEntityFeature.PAUSE in self.supported_features_compat

@final
@property
def support_stop(self) -> bool:
"""Boolean if stop is supported."""
return MediaPlayerEntityFeature.STOP in self.supported_features
return MediaPlayerEntityFeature.STOP in self.supported_features_compat

@final
@property
def support_seek(self) -> bool:
"""Boolean if seek is supported."""
return MediaPlayerEntityFeature.SEEK in self.supported_features
return MediaPlayerEntityFeature.SEEK in self.supported_features_compat

@final
@property
def support_volume_set(self) -> bool:
"""Boolean if setting volume is supported."""
return MediaPlayerEntityFeature.VOLUME_SET in self.supported_features
return MediaPlayerEntityFeature.VOLUME_SET in self.supported_features_compat

@final
@property
def support_volume_mute(self) -> bool:
"""Boolean if muting volume is supported."""
return MediaPlayerEntityFeature.VOLUME_MUTE in self.supported_features
return MediaPlayerEntityFeature.VOLUME_MUTE in self.supported_features_compat

@final
@property
def support_previous_track(self) -> bool:
"""Boolean if previous track command supported."""
return MediaPlayerEntityFeature.PREVIOUS_TRACK in self.supported_features
return MediaPlayerEntityFeature.PREVIOUS_TRACK in self.supported_features_compat

@final
@property
def support_next_track(self) -> bool:
"""Boolean if next track command supported."""
return MediaPlayerEntityFeature.NEXT_TRACK in self.supported_features
return MediaPlayerEntityFeature.NEXT_TRACK in self.supported_features_compat

@final
@property
def support_play_media(self) -> bool:
"""Boolean if play media command supported."""
return MediaPlayerEntityFeature.PLAY_MEDIA in self.supported_features
return MediaPlayerEntityFeature.PLAY_MEDIA in self.supported_features_compat

@final
@property
def support_select_source(self) -> bool:
"""Boolean if select source command supported."""
return MediaPlayerEntityFeature.SELECT_SOURCE in self.supported_features
return MediaPlayerEntityFeature.SELECT_SOURCE in self.supported_features_compat

@final
@property
def support_select_sound_mode(self) -> bool:
"""Boolean if select sound mode command supported."""
return MediaPlayerEntityFeature.SELECT_SOUND_MODE in self.supported_features
return (
MediaPlayerEntityFeature.SELECT_SOUND_MODE in self.supported_features_compat
)

@final
@property
def support_clear_playlist(self) -> bool:
"""Boolean if clear playlist command supported."""
return MediaPlayerEntityFeature.CLEAR_PLAYLIST in self.supported_features
return MediaPlayerEntityFeature.CLEAR_PLAYLIST in self.supported_features_compat

@final
@property
def support_shuffle_set(self) -> bool:
"""Boolean if shuffle is supported."""
return MediaPlayerEntityFeature.SHUFFLE_SET in self.supported_features
return MediaPlayerEntityFeature.SHUFFLE_SET in self.supported_features_compat

@final
@property
def support_grouping(self) -> bool:
"""Boolean if player grouping is supported."""
return MediaPlayerEntityFeature.GROUPING in self.supported_features
return MediaPlayerEntityFeature.GROUPING in self.supported_features_compat

async def async_toggle(self) -> None:
"""Toggle the power on the media player."""
@@ -1019,7 +1034,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
if (
self.volume_level is not None
and self.volume_level < 1
and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features
and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features_compat
):
await self.async_set_volume_level(
min(1, self.volume_level + self.volume_step)
@@ -1037,7 +1052,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
if (
self.volume_level is not None
and self.volume_level > 0
and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features
and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features_compat
):
await self.async_set_volume_level(
max(0, self.volume_level - self.volume_step)
@@ -1080,7 +1095,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
def capability_attributes(self) -> dict[str, Any]:
"""Return capability attributes."""
data: dict[str, Any] = {}
supported_features = self.supported_features
supported_features = self.supported_features_compat

if (
source_list := self.source_list
@@ -1286,7 +1301,7 @@ async def websocket_browse_media(
connection.send_error(msg["id"], "entity_not_found", "Entity not found")
return

if MediaPlayerEntityFeature.BROWSE_MEDIA not in player.supported_features:
if MediaPlayerEntityFeature.BROWSE_MEDIA not in player.supported_features_compat:
connection.send_message(
websocket_api.error_message(
msg["id"], ERR_NOT_SUPPORTED, "Player does not support browsing media"

@@ -6,6 +6,7 @@ import logging
from meteofrance_api.client import MeteoFranceClient
from meteofrance_api.helpers import is_valid_warning_department
from meteofrance_api.model import CurrentPhenomenons, Forecast, Rain
from requests import RequestException
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry
@@ -83,7 +84,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
update_method=_async_update_data_rain,
update_interval=SCAN_INTERVAL_RAIN,
)
await coordinator_rain.async_config_entry_first_refresh()
try:
await coordinator_rain._async_refresh(log_failures=False)  # noqa: SLF001
except RequestException:
_LOGGER.warning(
"1 hour rain forecast not available: %s is not in covered zone",
entry.title,
)

department = coordinator_forecast.data.position.get("dept")
_LOGGER.debug(
@@ -128,8 +135,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
hass.data[DOMAIN][entry.entry_id] = {
UNDO_UPDATE_LISTENER: undo_listener,
COORDINATOR_FORECAST: coordinator_forecast,
COORDINATOR_RAIN: coordinator_rain,
}
if coordinator_rain and coordinator_rain.last_update_success:
hass.data[DOMAIN][entry.entry_id][COORDINATOR_RAIN] = coordinator_rain
if coordinator_alert and coordinator_alert.last_update_success:
hass.data[DOMAIN][entry.entry_id][COORDINATOR_ALERT] = coordinator_alert
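
The Meteo-France change above makes the rain coordinator optional: the first refresh is attempted without failing setup, and the coordinator is only stored when it succeeded, so readers must use .get(). A hedged, self-contained sketch of that setup pattern with a fake coordinator standing in for DataUpdateCoordinator:

import asyncio

class FakeCoordinator:
    def __init__(self, covered: bool) -> None:
        self._covered = covered
        self.last_update_success = False

    async def refresh(self) -> None:
        if not self._covered:
            raise RuntimeError("not in covered zone")
        self.last_update_success = True

async def main() -> None:
    data: dict[str, FakeCoordinator] = {}
    rain = FakeCoordinator(covered=False)
    try:
        await rain.refresh()  # a failed first refresh does not abort setup
    except RuntimeError:
        print("1 hour rain forecast not available")
    if rain.last_update_success:
        data["rain"] = rain
    assert data.get("rain") is None  # consumers must tolerate absence

asyncio.run(main())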
@@ -187,7 +187,7 @@ async def async_setup_entry(
"""Set up the Meteo-France sensor platform."""
data = hass.data[DOMAIN][entry.entry_id]
coordinator_forecast: DataUpdateCoordinator[Forecast] = data[COORDINATOR_FORECAST]
coordinator_rain: DataUpdateCoordinator[Rain] | None = data[COORDINATOR_RAIN]
coordinator_rain: DataUpdateCoordinator[Rain] | None = data.get(COORDINATOR_RAIN)
coordinator_alert: DataUpdateCoordinator[CurrentPhenomenons] | None = data.get(
COORDINATOR_ALERT
)

@@ -2,6 +2,8 @@

from __future__ import annotations

from datetime import datetime, time

from open_meteo import Forecast as OpenMeteoForecast

from homeassistant.components.weather import (
@@ -107,8 +109,9 @@ class OpenMeteoWeatherEntity(

daily = self.coordinator.data.daily
for index, date in enumerate(self.coordinator.data.daily.time):
_datetime = datetime.combine(date=date, time=time(0), tzinfo=dt_util.UTC)
forecast = Forecast(
datetime=date.isoformat(),
datetime=_datetime.isoformat(),
)

if daily.weathercode is not None:
@@ -155,12 +158,14 @@ class OpenMeteoWeatherEntity(
today = dt_util.utcnow()

hourly = self.coordinator.data.hourly
for index, datetime in enumerate(self.coordinator.data.hourly.time):
if dt_util.as_utc(datetime) < today:
for index, _datetime in enumerate(self.coordinator.data.hourly.time):
if _datetime.tzinfo is None:
_datetime = _datetime.replace(tzinfo=dt_util.UTC)
if _datetime < today:
continue

forecast = Forecast(
datetime=datetime.isoformat(),
datetime=_datetime.isoformat(),
)

if hourly.weather_code is not None:
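
The hourly-forecast fix above exists because comparing a naive datetime against an aware one raises TypeError, so naive API timestamps are first coerced to UTC. The failure mode and the fix in isolation:

from datetime import datetime, timezone

now = datetime.now(timezone.utc)
forecast_time = datetime(2025, 1, 1, 12, 0)  # naive, as an API may return it

# forecast_time < now  # would raise TypeError: can't compare naive and aware
if forecast_time.tzinfo is None:
    forecast_time = forecast_time.replace(tzinfo=timezone.utc)
print(forecast_time < now)  # safe aware-vs-aware comparison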
@@ -6,7 +6,7 @@ from typing import Any, cast
from urllib.parse import urlparse

from pyoverkiz.enums import OverkizCommand, Protocol
from pyoverkiz.exceptions import OverkizException
from pyoverkiz.exceptions import BaseOverkizException
from pyoverkiz.models import Command, Device, StateDefinition
from pyoverkiz.types import StateType as OverkizStateType

@@ -105,7 +105,7 @@ class OverkizExecutor:
"Home Assistant",
)
# Catch Overkiz exceptions to support `continue_on_error` functionality
except OverkizException as exception:
except BaseOverkizException as exception:
raise HomeAssistantError(exception) from exception

# ExecutionRegisteredEvent doesn't contain the device_url, thus we need to register it here

@@ -27,7 +27,7 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN):

VERSION = 1

_host: str
_discovery_info: zeroconf.ZeroconfServiceInfo

async def async_step_user(
self, user_input: dict[str, Any] | None = None
@@ -137,8 +137,15 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN):
await self.async_set_unique_id(sn)
self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host})

self._host = discovery_info.host
self.context.update({"configuration_url": f"http://{discovery_info.host}"})
self._discovery_info = discovery_info
self.context.update(
{
"title_placeholders": {
"name": discovery_info.name.replace("._http._tcp.local.", "")
},
"configuration_url": f"http://{discovery_info.host}",
},
)
return await self.async_step_zeroconf_confirm()

async def async_step_zeroconf_confirm(
@@ -149,7 +156,7 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN):

if user_input is not None:
peblar = Peblar(
host=self._host,
host=self._discovery_info.host,
session=async_create_clientsession(
self.hass, cookie_jar=CookieJar(unsafe=True)
),
@@ -165,7 +172,7 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN):
return self.async_create_entry(
title="Peblar",
data={
CONF_HOST: self._host,
CONF_HOST: self._discovery_info.host,
CONF_PASSWORD: user_input[CONF_PASSWORD],
},
)
@@ -179,6 +186,10 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN):
),
}
),
description_placeholders={
"hostname": self._discovery_info.name.replace("._http._tcp.local.", ""),
"host": self._discovery_info.host,
},
errors=errors,
)

@@ -7,6 +7,6 @@
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "platinum",
"requirements": ["peblar==0.3.0"],
"requirements": ["peblar==0.3.3"],
"zeroconf": [{ "type": "_http._tcp.local.", "name": "pblr-*" }]
}

@@ -20,7 +20,7 @@
"data_description": {
"password": "[%key:component::peblar::config::step::user::data_description::password%]"
},
"description": "Reauthenticate with your Peblar EV charger.\n\nTo do so, you will need to enter your new password you use to log into Peblar EV charger' web interface."
"description": "Reauthenticate with your Peblar EV charger.\n\nTo do so, you will need to enter your new password you use to log in to the Peblar EV charger's web interface."
},
"reconfigure": {
"data": {
@@ -31,7 +31,7 @@
"host": "[%key:component::peblar::config::step::user::data_description::host%]",
"password": "[%key:component::peblar::config::step::user::data_description::password%]"
},
"description": "Reconfigure your Peblar EV charger.\n\nThis allows you to change the IP address of your Peblar EV charger and the password you use to log into its web interface."
"description": "Reconfigure your Peblar EV charger.\n\nThis allows you to change the IP address of your Peblar EV charger and the password you use to log in to its web interface."
},
"user": {
"data": {
@@ -40,9 +40,9 @@
},
"data_description": {
"host": "The hostname or IP address of your Peblar EV charger on your home network.",
"password": "The same password as you use to log in to the Peblar EV charger' local web interface."
"password": "The same password as you use to log in to the Peblar EV charger's local web interface."
},
"description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need to get the IP address of your Peblar EV charger and the password you use to log into its web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant."
"description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need to get the IP address of your Peblar EV charger and the password you use to log in to its web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant."
},
"zeroconf_confirm": {
"data": {
@@ -51,7 +51,7 @@
"data_description": {
"password": "[%key:component::peblar::config::step::user::data_description::password%]"
},
"description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need the password you use to log into the Peblar EV charger' web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant."
"description": "Set up your Peblar EV charger {hostname}, on IP address {host}, to integrate with Home Assistant\n\nTo do so, you will need the password you use to log in to the Peblar EV charger's web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant."
|
||||
}
}
},

@@ -27,8 +27,9 @@ PARALLEL_UPDATES = 1
class PeblarUpdateEntityDescription(UpdateEntityDescription):
"""Describe an Peblar update entity."""
|
||||
|
||||
installed_fn: Callable[[PeblarVersionInformation], str | None]
available_fn: Callable[[PeblarVersionInformation], str | None]
has_fn: Callable[[PeblarVersionInformation], bool] = lambda _: True
installed_fn: Callable[[PeblarVersionInformation], str | None]


DESCRIPTIONS: tuple[PeblarUpdateEntityDescription, ...] = (
@@ -36,13 +37,15 @@ DESCRIPTIONS: tuple[PeblarUpdateEntityDescription, ...] = (
key="firmware",
device_class=UpdateDeviceClass.FIRMWARE,
installed_fn=lambda x: x.current.firmware,
has_fn=lambda x: x.current.firmware is not None,
available_fn=lambda x: x.available.firmware,
),
PeblarUpdateEntityDescription(
key="customization",
translation_key="customization",
installed_fn=lambda x: x.current.customization,
available_fn=lambda x: x.available.customization,
has_fn=lambda x: x.current.customization is not None,
installed_fn=lambda x: x.current.customization,
),
)

@@ -60,6 +63,7 @@ async def async_setup_entry(
description=description,
)
for description in DESCRIPTIONS
if description.has_fn(entry.runtime_data.version_coordinator.data)
)

@@ -7,6 +7,7 @@ from powerfox import (
Powerfox,
PowerfoxAuthenticationError,
PowerfoxConnectionError,
PowerfoxNoDataError,
Poweropti,
)

@@ -45,5 +46,5 @@ class PowerfoxDataUpdateCoordinator(DataUpdateCoordinator[Poweropti]):
return await self.client.device(device_id=self.device.id)
except PowerfoxAuthenticationError as err:
raise ConfigEntryAuthFailed(err) from err
except PowerfoxConnectionError as err:
except (PowerfoxConnectionError, PowerfoxNoDataError) as err:
raise UpdateFailed(err) from err

@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/powerfox",
"iot_class": "cloud_polling",
"quality_scale": "silver",
"requirements": ["powerfox==1.0.0"],
"requirements": ["powerfox==1.2.0"],
"zeroconf": [
{
"type": "_http._tcp.local.",

@@ -180,7 +180,7 @@ def guarded_import(
# Allow import of _strptime needed by datetime.datetime.strptime
if name == "_strptime":
return __import__(name, globals, locals, fromlist, level)
raise ScriptError(f"Not allowed to import {name}")
raise ImportError(f"Not allowed to import {name}")


def guarded_inplacevar(op: str, target: Any, operand: Any) -> Any:
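
Switching guarded_import to raise ImportError means a sandboxed python_script can handle a refused import with an ordinary except ImportError block, which a custom ScriptError type would not allow. A simplified stand-in for the guard (the allow-list here is illustrative):

ALLOWED_IMPORTS = {"math", "_strptime"}

def guarded_import(name, *args, **kwargs):
    if name in ALLOWED_IMPORTS:
        return __import__(name, *args, **kwargs)
    raise ImportError(f"Not allowed to import {name}")

try:
    guarded_import("os")
except ImportError as err:  # scripts can now catch this normally
    print(err)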
@@ -712,12 +712,24 @@ class Recorder(threading.Thread):
setup_result = self._setup_recorder()

if not setup_result:
_LOGGER.error("Recorder setup failed, recorder shutting down")
# Give up if we could not connect
return

schema_status = migration.validate_db_schema(self.hass, self, self.get_session)
if schema_status is None:
# Give up if we could not validate the schema
_LOGGER.error("Failed to validate schema, recorder shutting down")
return
if schema_status.current_version > SCHEMA_VERSION:
_LOGGER.error(
"The database schema version %s is newer than %s which is the maximum "
"database schema version supported by the installed version of "
"Home Assistant Core, either upgrade Home Assistant Core or restore "
"the database from a backup compatible with this version",
schema_status.current_version,
SCHEMA_VERSION,
)
return
self.schema_version = schema_status.current_version

@@ -27,6 +27,7 @@ from .exceptions import PasswordIncompatible, ReolinkException, UserNotAdmin
from .host import ReolinkHost
from .services import async_setup_services
from .util import ReolinkConfigEntry, ReolinkData, get_device_uid_and_ch
from .views import PlaybackProxyView

_LOGGER = logging.getLogger(__name__)

@@ -189,6 +190,8 @@ async def async_setup_entry(

migrate_entity_ids(hass, config_entry.entry_id, host)

hass.http.register_view(PlaybackProxyView(hass))

await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)

config_entry.async_on_unload(

@@ -3,7 +3,7 @@
"name": "Reolink",
"codeowners": ["@starkillerOG"],
"config_flow": true,
"dependencies": ["webhook"],
"dependencies": ["http", "webhook"],
"dhcp": [
{
"hostname": "reolink*"

@@ -23,8 +23,8 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er

from .const import DOMAIN
from .host import ReolinkHost
from .util import ReolinkConfigEntry
from .util import get_host
from .views import async_generate_playback_proxy_url

_LOGGER = logging.getLogger(__name__)

@@ -47,15 +47,6 @@ def res_name(stream: str) -> str:
return "Low res."


def get_host(hass: HomeAssistant, config_entry_id: str) -> ReolinkHost:
"""Return the Reolink host from the config entry id."""
config_entry: ReolinkConfigEntry | None = hass.config_entries.async_get_entry(
config_entry_id
)
assert config_entry is not None
return config_entry.runtime_data.host


class ReolinkVODMediaSource(MediaSource):
"""Provide Reolink camera VODs as media sources."""

@@ -90,22 +81,22 @@ class ReolinkVODMediaSource(MediaSource):

vod_type = get_vod_type()

if vod_type in [VodRequestType.DOWNLOAD, VodRequestType.PLAYBACK]:
proxy_url = async_generate_playback_proxy_url(
config_entry_id, channel, filename, stream_res, vod_type.value
)
return PlayMedia(proxy_url, "video/mp4")

mime_type, url = await host.api.get_vod_source(
channel, filename, stream_res, vod_type
)
if _LOGGER.isEnabledFor(logging.DEBUG):
url_log = url
if "&user=" in url_log:
url_log = f"{url_log.split('&user=')[0]}&user=xxxxx&password=xxxxx"
elif "&token=" in url_log:
url_log = f"{url_log.split('&token=')[0]}&token=xxxxx"
_LOGGER.debug(
"Opening VOD stream from %s: %s", host.api.camera_name(channel), url_log
"Opening VOD stream from %s: %s",
host.api.camera_name(channel),
host.api.hide_password(url),
)

if mime_type == "video/mp4":
return PlayMedia(url, mime_type)

stream = create_stream(self.hass, url, {}, DynamicStreamSettings())
stream.add_provider("hls", timeout=3600)
stream_url: str = stream.endpoint_url("hls")

@@ -22,6 +22,7 @@ from reolink_aio.exceptions import (
)

from homeassistant import config_entries
from homeassistant.components.media_source import Unresolvable
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import device_registry as dr
@@ -51,6 +52,18 @@ def is_connected(hass: HomeAssistant, config_entry: config_entries.ConfigEntry)
)


def get_host(hass: HomeAssistant, config_entry_id: str) -> ReolinkHost:
"""Return the Reolink host from the config entry id."""
config_entry: ReolinkConfigEntry | None = hass.config_entries.async_get_entry(
config_entry_id
)
if config_entry is None:
raise Unresolvable(
f"Could not find Reolink config entry id '{config_entry_id}'."
)
return config_entry.runtime_data.host


def get_device_uid_and_ch(
device: dr.DeviceEntry, host: ReolinkHost
) -> tuple[list[str], int | None, bool]:
@@ -69,7 +82,8 @@ def get_device_uid_and_ch(
ch = int(device_uid[1][5:])
is_chime = True
else:
ch = host.api.channel_for_uid(device_uid[1])
device_uid_part = "_".join(device_uid[1:])
ch = host.api.channel_for_uid(device_uid_part)
return (device_uid, ch, is_chime)


homeassistant/components/reolink/views.py (new file, 147 lines)
@@ -0,0 +1,147 @@
"""Reolink Integration views."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from base64 import urlsafe_b64decode, urlsafe_b64encode
|
||||
from http import HTTPStatus
|
||||
import logging
|
||||
|
||||
from aiohttp import ClientError, ClientTimeout, web
|
||||
from reolink_aio.enums import VodRequestType
|
||||
from reolink_aio.exceptions import ReolinkError
|
||||
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
from homeassistant.components.media_source import Unresolvable
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.util.ssl import SSLCipherList
|
||||
|
||||
from .util import get_host
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@callback
|
||||
def async_generate_playback_proxy_url(
|
||||
config_entry_id: str, channel: int, filename: str, stream_res: str, vod_type: str
|
||||
) -> str:
|
||||
"""Generate proxy URL for event video."""
|
||||
|
||||
url_format = PlaybackProxyView.url
|
||||
return url_format.format(
|
||||
config_entry_id=config_entry_id,
|
||||
channel=channel,
|
||||
filename=urlsafe_b64encode(filename.encode("utf-8")).decode("utf-8"),
|
||||
stream_res=stream_res,
|
||||
vod_type=vod_type,
|
||||
)
|
||||
|
||||
|
||||
class PlaybackProxyView(HomeAssistantView):
|
||||
"""View to proxy playback video from Reolink."""
|
||||
|
||||
requires_auth = True
|
||||
url = "/api/reolink/video/{config_entry_id}/{channel}/{stream_res}/{vod_type}/{filename}"
|
||||
name = "api:reolink_playback"
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize a proxy view."""
|
||||
self.hass = hass
|
||||
self.session = async_get_clientsession(
|
||||
hass,
|
||||
verify_ssl=False,
|
||||
ssl_cipher=SSLCipherList.INSECURE,
|
||||
)
|
||||
|
||||
async def get(
|
||||
self,
|
||||
request: web.Request,
|
||||
config_entry_id: str,
|
||||
channel: str,
|
||||
stream_res: str,
|
||||
vod_type: str,
|
||||
filename: str,
|
||||
retry: int = 2,
|
||||
) -> web.StreamResponse:
|
||||
"""Get playback proxy video response."""
|
||||
retry = retry - 1
|
||||
|
||||
filename_decoded = urlsafe_b64decode(filename.encode("utf-8")).decode("utf-8")
|
||||
ch = int(channel)
|
||||
try:
|
||||
host = get_host(self.hass, config_entry_id)
|
||||
except Unresolvable:
|
||||
err_str = f"Reolink playback proxy could not find config entry id: {config_entry_id}"
|
||||
_LOGGER.warning(err_str)
|
||||
return web.Response(body=err_str, status=HTTPStatus.BAD_REQUEST)
|
||||
|
||||
try:
|
||||
mime_type, reolink_url = await host.api.get_vod_source(
|
||||
ch, filename_decoded, stream_res, VodRequestType(vod_type)
|
||||
)
|
||||
except ReolinkError as err:
|
||||
_LOGGER.warning("Reolink playback proxy error: %s", str(err))
|
||||
return web.Response(body=str(err), status=HTTPStatus.BAD_REQUEST)
|
||||
|
||||
if _LOGGER.isEnabledFor(logging.DEBUG):
|
||||
_LOGGER.debug(
|
||||
"Opening VOD stream from %s: %s",
|
||||
host.api.camera_name(ch),
|
||||
host.api.hide_password(reolink_url),
|
||||
)
|
||||
|
||||
try:
|
||||
reolink_response = await self.session.get(
|
||||
reolink_url,
|
||||
timeout=ClientTimeout(
|
||||
connect=15, sock_connect=15, sock_read=5, total=None
|
||||
),
|
||||
)
|
||||
except ClientError as err:
|
||||
err_str = host.api.hide_password(
|
||||
f"Reolink playback error while getting mp4: {err!s}"
|
||||
)
|
||||
if retry <= 0:
|
||||
_LOGGER.warning(err_str)
|
||||
return web.Response(body=err_str, status=HTTPStatus.BAD_REQUEST)
|
||||
_LOGGER.debug("%s, renewing token", err_str)
|
||||
await host.api.expire_session(unsubscribe=False)
|
||||
return await self.get(
|
||||
request, config_entry_id, channel, stream_res, vod_type, filename, retry
|
||||
)
|
||||
|
||||
# Reolink typo "apolication/octet-stream" instead of "application/octet-stream"
|
||||
if reolink_response.content_type not in [
|
||||
"video/mp4",
|
||||
"application/octet-stream",
|
||||
"apolication/octet-stream",
|
||||
]:
|
||||
err_str = f"Reolink playback expected video/mp4 but got {reolink_response.content_type}"
|
||||
_LOGGER.error(err_str)
|
||||
return web.Response(body=err_str, status=HTTPStatus.BAD_REQUEST)
|
||||
|
||||
response = web.StreamResponse(
|
||||
status=200,
|
||||
reason="OK",
|
||||
headers={
|
||||
"Content-Type": "video/mp4",
|
||||
},
|
||||
)
|
||||
|
||||
if reolink_response.content_length is not None:
|
||||
response.content_length = reolink_response.content_length
|
||||
|
||||
await response.prepare(request)
|
||||
|
||||
try:
|
||||
async for chunk in reolink_response.content.iter_chunked(65536):
|
||||
await response.write(chunk)
|
||||
except TimeoutError:
|
||||
_LOGGER.debug(
|
||||
"Timeout while reading Reolink playback from %s, writing EOF",
|
||||
host.api.nvr_name,
|
||||
)
|
||||
|
||||
reolink_response.release()
|
||||
await response.write_eof()
|
||||
return response
|
||||
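
views.py embeds the recording filename in the proxy URL as URL-safe base64, since raw filenames can contain slashes and other characters that would break a URL path segment; the get() handler decodes it back. The round trip in isolation:

from base64 import urlsafe_b64decode, urlsafe_b64encode

filename = "Rec/2025-01-01 12:00:00.mp4"
token = urlsafe_b64encode(filename.encode("utf-8")).decode("utf-8")
assert "/" not in token  # safe to use as a single URL path segment
assert urlsafe_b64decode(token.encode("utf-8")).decode("utf-8") == filename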
@@ -9,7 +9,13 @@ from datetime import timedelta
import logging
from typing import Any

from roborock import HomeDataRoom, RoborockException, RoborockInvalidCredentials
from roborock import (
HomeDataRoom,
RoborockException,
RoborockInvalidCredentials,
RoborockInvalidUserAgreement,
RoborockNoUserAgreement,
)
from roborock.containers import DeviceData, HomeDataDevice, HomeDataProduct, UserData
from roborock.version_1_apis.roborock_mqtt_client_v1 import RoborockMqttClientV1
from roborock.version_a01_apis import RoborockMqttClientA01
@@ -60,12 +66,23 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
translation_domain=DOMAIN,
translation_key="invalid_credentials",
) from err
except RoborockInvalidUserAgreement as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="invalid_user_agreement",
) from err
except RoborockNoUserAgreement as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="no_user_agreement",
) from err
except RoborockException as err:
raise ConfigEntryNotReady(
"Failed to get Roborock home data",
translation_domain=DOMAIN,
translation_key="home_data_fail",
) from err

_LOGGER.debug("Got home data %s", home_data)
all_devices: list[HomeDataDevice] = home_data.devices + home_data.received_devices
device_map: dict[str, HomeDataDevice] = {

@@ -60,7 +60,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN):
if user_input is not None:
username = user_input[CONF_USERNAME]
await self.async_set_unique_id(username.lower())
self._abort_if_unique_id_configured()
self._abort_if_unique_id_configured(error="already_configured_account")
self._username = username
_LOGGER.debug("Requesting code for Roborock account")
self._client = RoborockApiClient(username)

@@ -28,7 +28,7 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"already_configured_account": "[%key:common::config_flow::abort::already_configured_account%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
}
},
@@ -422,6 +422,12 @@
},
"update_options_failed": {
"message": "Failed to update Roborock options"
},
"invalid_user_agreement": {
"message": "User agreement must be accepted again. Open your Roborock app and accept the agreement."
},
"no_user_agreement": {
"message": "You have not valid user agreement. Open your Roborock app and accept the agreement."
|
||||
}
},
"services": {

@@ -73,7 +73,6 @@ class SlideConfigFlow(ConfigFlow, domain=DOMAIN):
return {}

# API version 2 is not working, try API version 1 instead
await slide.slide_del(user_input[CONF_HOST])
await slide.slide_add(
user_input[CONF_HOST],
user_input.get(CONF_PASSWORD, ""),
@@ -185,14 +184,15 @@ class SlideConfigFlow(ConfigFlow, domain=DOMAIN):

await self.async_set_unique_id(self._mac)

self._abort_if_unique_id_configured(
{CONF_HOST: discovery_info.host}, reload_on_update=True
)
ip = str(discovery_info.ip_address)
_LOGGER.debug("Slide device discovered, ip %s", ip)

self._abort_if_unique_id_configured({CONF_HOST: ip}, reload_on_update=True)

errors = {}
if errors := await self.async_test_connection(
{
CONF_HOST: self._host,
CONF_HOST: ip,
}
):
return self.async_abort(
@@ -202,7 +202,7 @@ class SlideConfigFlow(ConfigFlow, domain=DOMAIN):
},
)

self._host = discovery_info.host
self._host = ip

return await self.async_step_zeroconf_confirm()

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/solax",
"iot_class": "local_polling",
"loggers": ["solax"],
"requirements": ["solax==3.2.1"]
"requirements": ["solax==3.2.3"]
}

@@ -331,9 +331,16 @@ class SQLSensor(ManualTriggerSensorEntity):
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, unique_id)},
manufacturer="SQL",
name=self.name,
name=self._rendered.get(CONF_NAME),
)

@property
def name(self) -> str | None:
"""Name of the entity."""
if self.has_entity_name:
return self._attr_name
return self._rendered.get(CONF_NAME)

async def async_added_to_hass(self) -> None:
"""Call when entity about to be added to hass."""
await super().async_added_to_hass()

@@ -115,6 +115,7 @@ async def build_item_response(
item_type = CONTENT_TYPE_TO_CHILD_TYPE[search_type]

children = []
list_playable = []
for item in result["items"]:
item_id = str(item["id"])
item_thumbnail: str | None = None
@@ -131,7 +132,7 @@ async def build_item_response(
child_media_class = CONTENT_TYPE_MEDIA_CLASS[MediaType.ALBUM]
can_expand = True
can_play = True
elif item["hasitems"]:
elif item["hasitems"] and not item["isaudio"]:
child_item_type = "Favorites"
child_media_class = CONTENT_TYPE_MEDIA_CLASS["Favorites"]
can_expand = True
@@ -139,8 +140,8 @@ async def build_item_response(
else:
child_item_type = "Favorites"
child_media_class = CONTENT_TYPE_MEDIA_CLASS[MediaType.TRACK]
can_expand = False
can_play = True
can_expand = item["hasitems"]
can_play = item["isaudio"] and item.get("url")

if artwork_track_id := item.get("artwork_track_id"):
if internal_request:
@@ -166,6 +167,7 @@ async def build_item_response(
thumbnail=item_thumbnail,
)
)
list_playable.append(can_play)

if children is None:
raise BrowseError(f"Media not found: {search_type} / {search_id}")
@@ -179,7 +181,7 @@ async def build_item_response(
children_media_class=media_class["children"],
media_content_id=search_id,
media_content_type=search_type,
can_play=search_type != "Favorites",
can_play=any(list_playable),
children=children,
can_expand=True,
)

@@ -7,5 +7,5 @@
"iot_class": "cloud_polling",
"loggers": ["pysuez", "regex"],
"quality_scale": "bronze",
"requirements": ["pysuezV2==1.3.5"]
"requirements": ["pysuezV2==2.0.3"]
}

@@ -85,6 +85,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) -

scopes = calls[0]["scopes"]
region = calls[0]["region"]
vehicle_metadata = calls[0]["vehicles"]
products = calls[1]["response"]

device_registry = dr.async_get(hass)
@@ -102,7 +103,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) -
)

for product in products:
if "vin" in product and Scope.VEHICLE_DEVICE_DATA in scopes:
if (
"vin" in product
and vehicle_metadata.get(product["vin"], {}).get("access")
and Scope.VEHICLE_DEVICE_DATA in scopes
):
# Remove the protobuf 'cached_data' that we do not use to save memory
product.pop("cached_data", None)
vin = product["vin"]

@@ -300,5 +300,5 @@
"documentation": "https://www.home-assistant.io/integrations/tplink",
"iot_class": "local_polling",
"loggers": ["kasa"],
"requirements": ["python-kasa[speedups]==0.9.0"]
"requirements": ["python-kasa[speedups]==0.9.1"]
}

@@ -21,7 +21,7 @@
},
"user_auth_confirm": {
"title": "Authenticate",
"description": "The device requires authentication, please input your TP-Link credentials below.",
"description": "The device requires authentication, please input your TP-Link credentials below. Note, that both e-mail and password are case-sensitive.",
|
||||
"data": {
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
|
||||
@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["twentemilieu"],
  "quality_scale": "silver",
-  "requirements": ["twentemilieu==2.2.0"]
+  "requirements": ["twentemilieu==2.2.1"]
}

@@ -40,7 +40,7 @@
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["uiprotect", "unifi_discovery"],
-  "requirements": ["uiprotect==7.1.0", "unifi-discovery==1.2.0"],
+  "requirements": ["uiprotect==7.4.1", "unifi-discovery==1.2.0"],
  "ssdp": [
    {
      "manufacturer": "Ubiquiti Networks",

@@ -312,7 +312,7 @@ class StateVacuumEntity(
    @property
    def capability_attributes(self) -> dict[str, Any] | None:
        """Return capability attributes."""
-        if VacuumEntityFeature.FAN_SPEED in self.supported_features:
+        if VacuumEntityFeature.FAN_SPEED in self.supported_features_compat:
            return {ATTR_FAN_SPEED_LIST: self.fan_speed_list}
        return None

@@ -330,7 +330,7 @@ class StateVacuumEntity(
    def state_attributes(self) -> dict[str, Any]:
        """Return the state attributes of the vacuum cleaner."""
        data: dict[str, Any] = {}
-        supported_features = self.supported_features
+        supported_features = self.supported_features_compat

        if VacuumEntityFeature.BATTERY in supported_features:
            data[ATTR_BATTERY_LEVEL] = self.battery_level
@@ -369,6 +369,19 @@ class StateVacuumEntity(
        """Flag vacuum cleaner features that are supported."""
        return self._attr_supported_features

+    @property
+    def supported_features_compat(self) -> VacuumEntityFeature:
+        """Return the supported features as VacuumEntityFeature.
+
+        Remove this compatibility shim in 2025.1 or later.
+        """
+        features = self.supported_features
+        if type(features) is int:  # noqa: E721
+            new_features = VacuumEntityFeature(features)
+            self._report_deprecated_supported_features_values(new_features)
+            return new_features
+        return features
+
    def stop(self, **kwargs: Any) -> None:
        """Stop the vacuum cleaner."""
        raise NotImplementedError

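(Annotation, not part of the diff: supported_features_compat re-wraps a raw int returned by an old integration in the VacuumEntityFeature IntFlag, so membership tests keep working while the deprecation is logged once. A reduced sketch of the conversion; the flag names and values here are illustrative, not the real constants.)

# Illustrative sketch only; flag values are made up for the example.
from enum import IntFlag

class VacuumEntityFeatureDemo(IntFlag):
    FAN_SPEED = 32
    BATTERY = 64

legacy = 32 | 64   # deprecated: an integration returning a plain int
features = legacy
if type(features) is int:  # exactly the check the shim performs
    features = VacuumEntityFeatureDemo(features)  # re-wrap as IntFlag

print(VacuumEntityFeatureDemo.FAN_SPEED in features)  # True
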
@@ -135,7 +135,18 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
-    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
+    in_use_platforms = []
+    if hass.data[DOMAIN][VS_SWITCHES]:
+        in_use_platforms.append(Platform.SWITCH)
+    if hass.data[DOMAIN][VS_FANS]:
+        in_use_platforms.append(Platform.FAN)
+    if hass.data[DOMAIN][VS_LIGHTS]:
+        in_use_platforms.append(Platform.LIGHT)
+    if hass.data[DOMAIN][VS_SENSORS]:
+        in_use_platforms.append(Platform.SENSOR)
+    unload_ok = await hass.config_entries.async_unload_platforms(
+        entry, in_use_platforms
+    )
    if unload_ok:
        hass.data.pop(DOMAIN)

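(Annotation, not part of the diff: the vesync fix unloads only the platforms that were actually set up for the discovered devices, which appears to be the failure this commit addresses, since asking Home Assistant to unload a platform that was never loaded fails. A simplified stand-alone sketch of the platform selection, with plain strings standing in for the VS_* keys and the Platform enum.)

# Illustrative sketch only; keys and platform names are simplified stand-ins.
VS_SWITCHES, VS_FANS, VS_LIGHTS, VS_SENSORS = "switches", "fans", "lights", "sensors"
domain_data = {VS_SWITCHES: ["outlet"], VS_FANS: [], VS_LIGHTS: ["bulb"], VS_SENSORS: []}

in_use_platforms = []
if domain_data[VS_SWITCHES]:
    in_use_platforms.append("switch")
if domain_data[VS_FANS]:
    in_use_platforms.append("fan")
if domain_data[VS_LIGHTS]:
    in_use_platforms.append("light")
if domain_data[VS_SENSORS]:
    in_use_platforms.append("sensor")

print(in_use_platforms)  # ['switch', 'light']: only these are passed to unload
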
@@ -56,6 +56,7 @@ SKU_TO_BASE_DEVICE = {
    "LAP-V201S-WEU": "Vital200S",  # Alt ID Model Vital200S
    "LAP-V201S-WUS": "Vital200S",  # Alt ID Model Vital200S
    "LAP-V201-AUSR": "Vital200S",  # Alt ID Model Vital200S
+    "LAP-V201S-AEUR": "Vital200S",  # Alt ID Model Vital200S
    "LAP-V201S-AUSR": "Vital200S",  # Alt ID Model Vital200S
    "Vital100S": "Vital100S",
    "LAP-V102S-WUS": "Vital100S",  # Alt ID Model Vital100S

@@ -3,7 +3,7 @@
  "config": {
    "step": {
      "user": {
-        "description": "For Origin and Destination, enter the address or the GPS coordinates of the location (GPS coordinates has to be separated by a comma). You can also enter an entity id which provides this information in its state, an entity id with latitude and longitude attributes, or zone friendly name.",
+        "description": "For Origin and Destination, enter the address or the GPS coordinates of the location (GPS coordinates has to be separated by a comma). You can also enter an entity ID which provides this information in its state, an entity ID with latitude and longitude attributes, or zone friendly name.",
        "data": {
          "name": "[%key:common::config_flow::data::name%]",
          "origin": "Origin",
@@ -26,13 +26,13 @@
        "description": "Some options will allow you to force the integration to use a particular route or avoid a particular route in its time travel calculation.",
        "data": {
          "units": "Units",
-          "vehicle_type": "Vehicle Type",
+          "vehicle_type": "Vehicle type",
          "incl_filter": "Exact streetname which must be part of the selected route",
          "excl_filter": "Exact streetname which must NOT be part of the selected route",
-          "realtime": "Realtime Travel Time?",
-          "avoid_toll_roads": "Avoid Toll Roads?",
-          "avoid_ferries": "Avoid Ferries?",
-          "avoid_subscription_roads": "Avoid Roads Needing a Vignette / Subscription?"
+          "realtime": "Realtime travel time?",
+          "avoid_toll_roads": "Avoid toll roads?",
+          "avoid_ferries": "Avoid ferries?",
+          "avoid_subscription_roads": "Avoid roads needing a vignette / subscription?"
        }
      }
    }
@@ -47,8 +47,8 @@
    },
    "units": {
      "options": {
-        "metric": "Metric System",
-        "imperial": "Imperial System"
+        "metric": "Metric system",
+        "imperial": "Imperial system"
      }
    },
    "region": {
@@ -63,8 +63,8 @@
  },
  "services": {
    "get_travel_times": {
-      "name": "Get Travel Times",
-      "description": "Get route alternatives and travel times between two locations.",
+      "name": "Get travel times",
+      "description": "Retrieves route alternatives and travel times between two locations.",
      "fields": {
        "origin": {
          "name": "[%key:component::waze_travel_time::config::step::user::data::origin%]",
@@ -76,7 +76,7 @@
        },
        "region": {
          "name": "[%key:component::waze_travel_time::config::step::user::data::region%]",
-          "description": "The region. Controls which waze server is used."
+          "description": "The region. Controls which Waze server is used."
        },
        "units": {
          "name": "[%key:component::waze_travel_time::options::step::init::data::units%]",

@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_push",
  "loggers": ["whirlpool"],
-  "requirements": ["whirlpool-sixth-sense==0.18.8"]
+  "requirements": ["whirlpool-sixth-sense==0.18.11"]
}

@@ -7,5 +7,5 @@
  "iot_class": "local_polling",
  "loggers": ["holidays"],
  "quality_scale": "internal",
-  "requirements": ["holidays==0.63"]
+  "requirements": ["holidays==0.64"]
}

@@ -6,5 +6,5 @@
  "iot_class": "local_polling",
  "loggers": ["zabbix_utils"],
  "quality_scale": "legacy",
-  "requirements": ["zabbix-utils==2.0.1"]
+  "requirements": ["zabbix-utils==2.0.2"]
}

@@ -87,7 +87,7 @@ class ZHAEntity(LogMixin, RestoreEntity, Entity):
            manufacturer=zha_device_info[ATTR_MANUFACTURER],
            model=zha_device_info[ATTR_MODEL],
            name=zha_device_info[ATTR_NAME],
-            via_device=(DOMAIN, zha_gateway.state.node_info.ieee),
+            via_device=(DOMAIN, str(zha_gateway.state.node_info.ieee)),
        )

    @callback

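(Annotation, not part of the diff: node_info.ieee is an EUI64 object, and device registry identifiers should be plain strings, so wrapping it in str() keeps the via_device reference matching the coordinator's device entry. A tiny sketch with a stand-in class, since the real type lives in zigpy.)

# Illustrative sketch only; EUI64Demo is a stand-in, not zigpy's EUI64.
class EUI64Demo:
    def __init__(self, text: str) -> None:
        self._text = text

    def __str__(self) -> str:
        return self._text

ieee = EUI64Demo("00:0d:6f:00:0a:90:69:e7")
DOMAIN = "zha"

before = (DOMAIN, ieee)      # identifier tuple containing a non-string object
after = (DOMAIN, str(ieee))  # what the patch stores instead

print(after)  # ('zha', '00:0d:6f:00:0a:90:69:e7')
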
@@ -21,7 +21,7 @@
    "zha",
    "universal_silabs_flasher"
  ],
-  "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.43"],
+  "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.45"],
  "usb": [
    {
      "vid": "10C4",

@@ -879,6 +879,12 @@
        },
        "regulator_set_point": {
          "name": "Regulator set point"
        },
+        "detection_delay": {
+          "name": "Detection delay"
+        },
+        "fading_time": {
+          "name": "Fading time"
+        }
      },
      "select": {
@@ -1237,6 +1243,9 @@
        },
        "local_temperature_floor": {
          "name": "Floor temperature"
        },
+        "self_test": {
+          "name": "Self test result"
+        }
      },
      "switch": {

@@ -25,7 +25,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2025
MINOR_VERSION: Final = 1
-PATCH_VERSION: Final = "0b5"
+PATCH_VERSION: Final = "2"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)

@@ -7,7 +7,7 @@ import asyncio
from collections import deque
from collections.abc import Callable, Coroutine, Iterable, Mapping
import dataclasses
-from enum import Enum, auto
+from enum import Enum, IntFlag, auto
import functools as ft
import logging
import math
@@ -1639,6 +1639,31 @@ class Entity(
            self.hass, integration_domain=platform_name, module=type(self).__module__
        )

+    @callback
+    def _report_deprecated_supported_features_values(
+        self, replacement: IntFlag
+    ) -> None:
+        """Report deprecated supported features values."""
+        if self._deprecated_supported_features_reported is True:
+            return
+        self._deprecated_supported_features_reported = True
+        report_issue = self._suggest_report_issue()
+        report_issue += (
+            " and reference "
+            "https://developers.home-assistant.io/blog/2023/12/28/support-feature-magic-numbers-deprecation"
+        )
+        _LOGGER.warning(
+            (
+                "Entity %s (%s) is using deprecated supported features"
+                " values which will be removed in HA Core 2025.1. Instead it should use"
+                " %s, please %s"
+            ),
+            self.entity_id,
+            type(self),
+            repr(replacement),
+            report_issue,
+        )
+

class ToggleEntityDescription(EntityDescription, frozen_or_thawed=True):
    """A class that describes toggle entities."""

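(Annotation, not part of the diff: the new Entity helper is a one-shot warning; a per-instance flag suppresses repeats and the message points integrations at the enum replacement. A stubbed-down sketch of the same pattern outside Home Assistant.)

# Illustrative sketch only; DemoEntity stubs out the real Entity machinery.
import logging
from enum import IntFlag

logging.basicConfig(level=logging.WARNING)
_LOGGER = logging.getLogger(__name__)

class DemoFeature(IntFlag):
    SOMETHING = 1

class DemoEntity:
    _deprecated_supported_features_reported = False
    entity_id = "vacuum.demo"

    def _report_deprecated_supported_features_values(self, replacement: IntFlag) -> None:
        if self._deprecated_supported_features_reported:
            return  # already warned for this instance
        self._deprecated_supported_features_reported = True
        _LOGGER.warning(
            "Entity %s is using deprecated supported features values, use %r instead",
            self.entity_id,
            replacement,
        )

entity = DemoEntity()
entity._report_deprecated_supported_features_values(DemoFeature(1))  # warns once
entity._report_deprecated_supported_features_values(DemoFeature(1))  # silent
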
@@ -5,7 +5,6 @@ from __future__ import annotations
from functools import cache
from getpass import getuser
import logging
-import os
import platform
from typing import TYPE_CHECKING, Any

@@ -13,6 +12,7 @@ from homeassistant.const import __version__ as current_version
from homeassistant.core import HomeAssistant
from homeassistant.loader import bind_hass
from homeassistant.util.package import is_docker_env, is_virtual_env
+from homeassistant.util.system_info import is_official_image

from .hassio import is_hassio
from .importlib import async_import_module
@@ -23,12 +23,6 @@ _LOGGER = logging.getLogger(__name__)
_DATA_MAC_VER = "system_info_mac_ver"


-@cache
-def is_official_image() -> bool:
-    """Return True if Home Assistant is running in an official container."""
-    return os.path.isfile("/OFFICIAL_IMAGE")
-
-
@singleton(_DATA_MAC_VER)
async def async_get_mac_ver(hass: HomeAssistant) -> str:
    """Return the macOS version."""

@@ -31,12 +31,12 @@ dbus-fast==2.24.3
fnv-hash-fast==1.0.2
go2rtc-client==0.1.2
ha-ffmpeg==3.2.2
-habluetooth==3.6.0
+habluetooth==3.7.0
hass-nabucasa==0.87.0
hassil==2.1.0
home-assistant-bluetooth==1.13.0
-home-assistant-frontend==20241231.0
-home-assistant-intents==2024.12.20
+home-assistant-frontend==20250109.0
+home-assistant-intents==2025.1.1
httpx==0.27.2
ifaddr==0.2.0
Jinja2==3.1.5

@@ -15,6 +15,8 @@ from urllib.parse import urlparse

from packaging.requirements import InvalidRequirement, Requirement

+from .system_info import is_official_image
+
_LOGGER = logging.getLogger(__name__)


@@ -28,8 +30,13 @@ def is_virtual_env() -> bool:

@cache
def is_docker_env() -> bool:
-    """Return True if we run in a docker env."""
-    return Path("/.dockerenv").exists()
+    """Return True if we run in a container env."""
+    return (
+        Path("/.dockerenv").exists()
+        or Path("/run/.containerenv").exists()
+        or "KUBERNETES_SERVICE_HOST" in os.environ
+        or is_official_image()
+    )


def get_installed_versions(specifiers: set[str]) -> set[str]:

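(Annotation, not part of the diff: after this change is_docker_env recognises Docker, Podman, Kubernetes, and the official image marker, not just /.dockerenv. A stand-alone restatement of the two helpers as they end up after the move, runnable outside Home Assistant; paths and the env var match the hunks above.)

# Stand-alone restatement of the patched helpers; paths match the diff above.
from functools import cache
import os
from pathlib import Path

@cache
def is_official_image() -> bool:
    """Return True when the official Home Assistant image marker is present."""
    return os.path.isfile("/OFFICIAL_IMAGE")

@cache
def is_docker_env() -> bool:
    """Return True if we run in a container env."""
    return (
        Path("/.dockerenv").exists()                 # Docker
        or Path("/run/.containerenv").exists()       # Podman
        or "KUBERNETES_SERVICE_HOST" in os.environ   # Kubernetes pod
        or is_official_image()                       # official HA container
    )

print(is_docker_env())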