Compare commits

..

14 Commits

Author SHA1 Message Date
Franck Nijhof a4474b2794 Bump version to 2025.2.0b10 2025-02-05 12:26:27 +00:00
Erik Montnemery 72a69d7e41 Adjust backup filename scheme (#137424)
* Adjust backup filename scheme

* Update tests
2025-02-05 12:16:11 +00:00
Erik Montnemery e8314fb286 Adjust logic for per-backup agent encryption (#137420) 2025-02-05 12:16:07 +00:00
Erik Montnemery 30c099ef4e Allow creating backup if at least one agent is available (#137409) 2025-02-05 12:16:04 +00:00
Paulus Schoutsen c506c9080a Simplify llm calendar tool (#137402)
* Simplify calendar tool

* Clean up exposed entities
2025-02-05 12:16:01 +00:00
Brett Adams 79563f3746 Handle powerwall at zero percent in Tesla Fleet and Tessie (#137393)
* Handle powerwall zero

* Add missing value_fn call
2025-02-05 12:15:56 +00:00
Brett Adams 0764c7e773 Bump Tesla Fleet API to v0.9.8 (#137379)
* v0.9.7

* v0.9.8
2025-02-05 12:14:14 +00:00
J. Nick Koston fa83591148 Allow ignored Bluetooth adapters to be set up from the user flow (#137373) 2025-02-05 11:57:16 +00:00
J. Nick Koston df2b29aef1 Bump led-ble to 1.1.6 (#137369) 2025-02-05 11:57:13 +00:00
Jan-Philipp Benecke da8d300f29 Fix sqlalchemy deprecation warning that declarative_base has moved (#137360) 2025-02-05 11:57:09 +00:00
Marc Mueller 2c5fd4ee2a Update led-ble to 1.1.5 (#137347) 2025-02-05 11:57:06 +00:00
J. Nick Koston 16d9270833 Fix memory leak when unloading DataUpdateCoordinator (#137338)
* check wiz

* Fix memory leak when unloading DataUpdateCoordinator

fixes #137237

* handle namespace conflict

* handle namespace conflict

* address review comments
2025-02-05 11:57:02 +00:00
Erik Montnemery d8179dacc6 Report progress while restoring supervisor backup (#137313) 2025-02-05 11:56:56 +00:00
TimL 3dc075f287 Bump pysmlight to v0.1.7 (#137390) 2025-02-05 09:43:38 +01:00
30 changed files with 533 additions and 137 deletions
@@ -33,6 +33,7 @@ from .manager import (
ManagerBackup,
NewBackup,
RestoreBackupEvent,
RestoreBackupStage,
RestoreBackupState,
WrittenBackup,
)
@@ -61,6 +62,7 @@ __all__ = [
"ManagerBackup",
"NewBackup",
"RestoreBackupEvent",
"RestoreBackupStage",
"RestoreBackupState",
"WrittenBackup",
"async_get_manager",
+49 -18
View File
@@ -9,6 +9,7 @@ from dataclasses import dataclass, replace
from enum import StrEnum
import hashlib
import io
from itertools import chain
import json
from pathlib import Path, PurePath
import shutil
@@ -827,7 +828,7 @@ class BackupManager:
password=None,
)
await written_backup.release_stream()
self.known_backups.add(written_backup.backup, agent_errors)
self.known_backups.add(written_backup.backup, agent_errors, [])
return written_backup.backup.backup_id
async def async_create_backup(
@@ -951,12 +952,23 @@ class BackupManager:
with_automatic_settings: bool,
) -> NewBackup:
"""Initiate generating a backup."""
if not agent_ids:
raise BackupManagerError("At least one agent must be selected")
if invalid_agents := [
unavailable_agents = [
agent_id for agent_id in agent_ids if agent_id not in self.backup_agents
]:
raise BackupManagerError(f"Invalid agents selected: {invalid_agents}")
]
if not (
available_agents := [
agent_id for agent_id in agent_ids if agent_id in self.backup_agents
]
):
raise BackupManagerError(
f"At least one available backup agent must be selected, got {agent_ids}"
)
if unavailable_agents:
LOGGER.warning(
"Backup agents %s are not available, will backupp to %s",
unavailable_agents,
available_agents,
)
if include_all_addons and include_addons:
raise BackupManagerError(
"Cannot include all addons and specify specific addons"
@@ -973,7 +985,7 @@ class BackupManager:
new_backup,
self._backup_task,
) = await self._reader_writer.async_create_backup(
agent_ids=agent_ids,
agent_ids=available_agents,
backup_name=backup_name,
extra_metadata=extra_metadata
| {
@@ -992,7 +1004,9 @@ class BackupManager:
raise BackupManagerError(str(err)) from err
backup_finish_task = self._backup_finish_task = self.hass.async_create_task(
self._async_finish_backup(agent_ids, with_automatic_settings, password),
self._async_finish_backup(
available_agents, unavailable_agents, with_automatic_settings, password
),
name="backup_manager_finish_backup",
)
if not raise_task_error:
@@ -1009,7 +1023,11 @@ class BackupManager:
return new_backup
async def _async_finish_backup(
self, agent_ids: list[str], with_automatic_settings: bool, password: str | None
self,
available_agents: list[str],
unavailable_agents: list[str],
with_automatic_settings: bool,
password: str | None,
) -> None:
"""Finish a backup."""
if TYPE_CHECKING:
@@ -1028,7 +1046,7 @@ class BackupManager:
LOGGER.debug(
"Generated new backup with backup_id %s, uploading to agents %s",
written_backup.backup.backup_id,
agent_ids,
available_agents,
)
self.async_on_backup_event(
CreateBackupEvent(
@@ -1041,13 +1059,15 @@ class BackupManager:
try:
agent_errors = await self._async_upload_backup(
backup=written_backup.backup,
agent_ids=agent_ids,
agent_ids=available_agents,
open_stream=written_backup.open_stream,
password=password,
)
finally:
await written_backup.release_stream()
self.known_backups.add(written_backup.backup, agent_errors)
self.known_backups.add(
written_backup.backup, agent_errors, unavailable_agents
)
if not agent_errors:
if with_automatic_settings:
# create backup was successful, update last_completed_automatic_backup
@@ -1056,7 +1076,7 @@ class BackupManager:
backup_success = True
if with_automatic_settings:
self._update_issue_after_agent_upload(agent_errors)
self._update_issue_after_agent_upload(agent_errors, unavailable_agents)
# delete old backups more numerous than copies
# try this regardless of agent errors above
await delete_backups_exceeding_configured_count(self)
@@ -1216,10 +1236,10 @@ class BackupManager:
)
def _update_issue_after_agent_upload(
self, agent_errors: dict[str, Exception]
self, agent_errors: dict[str, Exception], unavailable_agents: list[str]
) -> None:
"""Update issue registry after a backup is uploaded to agents."""
if not agent_errors:
if not agent_errors and not unavailable_agents:
ir.async_delete_issue(self.hass, DOMAIN, "automatic_backup_failed")
return
ir.async_create_issue(
@@ -1233,7 +1253,13 @@ class BackupManager:
translation_key="automatic_backup_failed_upload_agents",
translation_placeholders={
"failed_agents": ", ".join(
self.backup_agents[agent_id].name for agent_id in agent_errors
chain(
(
self.backup_agents[agent_id].name
for agent_id in agent_errors
),
unavailable_agents,
)
)
},
)
@@ -1302,11 +1328,12 @@ class KnownBackups:
self,
backup: AgentBackup,
agent_errors: dict[str, Exception],
unavailable_agents: list[str],
) -> None:
"""Add a backup."""
self._backups[backup.backup_id] = KnownBackup(
backup_id=backup.backup_id,
failed_agent_ids=list(agent_errors),
failed_agent_ids=list(chain(agent_errors, unavailable_agents)),
)
self._manager.store.save()
@@ -1412,7 +1439,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
manager = self._hass.data[DATA_MANAGER]
agent_config = manager.config.data.agents.get(self._local_agent_id)
if agent_config and not agent_config.protected:
if (
self._local_agent_id in agent_ids
and agent_config
and not agent_config.protected
):
password = None
backup = AgentBackup(
+1 -1
View File
@@ -122,7 +122,7 @@ def read_backup(backup_path: Path) -> AgentBackup:
def suggested_filename_from_name_date(name: str, date_str: str) -> str:
"""Suggest a filename for the backup."""
date = dt_util.parse_datetime(date_str, raise_on_error=True)
return "_".join(f"{name} - {date.strftime('%Y-%m-%d %H.%M %S%f')}.tar".split())
return "_".join(f"{name} {date.strftime('%Y-%m-%d %H.%M %S%f')}.tar".split())
def suggested_filename(backup: AgentBackup) -> str:
@@ -140,7 +140,7 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
title=adapter_title(adapter, details), data={}
)
configured_addresses = self._async_current_ids()
configured_addresses = self._async_current_ids(include_ignore=False)
bluetooth_adapters = get_adapters()
await bluetooth_adapters.refresh()
self._adapters = bluetooth_adapters.adapters
@@ -155,12 +155,8 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
and not (system == "Linux" and details[ADAPTER_ADDRESS] == DEFAULT_ADDRESS)
]
if not unconfigured_adapters:
ignored_adapters = len(
self._async_current_entries(include_ignore=True)
) - len(self._async_current_entries(include_ignore=False))
return self.async_abort(
reason="no_adapters",
description_placeholders={"ignored_adapters": str(ignored_adapters)},
)
if len(unconfigured_adapters) == 1:
self._adapter = list(self._adapters)[0]
@@ -23,7 +23,7 @@
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"no_adapters": "No unconfigured Bluetooth adapters found. There are {ignored_adapters} ignored adapters."
"no_adapters": "No unconfigured Bluetooth adapters found."
}
},
"options": {
+26 -6
View File
@@ -39,6 +39,7 @@ from homeassistant.components.backup import (
ManagerBackup,
NewBackup,
RestoreBackupEvent,
RestoreBackupStage,
RestoreBackupState,
WrittenBackup,
async_get_manager as async_get_backup_manager,
@@ -548,6 +549,14 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
@callback
def on_job_progress(data: Mapping[str, Any]) -> None:
"""Handle backup restore progress."""
if not (stage := try_parse_enum(RestoreBackupStage, data.get("stage"))):
_LOGGER.debug("Unknown restore stage: %s", data.get("stage"))
else:
on_progress(
RestoreBackupEvent(
reason=None, stage=stage, state=RestoreBackupState.IN_PROGRESS
)
)
if data.get("done") is True:
restore_complete.set()
restore_errors.extend(data.get("errors", []))
@@ -574,15 +583,26 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
_LOGGER.debug("Found restore job ID %s in environment", restore_job_id)
sent_event = False
@callback
def on_job_progress(data: Mapping[str, Any]) -> None:
"""Handle backup restore progress."""
nonlocal sent_event
if not (stage := try_parse_enum(RestoreBackupStage, data.get("stage"))):
_LOGGER.debug("Unknown restore stage: %s", data.get("stage"))
if data.get("done") is not True:
on_progress(
RestoreBackupEvent(
reason="", stage=None, state=RestoreBackupState.IN_PROGRESS
if stage or not sent_event:
sent_event = True
on_progress(
RestoreBackupEvent(
reason=None,
stage=stage,
state=RestoreBackupState.IN_PROGRESS,
)
)
)
return
restore_errors = data.get("errors", [])
@@ -592,14 +612,14 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
on_progress(
RestoreBackupEvent(
reason="unknown_error",
stage=None,
stage=stage,
state=RestoreBackupState.FAILED,
)
)
else:
on_progress(
RestoreBackupEvent(
reason="", stage=None, state=RestoreBackupState.COMPLETED
reason=None, stage=stage, state=RestoreBackupState.COMPLETED
)
)
on_progress(IdleEvent())
@@ -35,5 +35,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/led_ble",
"iot_class": "local_polling",
"requirements": ["bluetooth-data-tools==1.23.3", "led-ble==1.1.4"]
"requirements": ["bluetooth-data-tools==1.23.3", "led-ble==1.1.6"]
}
@@ -35,13 +35,13 @@ class StatelessAssistAPI(llm.AssistAPI):
"""Return the prompt for the exposed entities."""
prompt = []
if exposed_entities:
if exposed_entities and exposed_entities["entities"]:
prompt.append(
"An overview of the areas and the devices in this smart home:"
)
entities = [
{k: v for k, v in entity_info.items() if k in EXPOSED_ENTITY_FIELDS}
for entity_info in exposed_entities.values()
for entity_info in exposed_entities["entities"].values()
]
prompt.append(yaml_util.dump(list(entities)))
@@ -11,7 +11,7 @@
"documentation": "https://www.home-assistant.io/integrations/smlight",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["pysmlight==0.1.6"],
"requirements": ["pysmlight==0.1.7"],
"zeroconf": [
{
"type": "_slzb-06._tcp.local."
@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/tesla_fleet",
"iot_class": "cloud_polling",
"loggers": ["tesla-fleet-api"],
"requirements": ["tesla-fleet-api==0.9.6"]
"requirements": ["tesla-fleet-api==0.9.8"]
}
+15 -14
View File
@@ -303,8 +303,8 @@ VEHICLE_TIME_DESCRIPTIONS: tuple[TeslaFleetTimeEntityDescription, ...] = (
),
)
ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
ENERGY_LIVE_DESCRIPTIONS: tuple[TeslaFleetSensorEntityDescription, ...] = (
TeslaFleetSensorEntityDescription(
key="solar_power",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfPower.WATT,
@@ -312,7 +312,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
suggested_display_precision=2,
device_class=SensorDeviceClass.POWER,
),
SensorEntityDescription(
TeslaFleetSensorEntityDescription(
key="energy_left",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
@@ -321,7 +321,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
device_class=SensorDeviceClass.ENERGY_STORAGE,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
TeslaFleetSensorEntityDescription(
key="total_pack_energy",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
@@ -331,14 +331,15 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
TeslaFleetSensorEntityDescription(
key="percentage_charged",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.BATTERY,
suggested_display_precision=2,
value_fn=lambda value: value or 0,
),
SensorEntityDescription(
TeslaFleetSensorEntityDescription(
key="battery_power",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfPower.WATT,
@@ -346,7 +347,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
suggested_display_precision=2,
device_class=SensorDeviceClass.POWER,
),
SensorEntityDescription(
TeslaFleetSensorEntityDescription(
key="load_power",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfPower.WATT,
@@ -354,7 +355,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
suggested_display_precision=2,
device_class=SensorDeviceClass.POWER,
),
SensorEntityDescription(
TeslaFleetSensorEntityDescription(
key="grid_power",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfPower.WATT,
@@ -362,7 +363,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
suggested_display_precision=2,
device_class=SensorDeviceClass.POWER,
),
SensorEntityDescription(
TeslaFleetSensorEntityDescription(
key="grid_services_power",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfPower.WATT,
@@ -370,7 +371,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
suggested_display_precision=2,
device_class=SensorDeviceClass.POWER,
),
SensorEntityDescription(
TeslaFleetSensorEntityDescription(
key="generator_power",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfPower.WATT,
@@ -379,7 +380,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
device_class=SensorDeviceClass.POWER,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
TeslaFleetSensorEntityDescription(
key="island_status",
options=[
"island_status_unknown",
@@ -550,12 +551,12 @@ class TeslaFleetVehicleTimeSensorEntity(TeslaFleetVehicleEntity, SensorEntity):
class TeslaFleetEnergyLiveSensorEntity(TeslaFleetEnergyLiveEntity, SensorEntity):
"""Base class for Tesla Fleet energy site metric sensors."""
entity_description: SensorEntityDescription
entity_description: TeslaFleetSensorEntityDescription
def __init__(
self,
data: TeslaFleetEnergyData,
description: SensorEntityDescription,
description: TeslaFleetSensorEntityDescription,
) -> None:
"""Initialize the sensor."""
self.entity_description = description
@@ -563,7 +564,7 @@ class TeslaFleetEnergyLiveSensorEntity(TeslaFleetEnergyLiveEntity, SensorEntity)
def _async_update_attrs(self) -> None:
"""Update the attributes of the sensor."""
self._attr_native_value = self._value
self._attr_native_value = self.entity_description.value_fn(self._value)
class TeslaFleetEnergyHistorySensorEntity(TeslaFleetEnergyHistoryEntity, SensorEntity):
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/teslemetry",
"iot_class": "cloud_polling",
"loggers": ["tesla-fleet-api"],
"requirements": ["tesla-fleet-api==0.9.6", "teslemetry-stream==0.6.6"]
"requirements": ["tesla-fleet-api==0.9.8", "teslemetry-stream==0.6.6"]
}
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/tessie",
"iot_class": "cloud_polling",
"loggers": ["tessie", "tesla-fleet-api"],
"requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.9.6"]
"requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.9.8"]
}
@@ -258,6 +258,7 @@ DESCRIPTIONS: tuple[TessieSensorEntityDescription, ...] = (
),
)
ENERGY_LIVE_DESCRIPTIONS: tuple[TessieSensorEntityDescription, ...] = (
TessieSensorEntityDescription(
key="solar_power",
@@ -292,6 +293,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[TessieSensorEntityDescription, ...] = (
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.BATTERY,
suggested_display_precision=2,
value_fn=lambda value: value or 0,
),
TessieSensorEntityDescription(
key="battery_power",
+1 -1
View File
@@ -25,7 +25,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2025
MINOR_VERSION: Final = 2
PATCH_VERSION: Final = "0b9"
PATCH_VERSION: Final = "0b10"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 0)
+41 -28
View File
@@ -326,7 +326,7 @@ class AssistAPI(API):
def _async_get_api_prompt(
self, llm_context: LLMContext, exposed_entities: dict | None
) -> str:
if not exposed_entities:
if not exposed_entities or not exposed_entities["entities"]:
return (
"Only if the user wants to control a device, tell them to expose entities "
"to their voice assistant in Home Assistant."
@@ -389,11 +389,11 @@ class AssistAPI(API):
"""Return the prompt for the API for exposed entities."""
prompt = []
if exposed_entities:
if exposed_entities and exposed_entities["entities"]:
prompt.append(
"An overview of the areas and the devices in this smart home:"
)
prompt.append(yaml_util.dump(list(exposed_entities.values())))
prompt.append(yaml_util.dump(list(exposed_entities["entities"].values())))
return prompt
@@ -425,8 +425,9 @@ class AssistAPI(API):
exposed_domains: set[str] | None = None
if exposed_entities is not None:
exposed_domains = {
split_entity_id(entity_id)[0] for entity_id in exposed_entities
info["domain"] for info in exposed_entities["entities"].values()
}
intent_handlers = [
intent_handler
for intent_handler in intent_handlers
@@ -438,25 +439,29 @@ class AssistAPI(API):
IntentTool(self.cached_slugify(intent_handler.intent_type), intent_handler)
for intent_handler in intent_handlers
]
if exposed_domains and CALENDAR_DOMAIN in exposed_domains:
tools.append(CalendarGetEventsTool())
if llm_context.assistant is not None:
for state in self.hass.states.async_all(SCRIPT_DOMAIN):
if not async_should_expose(
self.hass, llm_context.assistant, state.entity_id
):
continue
if exposed_entities:
if exposed_entities[CALENDAR_DOMAIN]:
names = []
for info in exposed_entities[CALENDAR_DOMAIN].values():
names.extend(info["names"].split(", "))
tools.append(CalendarGetEventsTool(names))
tools.append(ScriptTool(self.hass, state.entity_id))
tools.extend(
ScriptTool(self.hass, script_entity_id)
for script_entity_id in exposed_entities[SCRIPT_DOMAIN]
)
return tools
def _get_exposed_entities(
hass: HomeAssistant, assistant: str
) -> dict[str, dict[str, Any]]:
"""Get exposed entities."""
) -> dict[str, dict[str, dict[str, Any]]]:
"""Get exposed entities.
Splits out calendars and scripts.
"""
area_registry = ar.async_get(hass)
entity_registry = er.async_get(hass)
device_registry = dr.async_get(hass)
@@ -477,12 +482,13 @@ def _get_exposed_entities(
}
entities = {}
data: dict[str, dict[str, Any]] = {
SCRIPT_DOMAIN: {},
CALENDAR_DOMAIN: {},
}
for state in hass.states.async_all():
if (
not async_should_expose(hass, assistant, state.entity_id)
or state.domain == SCRIPT_DOMAIN
):
if not async_should_expose(hass, assistant, state.entity_id):
continue
description: str | None = None
@@ -529,9 +535,13 @@ def _get_exposed_entities(
}:
info["attributes"] = attributes
entities[state.entity_id] = info
if state.domain in data:
data[state.domain][state.entity_id] = info
else:
entities[state.entity_id] = info
return entities
data["entities"] = entities
return data
def _selector_serializer(schema: Any) -> Any: # noqa: C901
@@ -813,15 +823,18 @@ class CalendarGetEventsTool(Tool):
name = "calendar_get_events"
description = (
"Get events from a calendar. "
"When asked when something happens, search the whole week. "
"When asked if something happens, search the whole week. "
"Results are RFC 5545 which means 'end' is exclusive."
)
parameters = vol.Schema(
{
vol.Required("calendar"): cv.string,
vol.Required("range"): vol.In(["today", "week"]),
}
)
def __init__(self, calendars: list[str]) -> None:
"""Init the get events tool."""
self.parameters = vol.Schema(
{
vol.Required("calendar"): vol.In(calendars),
vol.Required("range"): vol.In(["today", "week"]),
}
)
async def async_call(
self, hass: HomeAssistant, tool_input: ToolInput, llm_context: LLMContext
+18 -11
View File
@@ -6,6 +6,7 @@ from abc import abstractmethod
import asyncio
from collections.abc import Awaitable, Callable, Coroutine, Generator
from datetime import datetime, timedelta
from functools import partial
import logging
from random import randint
from time import monotonic
@@ -103,7 +104,8 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]):
randint(event.RANDOM_MICROSECOND_MIN, event.RANDOM_MICROSECOND_MAX) / 10**6
)
self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {}
self._listeners: dict[int, tuple[CALLBACK_TYPE, object | None]] = {}
self._last_listener_id: int = 0
self._unsub_refresh: CALLBACK_TYPE | None = None
self._unsub_shutdown: CALLBACK_TYPE | None = None
self._request_refresh_task: asyncio.TimerHandle | None = None
@@ -148,21 +150,26 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]):
) -> Callable[[], None]:
"""Listen for data updates."""
schedule_refresh = not self._listeners
@callback
def remove_listener() -> None:
"""Remove update listener."""
self._listeners.pop(remove_listener)
if not self._listeners:
self._unschedule_refresh()
self._listeners[remove_listener] = (update_callback, context)
self._last_listener_id += 1
self._listeners[self._last_listener_id] = (update_callback, context)
# This is the first listener, set up interval.
if schedule_refresh:
self._schedule_refresh()
return remove_listener
return partial(self.__async_remove_listener_internal, self._last_listener_id)
@callback
def __async_remove_listener_internal(self, listener_id: int) -> None:
"""Remove a listener.
This is an internal function that is not to be overridden
in subclasses as it may change in the future.
"""
self._listeners.pop(listener_id)
if not self._listeners:
self._unschedule_refresh()
self._debounced_refresh.async_cancel()
@callback
def async_update_listeners(self) -> None:
+1 -1
View File
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2025.2.0b9"
version = "2025.2.0b10"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
+3 -3
View File
@@ -1299,7 +1299,7 @@ ld2410-ble==0.1.1
leaone-ble==0.1.0
# homeassistant.components.led_ble
led-ble==1.1.4
led-ble==1.1.6
# homeassistant.components.lektrico
lektricowifi==0.0.43
@@ -2310,7 +2310,7 @@ pysmarty2==0.10.1
pysml==0.0.12
# homeassistant.components.smlight
pysmlight==0.1.6
pysmlight==0.1.7
# homeassistant.components.snmp
pysnmp==6.2.6
@@ -2854,7 +2854,7 @@ temperusb==1.6.1
# homeassistant.components.tesla_fleet
# homeassistant.components.teslemetry
# homeassistant.components.tessie
tesla-fleet-api==0.9.6
tesla-fleet-api==0.9.8
# homeassistant.components.powerwall
tesla-powerwall==0.5.2
+3 -3
View File
@@ -1098,7 +1098,7 @@ ld2410-ble==0.1.1
leaone-ble==0.1.0
# homeassistant.components.led_ble
led-ble==1.1.4
led-ble==1.1.6
# homeassistant.components.lektrico
lektricowifi==0.0.43
@@ -1882,7 +1882,7 @@ pysmarty2==0.10.1
pysml==0.0.12
# homeassistant.components.smlight
pysmlight==0.1.6
pysmlight==0.1.7
# homeassistant.components.snmp
pysnmp==6.2.6
@@ -2294,7 +2294,7 @@ temperusb==1.6.1
# homeassistant.components.tesla_fleet
# homeassistant.components.teslemetry
# homeassistant.components.tessie
tesla-fleet-api==0.9.6
tesla-fleet-api==0.9.8
# homeassistant.components.powerwall
tesla-powerwall==0.5.2
+1 -3
View File
@@ -103,9 +103,7 @@ async def test_upload(
assert resp.status == 201
assert open_mock.call_count == 1
assert move_mock.call_count == 1
assert (
move_mock.mock_calls[0].args[1].name == "Test_-_1970-01-01_00.00_00000000.tar"
)
assert move_mock.mock_calls[0].args[1].name == "Test_1970-01-01_00.00_00000000.tar"
@pytest.mark.usefixtures("read_backup")
+129 -13
View File
@@ -46,6 +46,7 @@ from homeassistant.components.backup.manager import (
RestoreBackupState,
WrittenBackup,
)
from homeassistant.components.backup.util import password_to_key
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import issue_registry as ir
@@ -359,8 +360,14 @@ async def test_create_backup_when_busy(
@pytest.mark.parametrize(
("parameters", "expected_error"),
[
({"agent_ids": []}, "At least one agent must be selected"),
({"agent_ids": ["non_existing"]}, "Invalid agents selected: ['non_existing']"),
(
{"agent_ids": []},
"At least one available backup agent must be selected, got []",
),
(
{"agent_ids": ["non_existing"]},
"At least one available backup agent must be selected, got ['non_existing']",
),
(
{"include_addons": ["ssl"], "include_all_addons": True},
"Cannot include all addons and specify specific addons",
@@ -410,6 +417,8 @@ async def test_create_backup_wrong_parameters(
"name",
"expected_name",
"expected_filename",
"expected_agent_ids",
"expected_failed_agent_ids",
"temp_file_unlink_call_count",
),
[
@@ -418,7 +427,9 @@ async def test_create_backup_wrong_parameters(
"backups",
None,
"Custom backup 2025.1.0",
"Custom_backup_2025.1.0_-_2025-01-30_05.42_12345678.tar",
"Custom_backup_2025.1.0_2025-01-30_05.42_12345678.tar",
[LOCAL_AGENT_ID],
[],
0,
),
(
@@ -427,6 +438,8 @@ async def test_create_backup_wrong_parameters(
None,
"Custom backup 2025.1.0",
"abc123.tar", # We don't use friendly name for temporary backups
["test.remote"],
[],
1,
),
(
@@ -434,7 +447,9 @@ async def test_create_backup_wrong_parameters(
"backups",
None,
"Custom backup 2025.1.0",
"Custom_backup_2025.1.0_-_2025-01-30_05.42_12345678.tar",
"Custom_backup_2025.1.0_2025-01-30_05.42_12345678.tar",
[LOCAL_AGENT_ID, "test.remote"],
[],
0,
),
(
@@ -442,7 +457,9 @@ async def test_create_backup_wrong_parameters(
"backups",
"custom_name",
"custom_name",
"custom_name_-_2025-01-30_05.42_12345678.tar",
"custom_name_2025-01-30_05.42_12345678.tar",
[LOCAL_AGENT_ID],
[],
0,
),
(
@@ -451,6 +468,8 @@ async def test_create_backup_wrong_parameters(
"custom_name",
"custom_name",
"abc123.tar", # We don't use friendly name for temporary backups
["test.remote"],
[],
1,
),
(
@@ -458,7 +477,20 @@ async def test_create_backup_wrong_parameters(
"backups",
"custom_name",
"custom_name",
"custom_name_-_2025-01-30_05.42_12345678.tar",
"custom_name_2025-01-30_05.42_12345678.tar",
[LOCAL_AGENT_ID, "test.remote"],
[],
0,
),
(
# Test we create a backup when at least one agent is available
[LOCAL_AGENT_ID, "test.unavailable"],
"backups",
"custom_name",
"custom_name",
"custom_name_2025-01-30_05.42_12345678.tar",
[LOCAL_AGENT_ID],
["test.unavailable"],
0,
),
],
@@ -486,6 +518,8 @@ async def test_initiate_backup(
name: str | None,
expected_name: str,
expected_filename: str,
expected_agent_ids: list[str],
expected_failed_agent_ids: list[str],
temp_file_unlink_call_count: int,
) -> None:
"""Test generate backup."""
@@ -620,13 +654,13 @@ async def test_initiate_backup(
"addons": [],
"agents": {
agent_id: {"protected": bool(password), "size": ANY}
for agent_id in agent_ids
for agent_id in expected_agent_ids
},
"backup_id": backup_id,
"database_included": include_database,
"date": ANY,
"extra_metadata": {"instance_id": "our_uuid", "with_automatic_settings": False},
"failed_agent_ids": [],
"failed_agent_ids": expected_failed_agent_ids,
"folders": [],
"homeassistant_included": True,
"homeassistant_version": "2025.1.0",
@@ -959,6 +993,7 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
@pytest.mark.parametrize(
(
"automatic_agents",
"create_backup_command",
"create_backup_side_effect",
"agent_upload_side_effect",
@@ -968,6 +1003,7 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
[
# No error
(
["test.remote"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
None,
None,
@@ -975,14 +1011,38 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
{},
),
(
["test.remote"],
{"type": "backup/generate_with_automatic_settings"},
None,
None,
True,
{},
),
# One agent unavailable
(
["test.remote", "test.unknown"],
{"type": "backup/generate", "agent_ids": ["test.remote", "test.unknown"]},
None,
None,
True,
{},
),
(
["test.remote", "test.unknown"],
{"type": "backup/generate_with_automatic_settings"},
None,
None,
True,
{
(DOMAIN, "automatic_backup_failed"): {
"translation_key": "automatic_backup_failed_upload_agents",
"translation_placeholders": {"failed_agents": "test.unknown"},
}
},
),
# Error raised in async_initiate_backup
(
["test.remote"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
Exception("Boom!"),
None,
@@ -990,6 +1050,7 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
{},
),
(
["test.remote"],
{"type": "backup/generate_with_automatic_settings"},
Exception("Boom!"),
None,
@@ -1003,6 +1064,7 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
),
# Error raised when awaiting the backup task
(
["test.remote"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
delayed_boom,
None,
@@ -1010,6 +1072,7 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
{},
),
(
["test.remote"],
{"type": "backup/generate_with_automatic_settings"},
delayed_boom,
None,
@@ -1023,6 +1086,7 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
),
# Error raised in async_upload_backup
(
["test.remote"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
None,
Exception("Boom!"),
@@ -1030,6 +1094,7 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
{},
),
(
["test.remote"],
{"type": "backup/generate_with_automatic_settings"},
None,
Exception("Boom!"),
@@ -1047,6 +1112,7 @@ async def test_create_backup_failure_raises_issue(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
create_backup: AsyncMock,
automatic_agents: list[str],
create_backup_command: dict[str, Any],
create_backup_side_effect: Exception | None,
agent_upload_side_effect: Exception | None,
@@ -1077,7 +1143,7 @@ async def test_create_backup_failure_raises_issue(
await ws_client.send_json_auto_id(
{
"type": "backup/config/update",
"create_backup": {"agent_ids": ["test.remote"]},
"create_backup": {"agent_ids": automatic_agents},
}
)
result = await ws_client.receive_json()
@@ -1611,7 +1677,7 @@ async def test_exception_platform_post(hass: HomeAssistant) -> None:
"agent_id=backup.local&agent_id=test.remote",
2,
1,
["Test_-_1970-01-01_00.00_00000000.tar"],
["Test_1970-01-01_00.00_00000000.tar"],
{TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123},
b"test",
0,
@@ -1620,7 +1686,7 @@ async def test_exception_platform_post(hass: HomeAssistant) -> None:
"agent_id=backup.local",
1,
1,
["Test_-_1970-01-01_00.00_00000000.tar"],
["Test_1970-01-01_00.00_00000000.tar"],
{},
None,
0,
@@ -3142,17 +3208,21 @@ async def test_restore_backup_file_error(
@pytest.mark.parametrize(
("commands", "password", "protected_backup"),
("commands", "agent_ids", "password", "protected_backup", "inner_tar_key"),
[
(
[],
["backup.local", "test.remote"],
None,
{"backup.local": False, "test.remote": False},
None,
),
(
[],
["backup.local", "test.remote"],
"hunter2",
{"backup.local": True, "test.remote": True},
password_to_key("hunter2"),
),
(
[
@@ -3164,8 +3234,10 @@ async def test_restore_backup_file_error(
},
}
],
["backup.local", "test.remote"],
"hunter2",
{"backup.local": False, "test.remote": False},
None, # None of the agents are protected
),
(
[
@@ -3177,8 +3249,10 @@ async def test_restore_backup_file_error(
},
}
],
["backup.local", "test.remote"],
"hunter2",
{"backup.local": False, "test.remote": True},
None, # Local agent is not protected
),
(
[
@@ -3190,8 +3264,10 @@ async def test_restore_backup_file_error(
},
}
],
["backup.local", "test.remote"],
"hunter2",
{"backup.local": True, "test.remote": False},
password_to_key("hunter2"), # Local agent is protected
),
(
[
@@ -3203,8 +3279,10 @@ async def test_restore_backup_file_error(
},
}
],
["backup.local", "test.remote"],
"hunter2",
{"backup.local": True, "test.remote": True},
password_to_key("hunter2"),
),
(
[
@@ -3216,8 +3294,40 @@ async def test_restore_backup_file_error(
},
}
],
["backup.local", "test.remote"],
None,
{"backup.local": False, "test.remote": False},
None, # No password supplied
),
(
[
{
"type": "backup/config/update",
"agents": {
"backup.local": {"protected": False},
"test.remote": {"protected": True},
},
}
],
["test.remote"],
"hunter2",
{"test.remote": True},
password_to_key("hunter2"),
),
(
[
{
"type": "backup/config/update",
"agents": {
"backup.local": {"protected": False},
"test.remote": {"protected": False},
},
}
],
["test.remote"],
"hunter2",
{"test.remote": False},
password_to_key("hunter2"), # Temporary backup protected when password set
),
],
)
@@ -3226,13 +3336,15 @@ async def test_initiate_backup_per_agent_encryption(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
generate_backup_id: MagicMock,
mocked_tarfile: Mock,
path_glob: MagicMock,
commands: dict[str, Any],
agent_ids: list[str],
password: str | None,
protected_backup: dict[str, bool],
inner_tar_key: bytes | None,
) -> None:
"""Test generate backup where encryption is selectively set on agents."""
agent_ids = ["backup.local", "test.remote"]
local_agent = local_backup_platform.CoreLocalBackupAgent(hass)
remote_agent = BackupAgentTest("remote", backups=[])
@@ -3308,6 +3420,10 @@ async def test_initiate_backup_per_agent_encryption(
await hass.async_block_till_done()
mocked_tarfile.return_value.create_inner_tar.assert_called_once_with(
ANY, gzip=True, key=inner_tar_key
)
result = await ws_client.receive_json()
assert result["event"] == {
"manager_state": BackupManagerState.CREATE_BACKUP,
+4 -4
View File
@@ -529,10 +529,10 @@ async def test_encrypted_backup_streamer_error(hass: HomeAssistant) -> None:
@pytest.mark.parametrize(
("name", "resulting_filename"),
[
("test", "test_-_2025-01-30_13.42_12345678.tar"),
(" leading spaces", "leading_spaces_-_2025-01-30_13.42_12345678.tar"),
("trailing spaces ", "trailing_spaces_-_2025-01-30_13.42_12345678.tar"),
("double spaces ", "double_spaces_-_2025-01-30_13.42_12345678.tar"),
("test", "test_2025-01-30_13.42_12345678.tar"),
(" leading spaces", "leading_spaces_2025-01-30_13.42_12345678.tar"),
("trailing spaces ", "trailing_spaces_2025-01-30_13.42_12345678.tar"),
("double spaces ", "double_spaces_2025-01-30_13.42_12345678.tar"),
],
)
def test_suggested_filename(name: str, resulting_filename: str) -> None:
+17 -5
View File
@@ -517,8 +517,10 @@ async def test_options_flow_local_no_passive_support(hass: HomeAssistant) -> Non
@pytest.mark.usefixtures("one_adapter")
async def test_async_step_user_linux_adapter_is_ignored(hass: HomeAssistant) -> None:
"""Test we give a hint that the adapter is ignored."""
async def test_async_step_user_linux_adapter_replace_ignored(
hass: HomeAssistant,
) -> None:
"""Test we can replace an ignored adapter from user flow."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id="00:00:00:00:00:01",
@@ -530,9 +532,19 @@ async def test_async_step_user_linux_adapter_is_ignored(hass: HomeAssistant) ->
context={"source": config_entries.SOURCE_USER},
data={},
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "no_adapters"
assert result["description_placeholders"] == {"ignored_adapters": "1"}
with (
patch("homeassistant.components.bluetooth.async_setup", return_value=True),
patch(
"homeassistant.components.bluetooth.async_setup_entry", return_value=True
) as mock_setup_entry,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result2["type"] is FlowResultType.CREATE_ENTRY
assert result2["title"] == "ACME Bluetooth Adapter 5.0 (00:00:00:00:00:01)"
assert result2["data"] == {}
assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.usefixtures("enable_bluetooth")
@@ -140,7 +140,7 @@
tuple(
dict({
'description': '{"addons": [{"name": "Test", "slug": "test", "version": "1.0.0"}], "backup_id": "test-backup", "date": "2025-01-01T01:23:45.678Z", "database_included": true, "extra_metadata": {"with_automatic_settings": false}, "folders": [], "homeassistant_included": true, "homeassistant_version": "2024.12.0", "name": "Test", "protected": false, "size": 987}',
'name': 'Test_-_2025-01-01_01.23_45678000.tar',
'name': 'Test_2025-01-01_01.23_45678000.tar',
'parents': list([
'HA folder ID',
]),
@@ -211,7 +211,7 @@
tuple(
dict({
'description': '{"addons": [{"name": "Test", "slug": "test", "version": "1.0.0"}], "backup_id": "test-backup", "date": "2025-01-01T01:23:45.678Z", "database_included": true, "extra_metadata": {"with_automatic_settings": false}, "folders": [], "homeassistant_included": true, "homeassistant_version": "2024.12.0", "name": "Test", "protected": false, "size": 987}',
'name': 'Test_-_2025-01-01_01.23_45678000.tar',
'name': 'Test_2025-01-01_01.23_45678000.tar',
'parents': list([
'new folder id',
]),
+188 -3
View File
@@ -887,7 +887,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions(
"supervisor.backup_request_date": "2025-01-30T05:42:12.345678-08:00",
"with_automatic_settings": False,
},
filename=PurePath("Test_-_2025-01-30_05.42_12345678.tar"),
filename=PurePath("Test_2025-01-30_05.42_12345678.tar"),
folders={"ssl"},
homeassistant_exclude_database=False,
homeassistant=True,
@@ -1400,7 +1400,7 @@ async def test_reader_writer_create_per_agent_encryption(
upload_locations
)
for call in supervisor_client.backups.upload_backup.mock_calls:
assert call.args[1].filename == PurePath("Test_-_2025-01-30_05.42_12345678.tar")
assert call.args[1].filename == PurePath("Test_2025-01-30_05.42_12345678.tar")
upload_call_locations: set = call.args[1].location
assert len(upload_call_locations) == 1
assert upload_call_locations.pop() in upload_locations
@@ -2032,6 +2032,109 @@ async def test_reader_writer_restore(
assert response["result"] is None
@pytest.mark.usefixtures("hassio_client", "setup_integration")
async def test_reader_writer_restore_report_progress(
    hass: HomeAssistant,
    hass_ws_client: WebSocketGenerator,
    supervisor_client: AsyncMock,
) -> None:
    """Test restoring a backup.

    Verifies that supervisor job events received over the websocket are
    translated into backup-manager ``restore_backup`` progress events:
    known stages are forwarded, ``None``/unknown stages are dropped, and
    the ``done`` event completes the restore.
    """
    client = await hass_ws_client(hass)
    # Mock the supervisor client so the restore call returns a trackable job.
    supervisor_client.backups.partial_restore.return_value.job_id = TEST_JOB_ID
    supervisor_client.backups.list.return_value = [TEST_BACKUP]
    supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
    supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
    # Subscribe first so every state transition below is observable.
    await client.send_json_auto_id({"type": "backup/subscribe_events"})
    response = await client.receive_json()
    assert response["event"] == {
        "manager_state": "idle",
    }
    response = await client.receive_json()
    assert response["success"]
    # Start the restore; manager should immediately report in_progress.
    await client.send_json_auto_id(
        {"type": "backup/restore", "agent_id": "hassio.local", "backup_id": "abc123"}
    )
    response = await client.receive_json()
    assert response["event"] == {
        "manager_state": "restore_backup",
        "reason": None,
        "stage": None,
        "state": "in_progress",
    }
    supervisor_client.backups.partial_restore.assert_called_once_with(
        "abc123",
        supervisor_backups.PartialRestoreOptions(
            addons=None,
            background=True,
            folders=None,
            homeassistant=True,
            location=None,
            password=None,
        ),
    )
    supervisor_event_base = {"uuid": TEST_JOB_ID, "reference": "test_slug"}
    supervisor_events = [
        supervisor_event_base | {"done": False, "stage": "addon_repositories"},
        supervisor_event_base | {"done": False, "stage": None},  # Will be skipped
        supervisor_event_base | {"done": False, "stage": "unknown"},  # Will be skipped
        supervisor_event_base | {"done": False, "stage": "home_assistant"},
        supervisor_event_base | {"done": True, "stage": "addons"},
    ]
    # Only events carrying a stage known to RestoreBackupStage are forwarded.
    expected_manager_events = [
        "addon_repositories",
        "home_assistant",
        "addons",
    ]
    for supervisor_event in supervisor_events:
        await client.send_json_auto_id(
            {
                "type": "supervisor/event",
                "data": {"event": "job", "data": supervisor_event},
            }
        )
    # Command acks and manager events interleave on the same socket, so
    # drain them together and sort by message shape.
    acks = 0
    events = []
    for _ in range(len(supervisor_events) + len(expected_manager_events)):
        response = await client.receive_json()
        if "event" in response:
            events.append(response)
            continue
        assert response["success"]
        acks += 1
    assert acks == len(supervisor_events)
    assert len(events) == len(expected_manager_events)
    for i, event in enumerate(events):
        assert event["event"] == {
            "manager_state": "restore_backup",
            "reason": None,
            "stage": expected_manager_events[i],
            "state": "in_progress",
        }
    # The done=True job event completes the restore...
    response = await client.receive_json()
    assert response["event"] == {
        "manager_state": "restore_backup",
        "reason": None,
        "stage": None,
        "state": "completed",
    }
    # ...after which the manager returns to idle.
    response = await client.receive_json()
    assert response["event"] == {"manager_state": "idle"}
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] is None
@pytest.mark.parametrize(
("supervisor_error", "expected_error_code", "expected_reason"),
[
@@ -2261,7 +2364,7 @@ async def test_reader_writer_restore_wrong_parameters(
TEST_JOB_DONE,
{
"manager_state": "restore_backup",
"reason": "",
"reason": None,
"stage": None,
"state": "completed",
},
@@ -2302,6 +2405,88 @@ async def test_restore_progress_after_restart(
assert response["result"]["state"] == "idle"
@pytest.mark.usefixtures("hassio_client")
async def test_restore_progress_after_restart_report_progress(
    hass: HomeAssistant,
    hass_ws_client: WebSocketGenerator,
    supervisor_client: AsyncMock,
) -> None:
    """Test restore backup progress after restart.

    Simulates Core starting up while a supervisor restore job (identified
    via the RESTORE_JOB_ID_ENV environment variable) is still running, and
    verifies the manager resumes reporting stage progress for that job.
    """
    supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
    # Setting RESTORE_JOB_ID_ENV makes the backup integration attach to the
    # in-flight restore job during setup instead of starting idle.
    with patch.dict(os.environ, MOCK_ENVIRON | {RESTORE_JOB_ID_ENV: TEST_JOB_ID}):
        assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}})
    client = await hass_ws_client(hass)
    await client.send_json_auto_id({"type": "backup/subscribe_events"})
    response = await client.receive_json()
    # Manager starts in restore_backup state, not idle, because a restore
    # job was already in progress at startup.
    assert response["event"] == {
        "manager_state": "restore_backup",
        "reason": None,
        "stage": None,
        "state": "in_progress",
    }
    response = await client.receive_json()
    assert response["success"]
    supervisor_event_base = {"uuid": TEST_JOB_ID, "reference": "test_slug"}
    supervisor_events = [
        supervisor_event_base | {"done": False, "stage": "addon_repositories"},
        supervisor_event_base | {"done": False, "stage": None},  # Will be skipped
        supervisor_event_base | {"done": False, "stage": "unknown"},  # Will be skipped
        supervisor_event_base | {"done": False, "stage": "home_assistant"},
        supervisor_event_base | {"done": True, "stage": "addons"},
    ]
    # None/unknown stages are dropped; the done=True event carries the
    # final stage together with the completed state.
    expected_manager_events = ["addon_repositories", "home_assistant", "addons"]
    expected_manager_states = ["in_progress", "in_progress", "completed"]
    for supervisor_event in supervisor_events:
        await client.send_json_auto_id(
            {
                "type": "supervisor/event",
                "data": {"event": "job", "data": supervisor_event},
            }
        )
    # Command acks and manager events interleave; drain and separate them.
    acks = 0
    events = []
    for _ in range(len(supervisor_events) + len(expected_manager_events)):
        response = await client.receive_json()
        if "event" in response:
            events.append(response)
            continue
        assert response["success"]
        acks += 1
    assert acks == len(supervisor_events)
    assert len(events) == len(expected_manager_events)
    for i, event in enumerate(events):
        assert event["event"] == {
            "manager_state": "restore_backup",
            "reason": None,
            "stage": expected_manager_events[i],
            "state": expected_manager_states[i],
        }
    response = await client.receive_json()
    assert response["event"] == {"manager_state": "idle"}
    # The completed restore remains queryable as the last non-idle event.
    await client.send_json_auto_id({"type": "backup/info"})
    response = await client.receive_json()
    assert response["success"]
    assert response["result"]["last_non_idle_event"] == {
        "manager_state": "restore_backup",
        "reason": None,
        "stage": "addons",
        "state": "completed",
    }
    assert response["result"]["state"] == "idle"
@pytest.mark.usefixtures("hassio_client")
async def test_restore_progress_after_restart_unknown_job(
hass: HomeAssistant,
+5
View File
@@ -2,6 +2,7 @@
from datetime import timedelta
from unittest.mock import MagicMock
import weakref
from freezegun.api import FrozenDateTimeFactory
from homewizard_energy.errors import DisabledError, UnauthorizedError
@@ -25,6 +26,9 @@ async def test_load_unload_v1(
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
weak_ref = weakref.ref(mock_config_entry.runtime_data)
assert weak_ref() is not None
assert mock_config_entry.state is ConfigEntryState.LOADED
assert len(mock_homewizardenergy.combined.mock_calls) == 1
@@ -32,6 +36,7 @@ async def test_load_unload_v1(
await hass.async_block_till_done()
assert mock_config_entry.state is ConfigEntryState.NOT_LOADED
assert weak_ref() is None
async def test_load_unload_v2(
+1 -2
View File
@@ -19,8 +19,7 @@ from sqlalchemy import (
Text,
distinct,
)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy.orm import declarative_base, relationship
from sqlalchemy.orm.session import Session
from homeassistant.core import Context, Event, EventOrigin, State, split_entity_id
+3 -3
View File
@@ -36,7 +36,7 @@ from .consts import HOST, MACS, PASSWORD, PORT, SERIAL, USE_SSL, USERNAME
from tests.common import MockConfigEntry
from tests.typing import ClientSessionGenerator, WebSocketGenerator
BASE_FILENAME = "Automatic_backup_2025.2.0.dev0_-_2025-01-09_20.14_35457323"
BASE_FILENAME = "Automatic_backup_2025.2.0.dev0_2025-01-09_20.14_35457323"
class MockStreamReaderChunked(MockStreamReader):
@@ -525,7 +525,7 @@ async def test_agents_upload(
protected=True,
size=0,
)
base_filename = "Test_-_1970-01-01_00.00_00000000"
base_filename = "Test_1970-01-01_00.00_00000000"
with (
patch(
@@ -576,7 +576,7 @@ async def test_agents_upload_error(
protected=True,
size=0,
)
base_filename = "Test_-_1970-01-01_00.00_00000000"
base_filename = "Test_1970-01-01_00.00_00000000"
# fail to upload the tar file
with (
+12 -3
View File
@@ -1170,7 +1170,9 @@ async def test_selector_serializer(
async def test_calendar_get_events_tool(hass: HomeAssistant) -> None:
"""Test the calendar get events tool."""
assert await async_setup_component(hass, "homeassistant", {})
hass.states.async_set("calendar.test_calendar", "on", {"friendly_name": "Test"})
hass.states.async_set(
"calendar.test_calendar", "on", {"friendly_name": "Mock Calendar Name"}
)
async_expose_entity(hass, "conversation", "calendar.test_calendar", True)
context = Context()
llm_context = llm.LLMContext(
@@ -1182,7 +1184,11 @@ async def test_calendar_get_events_tool(hass: HomeAssistant) -> None:
device_id=None,
)
api = await llm.async_get_api(hass, "assist", llm_context)
assert [tool for tool in api.tools if tool.name == "calendar_get_events"]
tool = next(
(tool for tool in api.tools if tool.name == "calendar_get_events"), None
)
assert tool is not None
assert tool.parameters.schema["calendar"].container == ["Mock Calendar Name"]
calls = async_mock_service(
hass,
@@ -1212,7 +1218,10 @@ async def test_calendar_get_events_tool(hass: HomeAssistant) -> None:
tool_input = llm.ToolInput(
tool_name="calendar_get_events",
tool_args={"calendar": "calendar.test_calendar", "range": "today"},
tool_args={
"calendar": "Mock Calendar Name",
"range": "today",
},
)
now = dt_util.now()
with patch("homeassistant.util.dt.now", return_value=now):