Make local backup a backup agent (#130623)

* Make local backup a backup agent

* Adjust

* Adjust

* Adjust

* Adjust tests

* Adjust

* Adjust

* Adjust docstring

* Adjust

* Protect members of CoreLocalBackupAgent

* Remove redundant check for file

* Make the backup.create service use the first local agent

* Add BackupAgent.async_get_backup

* Fix some TODOs

* Add support for downloading backup from a remote agent

* Fix restore

* Fix test

* Adjust kitchen_sink test

* Remove unused method BackupManager.async_get_backup_path

* Re-enable kitchen sink test

* Remove BaseBackupManager.async_upload_backup

* Support restore from remote agent

* Fix review comments
Authored by Erik Montnemery on 2024-11-18 17:10:30 +01:00, committed by GitHub
parent 257c750b59
commit a47a70df52
15 changed files with 687 additions and 492 deletions

View File

@@ -33,6 +33,7 @@ SERVICE_CREATE_SCHEMA = vol.Schema({vol.Optional(CONF_PASSWORD): str})
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Backup integration."""
hass.data[DOMAIN] = backup_manager = BackupManager(hass)
await backup_manager.async_setup()
with_hassio = is_hassio(hass)
@@ -48,8 +49,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_handle_create_service(call: ServiceCall) -> None:
"""Service handler for creating backups."""
agent_id = list(backup_manager.local_backup_agents)[0]
await backup_manager.async_create_backup(
addons_included=None,
agent_ids=[agent_id],
database_included=True,
folders_included=None,
name=None,

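The `backup.create` service keeps its schema; only the handler changes, picking the first registered local agent. A minimal call sketch (assumes a set-up `hass` instance; illustration only, not part of the diff):

```python
from homeassistant.core import HomeAssistant


async def trigger_backup(hass: HomeAssistant) -> None:
    """Call backup.create, which now delegates to the first local backup agent."""
    await hass.services.async_call("backup", "create", blocking=True)
```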
View File

@@ -56,6 +56,26 @@ class BackupAgent(abc.ABC):
async def async_list_backups(self, **kwargs: Any) -> list[UploadedBackup]:
"""List backups."""
@abc.abstractmethod
async def async_get_backup(
self,
*,
slug: str,
**kwargs: Any,
) -> UploadedBackup | None:
"""Return a backup."""
class LocalBackupAgent(BackupAgent):
"""Local backup agent."""
@abc.abstractmethod
def get_backup_path(self, slug: str) -> Path:
"""Return the local path to a backup.
The method should return the path to the backup file with the specified slug.
"""
class BackupAgentPlatformProtocol(Protocol):
"""Define the format of backup platforms which implement backup agents."""

View File

@@ -0,0 +1,150 @@
"""Local backup support for Core and Container installations."""
from __future__ import annotations
from dataclasses import asdict, dataclass
import json
from pathlib import Path
from tarfile import TarError
from typing import Any
from homeassistant.core import HomeAssistant
from .agent import BackupAgent, LocalBackupAgent, UploadedBackup
from .const import LOGGER
from .models import BackupUploadMetadata
from .util import read_backup
async def async_get_backup_agents(
hass: HomeAssistant,
**kwargs: Any,
) -> list[BackupAgent]:
"""Return the local backup agent."""
return [CoreLocalBackupAgent(hass)]
@dataclass(slots=True)
class LocalBackup(UploadedBackup):
"""Local backup class."""
path: Path
def as_dict(self) -> dict:
"""Return a dict representation of this backup."""
return {**asdict(self), "path": self.path.as_posix()}
class CoreLocalBackupAgent(LocalBackupAgent):
"""Local backup agent for Core and Container installations."""
name = "local"
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the backup agent."""
super().__init__()
self._hass = hass
self._backup_dir = Path(hass.config.path("backups"))
self._backups: dict[str, LocalBackup] = {}
self._loaded_backups = False
async def load_backups(self) -> None:
"""Load data of stored backup files."""
backups = await self._hass.async_add_executor_job(self._read_backups)
LOGGER.debug("Loaded %s local backups", len(backups))
self._backups = backups
self._loaded_backups = True
def _read_backups(self) -> dict[str, LocalBackup]:
"""Read backups from disk."""
backups: dict[str, LocalBackup] = {}
for backup_path in self._backup_dir.glob("*.tar"):
try:
base_backup = read_backup(backup_path)
backup = LocalBackup(
id=base_backup.slug,
slug=base_backup.slug,
name=base_backup.name,
date=base_backup.date,
path=backup_path,
size=round(backup_path.stat().st_size / 1_048_576, 2),
protected=base_backup.protected,
)
backups[backup.slug] = backup
except (OSError, TarError, json.JSONDecodeError, KeyError) as err:
LOGGER.warning("Unable to read backup %s: %s", backup_path, err)
return backups
async def async_download_backup(
self,
*,
id: str,
path: Path,
**kwargs: Any,
) -> None:
"""Download a backup file."""
raise NotImplementedError
async def async_upload_backup(
self,
*,
path: Path,
metadata: BackupUploadMetadata,
**kwargs: Any,
) -> None:
"""Upload a backup."""
self._backups[metadata.slug] = LocalBackup(
id=metadata.slug, # Do we need another ID?
slug=metadata.slug,
name=metadata.name,
date=metadata.date,
path=path,
size=round(path.stat().st_size / 1_048_576, 2),
protected=metadata.protected,
)
async def async_list_backups(self, **kwargs: Any) -> list[UploadedBackup]:
"""List backups."""
if not self._loaded_backups:
await self.load_backups()
return list(self._backups.values())
async def async_get_backup(
self,
*,
slug: str,
**kwargs: Any,
) -> UploadedBackup | None:
"""Return a backup."""
if not self._loaded_backups:
await self.load_backups()
if not (backup := self._backups.get(slug)):
return None
if not await self._hass.async_add_executor_job(backup.path.exists):
LOGGER.debug(
(
"Removing tracked backup (%s) that does not exists on the expected"
" path %s"
),
backup.slug,
backup.path,
)
self._backups.pop(slug)
return None
return backup
def get_backup_path(self, slug: str) -> Path:
"""Return the local path to a backup."""
return self._backup_dir / f"{slug}.tar"
async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None:
"""Remove a backup."""
if (backup := await self.async_get_backup(slug=slug)) is None:
return
await self._hass.async_add_executor_job(backup.path.unlink, True) # type: ignore[attr-defined]
LOGGER.debug("Removed backup located at %s", backup.path) # type: ignore[attr-defined]
self._backups.pop(slug)
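Two conventions from this new module recur throughout the rest of the diff: an agent is addressed as `<integration domain>.<agent name>`, which is why `backup.local` (`LOCAL_AGENT_ID` in manager.py) appears in the service handler, websocket commands, and tests below, and backup sizes are stored as MiB rounded to two decimals. A standalone restatement, for reference only:

```python
from pathlib import Path

DOMAIN = "backup"       # integration domain
AGENT_NAME = "local"    # CoreLocalBackupAgent.name
LOCAL_AGENT_ID = f"{DOMAIN}.{AGENT_NAME}"  # -> "backup.local"


def backup_size_mib(tar_path: Path) -> float:
    """Size convention used for LocalBackup.size: bytes -> MiB, two decimals."""
    return round(tar_path.stat().st_size / 1_048_576, 2)
```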

View File

@@ -11,6 +11,7 @@ if TYPE_CHECKING:
from .manager import BaseBackupManager
from .models import BaseBackup
BUF_SIZE = 2**20 * 4 # 4MB
DOMAIN = "backup"
DATA_MANAGER: HassKey[BaseBackupManager[BaseBackup]] = HassKey(DOMAIN)
LOGGER = getLogger(__package__)

View File

@@ -17,6 +17,9 @@ from homeassistant.util import slugify
from .const import DATA_MANAGER
from .manager import BackupManager
# pylint: disable=fixme
# TODO: Don't forget to remove this when the implementation is complete
@callback
def async_register_http_views(hass: HomeAssistant) -> None:
@@ -39,15 +42,32 @@ class DownloadBackupView(HomeAssistantView):
"""Download a backup file."""
if not request["hass_user"].is_admin:
return Response(status=HTTPStatus.UNAUTHORIZED)
try:
agent_id = request.query.getone("agent_id")
except KeyError:
return Response(status=HTTPStatus.BAD_REQUEST)
manager = cast(BackupManager, request.app[KEY_HASS].data[DATA_MANAGER])
backup = await manager.async_get_backup(slug=slug)
if agent_id not in manager.backup_agents:
return Response(status=HTTPStatus.BAD_REQUEST)
agent = manager.backup_agents[agent_id]
backup = await agent.async_get_backup(slug=slug)
if backup is None or not backup.path.exists():
# We don't need to check if the path exists, aiohttp.FileResponse will handle
# that
if backup is None:
return Response(status=HTTPStatus.NOT_FOUND)
if agent_id in manager.local_backup_agents:
local_agent = manager.local_backup_agents[agent_id]
path = local_agent.get_backup_path(slug=slug)
else:
path = manager.temp_backup_dir / f"{slug}.tar"
await agent.async_download_backup(id=backup.id, path=path)
# TODO: We need a callback to remove the temp file once the download is complete
return FileResponse(
path=backup.path.as_posix(),
path=path.as_posix(),
headers={
CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
},
@@ -63,12 +83,16 @@ class UploadBackupView(HomeAssistantView):
@require_admin
async def post(self, request: Request) -> Response:
"""Upload a backup file."""
try:
agent_ids = request.query.getall("agent_id")
except KeyError:
return Response(status=HTTPStatus.BAD_REQUEST)
manager = request.app[KEY_HASS].data[DATA_MANAGER]
reader = await request.multipart()
contents = cast(BodyPartReader, await reader.next())
try:
await manager.async_receive_backup(contents=contents)
await manager.async_receive_backup(contents=contents, agent_ids=agent_ids)
except OSError as err:
return Response(
body=f"Can't write backup file {err}",

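From the client side, downloads must now name the agent that holds the backup, and uploads can fan out to several agents by repeating the query parameter; a missing `agent_id` returns 400. A hedged usage sketch (only the endpoints and query parameters come from this hunk; the base URL, token handling, and agent ids are assumptions):

```python
"""Client-side sketch of the new agent_id query parameters (illustrative only)."""
import asyncio

import aiohttp

BASE = "http://homeassistant.local:8123"  # assumed instance URL
TOKEN = "LONG_LIVED_ACCESS_TOKEN"         # assumed token


async def main() -> None:
    headers = {"Authorization": f"Bearer {TOKEN}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        # Download: the serving agent must be named explicitly.
        async with session.get(
            f"{BASE}/api/backup/download/abc123",
            params={"agent_id": "backup.local"},
        ) as resp:
            tar_bytes = await resp.read()

        # Upload: one agent_id entry per target agent.
        form = aiohttp.FormData()
        form.add_field("file", tar_bytes, filename="abc123.tar")
        await session.post(
            f"{BASE}/api/backup/upload",
            params=[("agent_id", "backup.local"), ("agent_id", "kitchen_sink.syncer")],
            data=form,
        )


asyncio.run(main())
```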
View File

@@ -5,7 +5,7 @@ from __future__ import annotations
import abc
import asyncio
from collections.abc import Callable
from dataclasses import asdict, dataclass
from dataclasses import dataclass
import hashlib
import io
import json
@@ -13,10 +13,9 @@ from pathlib import Path
from queue import SimpleQueue
import shutil
import tarfile
from tarfile import TarError
from tempfile import TemporaryDirectory
import time
from typing import Any, Generic, Protocol, cast
from typing import Any, Generic, Protocol
import aiohttp
from securetar import SecureTarFile, atomic_contents_add
@@ -29,13 +28,19 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import integration_platform
from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util
from homeassistant.util.json import json_loads_object
from .agent import BackupAgent, BackupAgentPlatformProtocol
from .const import DOMAIN, EXCLUDE_DATABASE_FROM_BACKUP, EXCLUDE_FROM_BACKUP, LOGGER
from .agent import BackupAgent, BackupAgentPlatformProtocol, LocalBackupAgent
from .const import (
BUF_SIZE,
DOMAIN,
EXCLUDE_DATABASE_FROM_BACKUP,
EXCLUDE_FROM_BACKUP,
LOGGER,
)
from .models import BackupUploadMetadata, BaseBackup
from .util import read_backup
BUF_SIZE = 2**20 * 4 # 4MB
LOCAL_AGENT_ID = f"{DOMAIN}.local"
_BackupT = TypeVar("_BackupT", bound=BaseBackup, default=BaseBackup)
@@ -51,11 +56,7 @@ class NewBackup:
class Backup(BaseBackup):
"""Backup class."""
path: Path
def as_dict(self) -> dict:
"""Return a dict representation of this backup."""
return {**asdict(self), "path": self.path.as_posix()}
agent_ids: list[str]
@dataclass(slots=True)
@@ -84,20 +85,22 @@ class BaseBackupManager(abc.ABC, Generic[_BackupT]):
"""Initialize the backup manager."""
self.hass = hass
self.backup_task: asyncio.Task | None = None
self.backups: dict[str, _BackupT] = {}
self.loaded_platforms = False
self.platforms: dict[str, BackupPlatformProtocol] = {}
self.backup_agents: dict[str, BackupAgent] = {}
self.local_backup_agents: dict[str, LocalBackupAgent] = {}
self.syncing = False
async def async_setup(self) -> None:
"""Set up the backup manager."""
await self.load_platforms()
@callback
def _add_platform_pre_post_handlers(
def _add_platform_pre_post_handler(
self,
hass: HomeAssistant,
integration_domain: str,
platform: BackupPlatformProtocol,
) -> None:
"""Add a platform to the backup manager."""
"""Add a backup platform."""
if not hasattr(platform, "async_pre_backup") or not hasattr(
platform, "async_post_backup"
):
@@ -107,7 +110,6 @@ class BaseBackupManager(abc.ABC, Generic[_BackupT]):
async def _async_add_platform_agents(
self,
hass: HomeAssistant,
integration_domain: str,
platform: BackupAgentPlatformProtocol,
) -> None:
@@ -115,16 +117,30 @@ class BaseBackupManager(abc.ABC, Generic[_BackupT]):
if not hasattr(platform, "async_get_backup_agents"):
return
agents = await platform.async_get_backup_agents(hass=hass)
agents = await platform.async_get_backup_agents(self.hass)
self.backup_agents.update(
{f"{integration_domain}.{agent.name}": agent for agent in agents}
)
self.local_backup_agents.update(
{
f"{integration_domain}.{agent.name}": agent
for agent in agents
if isinstance(agent, LocalBackupAgent)
}
)
async def _add_platform(
self,
hass: HomeAssistant,
integration_domain: str,
platform: Any,
) -> None:
"""Add a backup platform manager."""
self._add_platform_pre_post_handler(integration_domain, platform)
await self._async_add_platform_agents(integration_domain, platform)
async def async_pre_backup_actions(self, **kwargs: Any) -> None:
"""Perform pre backup actions."""
if not self.loaded_platforms:
await self.load_platforms()
pre_backup_results = await asyncio.gather(
*(
platform.async_pre_backup(self.hass)
@@ -138,9 +154,6 @@ class BaseBackupManager(abc.ABC, Generic[_BackupT]):
async def async_post_backup_actions(self, **kwargs: Any) -> None:
"""Perform post backup actions."""
if not self.loaded_platforms:
await self.load_platforms()
post_backup_results = await asyncio.gather(
*(
platform.async_post_backup(self.hass)
@@ -154,30 +167,22 @@ class BaseBackupManager(abc.ABC, Generic[_BackupT]):
async def load_platforms(self) -> None:
"""Load backup platforms."""
if self.loaded_platforms:
return
await integration_platform.async_process_integration_platforms(
self.hass,
DOMAIN,
self._add_platform_pre_post_handlers,
wait_for_platforms=True,
)
await integration_platform.async_process_integration_platforms(
self.hass,
DOMAIN,
self._async_add_platform_agents,
self._add_platform,
wait_for_platforms=True,
)
LOGGER.debug("Loaded %s platforms", len(self.platforms))
LOGGER.debug("Loaded %s agents", len(self.backup_agents))
self.loaded_platforms = True
@abc.abstractmethod
async def async_restore_backup(
self,
slug: str,
*,
password: str | None = None,
agent_id: str,
password: str | None,
**kwargs: Any,
) -> None:
"""Restore a backup."""
@@ -187,6 +192,7 @@ class BaseBackupManager(abc.ABC, Generic[_BackupT]):
self,
*,
addons_included: list[str] | None,
agent_ids: list[str],
database_included: bool,
folders_included: list[str] | None,
name: str | None,
@@ -219,15 +225,12 @@ class BaseBackupManager(abc.ABC, Generic[_BackupT]):
async def async_receive_backup(
self,
*,
agent_ids: list[str],
contents: aiohttp.BodyPartReader,
**kwargs: Any,
) -> None:
"""Receive and store a backup file from upload."""
@abc.abstractmethod
async def async_upload_backup(self, *, slug: str, **kwargs: Any) -> None:
"""Upload a backup."""
class BackupManager(BaseBackupManager[Backup]):
"""Backup manager for the Backup integration."""
@@ -235,111 +238,91 @@ class BackupManager(BaseBackupManager[Backup]):
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the backup manager."""
super().__init__(hass=hass)
self.backup_dir = Path(hass.config.path("backups"))
self.loaded_backups = False
async def async_upload_backup(self, *, slug: str, **kwargs: Any) -> None:
"""Upload a backup."""
await self.load_platforms()
if not self.backup_agents:
return
if not (backup := await self.async_get_backup(slug=slug)):
return
self.temp_backup_dir = Path(hass.config.path("tmp_backups"))
async def _async_upload_backup(
self,
*,
backup: BaseBackup,
agent_ids: list[str],
path: Path,
) -> None:
"""Upload a backup to selected agents."""
self.syncing = True
sync_backup_results = await asyncio.gather(
*(
agent.async_upload_backup(
path=backup.path,
metadata=BackupUploadMetadata(
homeassistant=HAVERSION,
size=backup.size,
date=backup.date,
slug=backup.slug,
name=backup.name,
protected=backup.protected,
),
)
for agent in self.backup_agents.values()
),
return_exceptions=True,
)
for result in sync_backup_results:
if isinstance(result, Exception):
LOGGER.error("Error during backup upload - %s", result)
self.syncing = False
async def load_backups(self) -> None:
"""Load data of stored backup files."""
backups = await self.hass.async_add_executor_job(self._read_backups)
LOGGER.debug("Loaded %s local backups", len(backups))
self.backups = backups
self.loaded_backups = True
def _read_backups(self) -> dict[str, Backup]:
"""Read backups from disk."""
backups: dict[str, Backup] = {}
for backup_path in self.backup_dir.glob("*.tar"):
try:
with tarfile.open(backup_path, "r:", bufsize=BUF_SIZE) as backup_file:
if data_file := backup_file.extractfile("./backup.json"):
data = json_loads_object(data_file.read())
backup = Backup(
slug=cast(str, data["slug"]),
name=cast(str, data["name"]),
date=cast(str, data["date"]),
path=backup_path,
size=round(backup_path.stat().st_size / 1_048_576, 2),
protected=cast(bool, data.get("protected", False)),
)
backups[backup.slug] = backup
except (OSError, TarError, json.JSONDecodeError, KeyError) as err:
LOGGER.warning("Unable to read backup %s: %s", backup_path, err)
return backups
try:
sync_backup_results = await asyncio.gather(
*(
self.backup_agents[agent_id].async_upload_backup(
path=path,
metadata=BackupUploadMetadata(
homeassistant=HAVERSION,
size=backup.size,
date=backup.date,
slug=backup.slug,
name=backup.name,
protected=backup.protected,
),
)
for agent_id in agent_ids
),
return_exceptions=True,
)
for result in sync_backup_results:
if isinstance(result, Exception):
LOGGER.exception(
"Error during backup upload - %s", result, exc_info=result
)
finally:
self.syncing = False
async def async_get_backups(self, **kwargs: Any) -> dict[str, Backup]:
"""Return backups."""
if not self.loaded_backups:
await self.load_backups()
backups: dict[str, Backup] = {}
for agent_id, agent in self.backup_agents.items():
agent_backups = await agent.async_list_backups()
for agent_backup in agent_backups:
if agent_backup.slug not in backups:
backups[agent_backup.slug] = Backup(
slug=agent_backup.slug,
name=agent_backup.name,
date=agent_backup.date,
agent_ids=[],
size=agent_backup.size,
protected=agent_backup.protected,
)
backups[agent_backup.slug].agent_ids.append(agent_id)
return self.backups
return backups
async def async_get_backup(self, *, slug: str, **kwargs: Any) -> Backup | None:
"""Return a backup."""
if not self.loaded_backups:
await self.load_backups()
backup: Backup | None = None
if not (backup := self.backups.get(slug)):
return None
if not backup.path.exists():
LOGGER.debug(
(
"Removing tracked backup (%s) that does not exists on the expected"
" path %s"
),
backup.slug,
backup.path,
)
self.backups.pop(slug)
return None
for agent_id, agent in self.backup_agents.items():
if not (agent_backup := await agent.async_get_backup(slug=slug)):
continue
if backup is None:
backup = Backup(
slug=agent_backup.slug,
name=agent_backup.name,
date=agent_backup.date,
agent_ids=[],
size=agent_backup.size,
protected=agent_backup.protected,
)
backup.agent_ids.append(agent_id)
return backup
async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None:
"""Remove a backup."""
if (backup := await self.async_get_backup(slug=slug)) is None:
return
await self.hass.async_add_executor_job(backup.path.unlink, True)
LOGGER.debug("Removed backup located at %s", backup.path)
self.backups.pop(slug)
for agent in self.backup_agents.values():
await agent.async_remove_backup(slug=slug) # type: ignore[attr-defined]
async def async_receive_backup(
self,
*,
agent_ids: list[str],
contents: aiohttp.BodyPartReader,
**kwargs: Any,
) -> None:
@@ -387,17 +370,43 @@ class BackupManager(BaseBackupManager[Backup]):
if fut is not None:
await fut
def _move_and_cleanup() -> None:
shutil.move(target_temp_file, self.backup_dir / target_temp_file.name)
def _copy_and_cleanup(local_file_paths: list[Path], backup: BaseBackup) -> Path:
if local_file_paths:
tar_file_path = local_file_paths[0]
else:
tar_file_path = self.temp_backup_dir / f"{backup.slug}.tar"
for local_path in local_file_paths:
shutil.copy(target_temp_file, local_path)
temp_dir_handler.cleanup()
return tar_file_path
await self.hass.async_add_executor_job(_move_and_cleanup)
await self.load_backups()
try:
backup = await self.hass.async_add_executor_job(
read_backup, target_temp_file
)
except (OSError, tarfile.TarError, json.JSONDecodeError, KeyError) as err:
LOGGER.warning("Unable to parse backup %s: %s", target_temp_file, err)
return
local_file_paths = [
self.local_backup_agents[agent_id].get_backup_path(backup.slug)
for agent_id in agent_ids
if agent_id in self.local_backup_agents
]
tar_file_path = await self.hass.async_add_executor_job(
_copy_and_cleanup, local_file_paths, backup
)
await self._async_upload_backup(
backup=backup, agent_ids=agent_ids, path=tar_file_path
)
if not local_file_paths:
await self.hass.async_add_executor_job(tar_file_path.unlink, True)
async def async_create_backup(
self,
*,
addons_included: list[str] | None,
agent_ids: list[str],
database_included: bool,
folders_included: list[str] | None,
name: str | None,
@@ -408,12 +417,17 @@ class BackupManager(BaseBackupManager[Backup]):
"""Initiate generating a backup."""
if self.backup_task:
raise HomeAssistantError("Backup already in progress")
if not agent_ids:
raise HomeAssistantError("At least one agent must be selected")
if any(agent_id not in self.backup_agents for agent_id in agent_ids):
raise HomeAssistantError("Invalid agent selected")
backup_name = name or f"Core {HAVERSION}"
date_str = dt_util.now().isoformat()
slug = _generate_slug(date_str, backup_name)
self.backup_task = self.hass.async_create_task(
self._async_create_backup(
addons_included=addons_included,
agent_ids=agent_ids,
backup_name=backup_name,
database_included=database_included,
date_str=date_str,
@@ -431,6 +445,7 @@ class BackupManager(BaseBackupManager[Backup]):
self,
*,
addons_included: list[str] | None,
agent_ids: list[str],
database_included: bool,
backup_name: str,
date_str: str,
@@ -438,9 +453,16 @@ class BackupManager(BaseBackupManager[Backup]):
on_progress: Callable[[BackupProgress], None] | None,
password: str | None,
slug: str,
) -> Backup:
) -> BaseBackup:
"""Generate a backup."""
success = False
local_file_paths = [
self.local_backup_agents[agent_id].get_backup_path(slug)
for agent_id in agent_ids
if agent_id in self.local_backup_agents
]
try:
await self.async_pre_backup_actions()
@@ -458,25 +480,30 @@ class BackupManager(BaseBackupManager[Backup]):
"protected": password is not None,
}
tar_file_path = Path(self.backup_dir, f"{backup_data['slug']}.tar")
size_in_bytes = await self.hass.async_add_executor_job(
tar_file_path, size_in_bytes = await self.hass.async_add_executor_job(
self._mkdir_and_generate_backup_contents,
tar_file_path,
local_file_paths,
backup_data,
database_included,
password,
)
backup = Backup(
backup = BaseBackup(
slug=slug,
name=backup_name,
date=date_str,
path=tar_file_path,
size=round(size_in_bytes / 1_048_576, 2),
protected=password is not None,
)
if self.loaded_backups:
self.backups[slug] = backup
LOGGER.debug("Generated new backup with slug %s", slug)
LOGGER.debug(
"Generated new backup with slug %s, uploading to agents %s",
slug,
agent_ids,
)
await self._async_upload_backup(
backup=backup, agent_ids=agent_ids, path=tar_file_path
)
if not local_file_paths:
await self.hass.async_add_executor_job(tar_file_path.unlink, True)
success = True
return backup
finally:
@@ -487,15 +514,19 @@ class BackupManager(BaseBackupManager[Backup]):
def _mkdir_and_generate_backup_contents(
self,
tar_file_path: Path,
tar_file_paths: list[Path],
backup_data: dict[str, Any],
database_included: bool,
password: str | None = None,
) -> int:
password: str | None,
) -> tuple[Path, int]:
"""Generate backup contents and return the size."""
if not self.backup_dir.exists():
LOGGER.debug("Creating backup directory")
self.backup_dir.mkdir()
if tar_file_paths:
tar_file_path = tar_file_paths[0]
else:
tar_file_path = self.temp_backup_dir / f"{backup_data['slug']}.tar"
if not (backup_dir := tar_file_path.parent).exists():
LOGGER.debug("Creating backup directory %s", backup_dir)
backup_dir.mkdir()
excludes = EXCLUDE_FROM_BACKUP
if not database_included:
@@ -522,13 +553,16 @@ class BackupManager(BaseBackupManager[Backup]):
excludes=excludes,
arcname="data",
)
return tar_file_path.stat().st_size
for local_path in tar_file_paths[1:]:
shutil.copy(tar_file_path, local_path)
return (tar_file_path, tar_file_path.stat().st_size)
async def async_restore_backup(
self,
slug: str,
*,
password: str | None = None,
agent_id: str,
password: str | None,
**kwargs: Any,
) -> None:
"""Restore a backup.
@@ -536,13 +570,25 @@ class BackupManager(BaseBackupManager[Backup]):
This will write the restore information to .HA_RESTORE which
will be handled during startup by the restore_backup module.
"""
if (backup := await self.async_get_backup(slug=slug)) is None:
raise HomeAssistantError(f"Backup {slug} not found")
if agent_id in self.local_backup_agents:
local_agent = self.local_backup_agents[agent_id]
if not await local_agent.async_get_backup(slug=slug):
raise HomeAssistantError(f"Backup {slug} not found in agent {agent_id}")
path = local_agent.get_backup_path(slug=slug)
else:
path = self.temp_backup_dir / f"{slug}.tar"
agent = self.backup_agents[agent_id]
if not (backup := await agent.async_get_backup(slug=slug)):
raise HomeAssistantError(f"Backup {slug} not found in agent {agent_id}")
await agent.async_download_backup(id=backup.id, path=path)
path = local_agent.get_backup_path(slug)
def _write_restore_file() -> None:
"""Write the restore file."""
Path(self.hass.config.path(RESTORE_BACKUP_FILE)).write_text(
json.dumps({"path": backup.path.as_posix(), "password": password}),
json.dumps({"path": path.as_posix(), "password": password}),
encoding="utf-8",
)
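Taken together, the manager now always writes either to the local agents' backup directories or to `tmp_backups`, uploads to every requested agent, and deletes the temporary .tar when no local agent was selected. A hedged call-site sketch for the new `agent_ids` requirement (the trailing keyword arguments are assumed from the `_async_create_backup` signature in this hunk):

```python
from homeassistant.components.backup.const import DATA_MANAGER
from homeassistant.core import HomeAssistant


async def create_local_backup(hass: HomeAssistant) -> None:
    """Create a backup targeting the first local agent (mirrors the service handler)."""
    manager = hass.data[DATA_MANAGER]
    agent_id = next(iter(manager.local_backup_agents))  # e.g. "backup.local"
    await manager.async_create_backup(
        addons_included=None,
        agent_ids=[agent_id],  # at least one agent is now required
        database_included=True,
        folders_included=None,
        name=None,
        on_progress=None,      # assumed from _async_create_backup above
        password=None,
    )
```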

View File

@@ -0,0 +1,28 @@
"""Local backup support for Core and Container installations."""
from __future__ import annotations
from pathlib import Path
import tarfile
from typing import cast
from homeassistant.util.json import json_loads_object
from .const import BUF_SIZE
from .models import BaseBackup
def read_backup(backup_path: Path) -> BaseBackup:
"""Read a backup from disk."""
with tarfile.open(backup_path, "r:", bufsize=BUF_SIZE) as backup_file:
if not (data_file := backup_file.extractfile("./backup.json")):
raise KeyError("backup.json not found in tar file")
data = json_loads_object(data_file.read())
return BaseBackup(
slug=cast(str, data["slug"]),
name=cast(str, data["name"]),
date=cast(str, data["date"]),
size=round(backup_path.stat().st_size / 1_048_576, 2),
protected=cast(bool, data.get("protected", False)),
)
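`read_backup` only needs the `./backup.json` member inside the tar. A self-contained round-trip using just the standard library (the metadata fields and the 4MB buffer size mirror this hunk; everything else is illustrative):

```python
"""Round-trip sketch for the backup.json convention used by read_backup."""
import io
import json
import tarfile
from pathlib import Path

BUF_SIZE = 2**20 * 4  # 4MB, same as const.BUF_SIZE


def write_minimal_backup(path: Path) -> None:
    """Create a tar whose only member is ./backup.json (illustration only)."""
    meta = {"slug": "abc123", "name": "Test", "date": "1970-01-01T00:00:00.000Z"}
    payload = json.dumps(meta).encode()
    with tarfile.open(path, "w:", bufsize=BUF_SIZE) as tar:
        info = tarfile.TarInfo("./backup.json")
        info.size = len(payload)
        tar.addfile(info, io.BytesIO(payload))


def read_minimal_backup(path: Path) -> dict:
    """Same extraction logic as read_backup, returning the raw metadata dict."""
    with tarfile.open(path, "r:", bufsize=BUF_SIZE) as tar:
        data_file = tar.extractfile("./backup.json")
        if data_file is None:
            raise KeyError("backup.json not found in tar file")
        return json.loads(data_file.read())


if __name__ == "__main__":
    tar_path = Path("abc123.tar")
    write_minimal_backup(tar_path)
    print(read_minimal_backup(tar_path))  # {'slug': 'abc123', 'name': 'Test', ...}
```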

View File

@@ -1,7 +1,6 @@
"""Websocket commands for the Backup integration."""
from pathlib import Path
from typing import Any
from typing import Any, cast
import voluptuous as vol
@@ -9,7 +8,7 @@ from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback
from .const import DATA_MANAGER, LOGGER
from .manager import BackupProgress
from .manager import BackupManager, BackupProgress
@callback
@@ -97,6 +96,7 @@ async def handle_remove(
{
vol.Required("type"): "backup/restore",
vol.Required("slug"): str,
vol.Required("agent_id"): str,
vol.Optional("password"): str,
}
)
@@ -109,6 +109,7 @@ async def handle_restore(
"""Restore a backup."""
await hass.data[DATA_MANAGER].async_restore_backup(
slug=msg["slug"],
agent_id=msg["agent_id"],
password=msg.get("password"),
)
connection.send_result(msg["id"])
@@ -119,6 +120,7 @@ async def handle_restore(
{
vol.Required("type"): "backup/generate",
vol.Optional("addons_included"): [str],
vol.Required("agent_ids"): [str],
vol.Optional("database_included", default=True): bool,
vol.Optional("folders_included"): [str],
vol.Optional("name"): str,
@@ -138,6 +140,7 @@ async def handle_create(
backup = await hass.data[DATA_MANAGER].async_create_backup(
addons_included=msg.get("addons_included"),
agent_ids=msg["agent_ids"],
database_included=msg["database_included"],
folders_included=msg.get("folders_included"),
name=msg.get("name"),
@@ -199,7 +202,6 @@ async def backup_agents_info(
) -> None:
"""Return backup agents info."""
manager = hass.data[DATA_MANAGER]
await manager.load_platforms()
connection.send_result(
msg["id"],
{
@@ -220,7 +222,6 @@ async def backup_agents_list_backups(
"""Return a list of uploaded backups."""
manager = hass.data[DATA_MANAGER]
backups: list[dict[str, Any]] = []
await manager.load_platforms()
for agent_id, agent in manager.backup_agents.items():
_listed_backups = await agent.async_list_backups()
backups.extend({**b.as_dict(), "agent_id": agent_id} for b in _listed_backups)
@@ -243,18 +244,17 @@ async def backup_agents_download(
msg: dict[str, Any],
) -> None:
"""Download an uploaded backup."""
manager = hass.data[DATA_MANAGER]
await manager.load_platforms()
manager = cast(BackupManager, hass.data[DATA_MANAGER])
if not (agent := manager.backup_agents.get(msg["agent_id"])):
connection.send_error(
msg["id"], "unknown_agent", f"Agent {msg['agent_id']} not found"
)
return
try:
path = manager.temp_backup_dir / f"{msg["slug"]}.tar"
await agent.async_download_backup(
id=msg["backup_id"],
path=Path(hass.config.path("backup"), f"{msg['slug']}.tar"),
path=path,
)
except Exception as err: # noqa: BLE001
connection.send_error(msg["id"], "backup_agents_download", str(err))
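For reference, the client-facing shape of the two changed commands, assembled from the voluptuous schemas in this hunk (the `id` counter and transport are whatever the client already uses; everything commented out stays optional):

```python
# Assumed payload sketches matching the schemas above.
generate_msg = {
    "id": 1,
    "type": "backup/generate",
    "agent_ids": ["backup.local"],  # now required, at least one agent
    "database_included": True,      # optional, defaults to True
    # "addons_included": [...], "folders_included": [...], "name": "..."
}

restore_msg = {
    "id": 2,
    "type": "backup/restore",
    "slug": "abc123",
    "agent_id": "backup.local",     # now required: the agent that holds the backup
    # "password": "..."
}
```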

View File

@@ -75,3 +75,15 @@ class KitchenSinkBackupAgent(BackupAgent):
async def async_list_backups(self, **kwargs: Any) -> list[UploadedBackup]:
"""List synced backups."""
return self._uploads
async def async_get_backup(
self,
*,
slug: str,
**kwargs: Any,
) -> UploadedBackup | None:
"""Return a backup."""
for backup in self._uploads:
if backup.slug == slug:
return backup
return None

View File

@@ -12,12 +12,32 @@ from homeassistant.components.backup import (
BackupUploadMetadata,
UploadedBackup,
)
from homeassistant.components.backup.manager import Backup
from homeassistant.components.backup.backup import LocalBackup
from homeassistant.components.backup.const import DATA_MANAGER
from homeassistant.components.backup.manager import LOCAL_AGENT_ID, Backup
from homeassistant.components.backup.models import BaseBackup
from homeassistant.core import HomeAssistant
from homeassistant.helpers.typing import ConfigType
from homeassistant.setup import async_setup_component
TEST_BASE_BACKUP = BaseBackup(
slug="abc123",
name="Test",
date="1970-01-01T00:00:00.000Z",
size=0.0,
protected=False,
)
TEST_BACKUP = Backup(
agent_ids=["backup.local"],
slug="abc123",
name="Test",
date="1970-01-01T00:00:00.000Z",
size=0.0,
protected=False,
)
TEST_BACKUP_PATH = Path("abc123.tar")
TEST_LOCAL_BACKUP = LocalBackup(
id="abc123",
slug="abc123",
name="Test",
date="1970-01-01T00:00:00.000Z",
@@ -65,12 +85,39 @@ class BackupAgentTest(BackupAgent):
)
]
async def async_get_backup(
self,
*,
slug: str,
**kwargs: Any,
) -> UploadedBackup | None:
"""Return a backup."""
if slug != "abc123":
return None
return UploadedBackup(
id="abc123",
date="1970-01-01T00:00:00Z",
name="Test",
protected=False,
size=13.37,
slug="abc123",
)
async def setup_backup_integration(
hass: HomeAssistant,
with_hassio: bool = False,
configuration: ConfigType | None = None,
backups: list[Backup] | None = None,
) -> bool:
"""Set up the Backup integration."""
with patch("homeassistant.components.backup.is_hassio", return_value=with_hassio):
return await async_setup_component(hass, DOMAIN, configuration or {})
result = await async_setup_component(hass, DOMAIN, configuration or {})
if with_hassio or not backups:
return result
local_agent = hass.data[DATA_MANAGER].backup_agents[LOCAL_AGENT_ID]
local_agent._backups = {backups.slug: backups for backups in backups}
local_agent._loaded_backups = True
return result

View File

@@ -42,6 +42,9 @@
'id': 1,
'result': dict({
'agents': list([
dict({
'agent_id': 'backup.local',
}),
dict({
'agent_id': 'domain.test',
}),
@@ -57,6 +60,9 @@
'id': 1,
'result': dict({
'agents': list([
dict({
'agent_id': 'backup.local',
}),
dict({
'agent_id': 'domain.test',
}),
@@ -278,9 +284,11 @@
'id': 1,
'result': dict({
'backup': dict({
'agent_ids': list([
'backup.local',
]),
'date': '1970-01-01T00:00:00.000Z',
'name': 'Test',
'path': 'abc123.tar',
'protected': False,
'size': 0.0,
'slug': 'abc123',
@@ -434,9 +442,11 @@
'backing_up': False,
'backups': list([
dict({
'agent_ids': list([
'backup.local',
]),
'date': '1970-01-01T00:00:00.000Z',
'name': 'Test',
'path': 'abc123.tar',
'protected': False,
'size': 0.0,
'slug': 'abc123',

View File

@@ -9,7 +9,7 @@ import pytest
from homeassistant.core import HomeAssistant
from .common import TEST_BACKUP, setup_backup_integration
from .common import TEST_LOCAL_BACKUP, setup_backup_integration
from tests.common import MockUser
from tests.typing import ClientSessionGenerator
@@ -26,8 +26,8 @@ async def test_downloading_backup(
with (
patch(
"homeassistant.components.backup.manager.BackupManager.async_get_backup",
return_value=TEST_BACKUP,
"homeassistant.components.backup.backup.CoreLocalBackupAgent.async_get_backup",
return_value=TEST_LOCAL_BACKUP,
),
patch("pathlib.Path.exists", return_value=True),
patch(
@@ -35,7 +35,7 @@ async def test_downloading_backup(
return_value=web.Response(text=""),
),
):
resp = await client.get("/api/backup/download/abc123")
resp = await client.get("/api/backup/download/abc123?agent_id=backup.local")
assert resp.status == 200
@@ -48,7 +48,7 @@ async def test_downloading_backup_not_found(
client = await hass_client()
resp = await client.get("/api/backup/download/abc123")
resp = await client.get("/api/backup/download/abc123?agent_id=backup.local")
assert resp.status == 404
@@ -63,7 +63,7 @@ async def test_downloading_as_non_admin(
client = await hass_client()
resp = await client.get("/api/backup/download/abc123")
resp = await client.get("/api/backup/download/abc123?agent_id=backup.local")
assert resp.status == 401
@@ -80,7 +80,7 @@ async def test_uploading_a_backup_file(
"homeassistant.components.backup.manager.BackupManager.async_receive_backup",
) as async_receive_backup_mock:
resp = await client.post(
"/api/backup/upload",
"/api/backup/upload?agent_id=backup.local",
data={"file": StringIO("test")},
)
assert resp.status == 201
@@ -110,7 +110,7 @@ async def test_error_handling_uploading_a_backup_file(
side_effect=error,
):
resp = await client.post(
"/api/backup/upload",
"/api/backup/upload?agent_id=backup.local",
data={"file": StringIO("test")},
)
assert resp.status == 500

View File

@@ -3,7 +3,6 @@
from __future__ import annotations
import asyncio
from typing import Any
from unittest.mock import ANY, AsyncMock, MagicMock, Mock, call, mock_open, patch
import aiohttp
@@ -11,17 +10,24 @@ from multidict import CIMultiDict, CIMultiDictProxy
import pytest
from homeassistant.components.backup import (
DOMAIN,
BackupAgentPlatformProtocol,
BackupManager,
BackupPlatformProtocol,
BackupUploadMetadata,
backup as local_backup_platform,
)
from homeassistant.components.backup.manager import BackupProgress
from homeassistant.components.backup.manager import LOCAL_AGENT_ID, BackupProgress
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.setup import async_setup_component
from .common import TEST_BACKUP, BackupAgentTest
from .common import (
TEST_BACKUP,
TEST_BACKUP_PATH,
TEST_BASE_BACKUP,
TEST_LOCAL_BACKUP,
BackupAgentTest,
)
from tests.common import MockPlatform, mock_platform
@@ -52,6 +58,7 @@ async def _mock_backup_generation(
assert manager.backup_task is None
await manager.async_create_backup(
addons_included=[],
agent_ids=[LOCAL_AGENT_ID],
database_included=database_included,
folders_included=[],
name=name,
@@ -80,7 +87,10 @@ async def _mock_backup_generation(
"slug": ANY,
"type": "partial",
}
assert manager.backup_dir.as_posix() in str(mocked_tarfile.call_args_list[0][0][0])
local_agent = manager.backup_agents[LOCAL_AGENT_ID]
assert local_agent._backup_dir.as_posix() in str(
mocked_tarfile.call_args_list[0][0][0]
)
outer_tar = mocked_tarfile.return_value
core_tar = outer_tar.create_inner_tar.return_value.__enter__.return_value
expected_files = [call(hass.config.path(), arcname="data", recursive=False)] + [
@@ -92,41 +102,47 @@ async def _mock_backup_generation(
return backup
async def _setup_mock_domain(
async def _setup_backup_platform(
hass: HomeAssistant,
*,
domain: str = "some_domain",
platform: BackupPlatformProtocol | BackupAgentPlatformProtocol | None = None,
) -> None:
"""Set up a mock domain."""
mock_platform(hass, "some_domain.backup", platform or MockPlatform())
assert await async_setup_component(hass, "some_domain", {})
mock_platform(hass, f"{domain}.backup", platform or MockPlatform())
assert await async_setup_component(hass, domain, {})
async def test_constructor(hass: HomeAssistant) -> None:
"""Test BackupManager constructor."""
manager = BackupManager(hass)
assert manager.backup_dir.as_posix() == hass.config.path("backups")
assert manager.temp_backup_dir.as_posix() == hass.config.path("tmp_backups")
async def test_load_backups(hass: HomeAssistant) -> None:
"""Test loading backups."""
manager = BackupManager(hass)
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
await manager.load_platforms()
with (
patch("pathlib.Path.glob", return_value=[TEST_BACKUP.path]),
patch("pathlib.Path.glob", return_value=[TEST_BACKUP_PATH]),
patch("tarfile.open", return_value=MagicMock()),
patch(
"homeassistant.components.backup.manager.json_loads_object",
"homeassistant.components.backup.util.json_loads_object",
return_value={
"slug": TEST_BACKUP.slug,
"name": TEST_BACKUP.name,
"date": TEST_BACKUP.date,
"slug": TEST_LOCAL_BACKUP.slug,
"name": TEST_LOCAL_BACKUP.name,
"date": TEST_LOCAL_BACKUP.date,
},
),
patch(
"pathlib.Path.stat",
return_value=MagicMock(st_size=TEST_BACKUP.size),
return_value=MagicMock(st_size=TEST_LOCAL_BACKUP.size),
),
):
await manager.load_backups()
await manager.backup_agents[LOCAL_AGENT_ID].load_backups()
backups = await manager.async_get_backups()
assert backups == {TEST_BACKUP.slug: TEST_BACKUP}
@@ -137,13 +153,17 @@ async def test_load_backups_with_exception(
) -> None:
"""Test loading backups with exception."""
manager = BackupManager(hass)
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
await manager.load_platforms()
with (
patch("pathlib.Path.glob", return_value=[TEST_BACKUP.path]),
patch("pathlib.Path.glob", return_value=[TEST_BACKUP_PATH]),
patch("tarfile.open", side_effect=OSError("Test exception")),
):
await manager.load_backups()
await manager.backup_agents[LOCAL_AGENT_ID].load_backups()
backups = await manager.async_get_backups()
assert f"Unable to read backup {TEST_BACKUP.path}: Test exception" in caplog.text
assert f"Unable to read backup {TEST_BACKUP_PATH}: Test exception" in caplog.text
assert backups == {}
@@ -153,11 +173,16 @@ async def test_removing_backup(
) -> None:
"""Test removing backup."""
manager = BackupManager(hass)
manager.backups = {TEST_BACKUP.slug: TEST_BACKUP}
manager.loaded_backups = True
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
await manager.load_platforms()
local_agent = manager.backup_agents[LOCAL_AGENT_ID]
local_agent._backups = {TEST_LOCAL_BACKUP.slug: TEST_LOCAL_BACKUP}
local_agent._loaded_backups = True
with patch("pathlib.Path.exists", return_value=True):
await manager.async_remove_backup(slug=TEST_BACKUP.slug)
await manager.async_remove_backup(slug=TEST_LOCAL_BACKUP.slug)
assert "Removed backup located at" in caplog.text
@@ -168,6 +193,9 @@ async def test_removing_non_existing_backup(
"""Test removing not existing backup."""
manager = BackupManager(hass)
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
await manager.load_platforms()
await manager.async_remove_backup(slug="non_existing")
assert "Removed backup located at" not in caplog.text
@@ -178,16 +206,21 @@ async def test_getting_backup_that_does_not_exist(
) -> None:
"""Test getting backup that does not exist."""
manager = BackupManager(hass)
manager.backups = {TEST_BACKUP.slug: TEST_BACKUP}
manager.loaded_backups = True
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
await manager.load_platforms()
local_agent = manager.backup_agents[LOCAL_AGENT_ID]
local_agent._backups = {TEST_LOCAL_BACKUP.slug: TEST_LOCAL_BACKUP}
local_agent._loaded_backups = True
with patch("pathlib.Path.exists", return_value=False):
backup = await manager.async_get_backup(slug=TEST_BACKUP.slug)
backup = await manager.async_get_backup(slug=TEST_LOCAL_BACKUP.slug)
assert backup is None
assert (
f"Removing tracked backup ({TEST_BACKUP.slug}) that "
f"does not exists on the expected path {TEST_BACKUP.path}"
f"Removing tracked backup ({TEST_LOCAL_BACKUP.slug}) that "
f"does not exists on the expected path {TEST_LOCAL_BACKUP.path}"
) in caplog.text
@@ -199,6 +232,7 @@ async def test_async_create_backup_when_backing_up(hass: HomeAssistant) -> None:
with pytest.raises(HomeAssistantError, match="Backup already in progress"):
await manager.async_create_backup(
addons_included=[],
agent_ids=[LOCAL_AGENT_ID],
database_included=True,
folders_included=[],
name=None,
@@ -227,7 +261,12 @@ async def test_async_create_backup(
) -> None:
"""Test generate backup."""
manager = BackupManager(hass)
manager.loaded_backups = True
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
await manager.load_platforms()
local_agent = manager.backup_agents[LOCAL_AGENT_ID]
local_agent._loaded_backups = True
await _mock_backup_generation(
hass, manager, mocked_json_bytes, mocked_tarfile, **params
@@ -236,10 +275,10 @@ async def test_async_create_backup(
assert "Generated new backup with slug " in caplog.text
assert "Creating backup directory" in caplog.text
assert "Loaded 0 platforms" in caplog.text
assert "Loaded 0 agents" in caplog.text
assert "Loaded 1 agents" in caplog.text
assert len(manager.backups) == 1
backup = list(manager.backups.values())[0]
assert len(local_agent._backups) == 1
backup = list(local_agent._backups.values())[0]
assert backup.protected is bool(params.get("password"))
@@ -250,12 +289,11 @@ async def test_loading_platforms(
"""Test loading backup platforms."""
manager = BackupManager(hass)
assert not manager.loaded_platforms
assert not manager.platforms
await _setup_mock_domain(
await _setup_backup_platform(
hass,
Mock(
platform=Mock(
async_pre_backup=AsyncMock(),
async_post_backup=AsyncMock(),
async_get_backup_agents=AsyncMock(),
@@ -264,7 +302,6 @@ async def test_loading_platforms(
await manager.load_platforms()
await hass.async_block_till_done()
assert manager.loaded_platforms
assert len(manager.platforms) == 1
assert "Loaded 1 platforms" in caplog.text
@@ -277,19 +314,17 @@ async def test_loading_agents(
"""Test loading backup agents."""
manager = BackupManager(hass)
assert not manager.loaded_platforms
assert not manager.platforms
await _setup_mock_domain(
await _setup_backup_platform(
hass,
Mock(
platform=Mock(
async_get_backup_agents=AsyncMock(return_value=[BackupAgentTest("test")]),
),
)
await manager.load_platforms()
await hass.async_block_till_done()
assert manager.loaded_platforms
assert len(manager.backup_agents) == 1
assert "Loaded 1 agents" in caplog.text
@@ -303,172 +338,17 @@ async def test_not_loading_bad_platforms(
"""Test loading backup platforms."""
manager = BackupManager(hass)
assert not manager.loaded_platforms
assert not manager.platforms
await _setup_mock_domain(hass)
await _setup_backup_platform(hass)
await manager.load_platforms()
await hass.async_block_till_done()
assert manager.loaded_platforms
assert len(manager.platforms) == 0
assert "Loaded 0 platforms" in caplog.text
@pytest.mark.usefixtures("mock_backup_generation")
async def test_syncing_backup(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
mocked_json_bytes: Mock,
mocked_tarfile: Mock,
) -> None:
"""Test syncing a backup."""
manager = BackupManager(hass)
await _setup_mock_domain(
hass,
Mock(
async_pre_backup=AsyncMock(),
async_post_backup=AsyncMock(),
async_get_backup_agents=AsyncMock(
return_value=[
BackupAgentTest("agent1"),
BackupAgentTest("agent2"),
]
),
),
)
await manager.load_platforms()
await hass.async_block_till_done()
backup = await _mock_backup_generation(
hass, manager, mocked_json_bytes, mocked_tarfile
)
with (
patch(
"homeassistant.components.backup.manager.BackupManager.async_get_backup",
return_value=backup,
),
patch.object(BackupAgentTest, "async_upload_backup") as mocked_upload,
patch(
"homeassistant.components.backup.manager.HAVERSION",
"2025.1.0",
),
):
await manager.async_upload_backup(slug=backup.slug)
assert mocked_upload.call_count == 2
first_call = mocked_upload.call_args_list[0]
assert first_call[1]["path"] == backup.path
assert first_call[1]["metadata"] == BackupUploadMetadata(
date=backup.date,
homeassistant="2025.1.0",
name=backup.name,
protected=backup.protected,
size=backup.size,
slug=backup.slug,
)
assert "Error during backup upload" not in caplog.text
@pytest.mark.usefixtures("mock_backup_generation")
async def test_syncing_backup_with_exception(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
mocked_json_bytes: Mock,
mocked_tarfile: Mock,
) -> None:
"""Test syncing a backup with exception."""
manager = BackupManager(hass)
class ModifiedBackupSyncAgentTest(BackupAgentTest):
async def async_upload_backup(self, **kwargs: Any) -> None:
raise HomeAssistantError("Test exception")
await _setup_mock_domain(
hass,
Mock(
async_pre_backup=AsyncMock(),
async_post_backup=AsyncMock(),
async_get_backup_agents=AsyncMock(
return_value=[
ModifiedBackupSyncAgentTest("agent1"),
ModifiedBackupSyncAgentTest("agent2"),
]
),
),
)
await manager.load_platforms()
await hass.async_block_till_done()
backup = await _mock_backup_generation(
hass, manager, mocked_json_bytes, mocked_tarfile
)
with (
patch(
"homeassistant.components.backup.manager.BackupManager.async_get_backup",
return_value=backup,
),
patch.object(
ModifiedBackupSyncAgentTest,
"async_upload_backup",
) as mocked_upload,
patch(
"homeassistant.components.backup.manager.HAVERSION",
"2025.1.0",
),
):
mocked_upload.side_effect = HomeAssistantError("Test exception")
await manager.async_upload_backup(slug=backup.slug)
assert mocked_upload.call_count == 2
first_call = mocked_upload.call_args_list[0]
assert first_call[1]["path"] == backup.path
assert first_call[1]["metadata"] == BackupUploadMetadata(
date=backup.date,
homeassistant="2025.1.0",
name=backup.name,
protected=backup.protected,
size=backup.size,
slug=backup.slug,
)
assert "Error during backup upload - Test exception" in caplog.text
@pytest.mark.usefixtures("mock_backup_generation")
async def test_syncing_backup_no_agents(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
mocked_json_bytes: Mock,
mocked_tarfile: Mock,
) -> None:
"""Test syncing a backup with no agents."""
manager = BackupManager(hass)
await _setup_mock_domain(
hass,
Mock(
async_pre_backup=AsyncMock(),
async_post_backup=AsyncMock(),
async_get_backup_agents=AsyncMock(return_value=[]),
),
)
await manager.load_platforms()
await hass.async_block_till_done()
backup = await _mock_backup_generation(
hass, manager, mocked_json_bytes, mocked_tarfile
)
with patch(
"homeassistant.components.backup.agent.BackupAgent.async_upload_backup"
) as mocked_async_upload_backup:
await manager.async_upload_backup(slug=backup.slug)
assert mocked_async_upload_backup.call_count == 0
async def test_exception_plaform_pre(
hass: HomeAssistant, mocked_json_bytes: Mock, mocked_tarfile: Mock
) -> None:
@@ -479,9 +359,9 @@ async def test_exception_plaform_pre(
async def _mock_step(hass: HomeAssistant) -> None:
raise HomeAssistantError("Test exception")
await _setup_mock_domain(
await _setup_backup_platform(
hass,
Mock(
platform=Mock(
async_pre_backup=_mock_step,
async_post_backup=AsyncMock(),
async_get_backup_agents=AsyncMock(),
@@ -502,9 +382,9 @@ async def test_exception_plaform_post(
async def _mock_step(hass: HomeAssistant) -> None:
raise HomeAssistantError("Test exception")
await _setup_mock_domain(
await _setup_backup_platform(
hass,
Mock(
platform=Mock(
async_pre_backup=AsyncMock(),
async_post_backup=_mock_step,
async_get_backup_agents=AsyncMock(),
@@ -515,58 +395,6 @@ async def test_exception_plaform_post(
await _mock_backup_generation(hass, manager, mocked_json_bytes, mocked_tarfile)
async def test_loading_platforms_when_running_async_pre_backup_actions(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test loading backup platforms when running post backup actions."""
manager = BackupManager(hass)
assert not manager.loaded_platforms
assert not manager.platforms
await _setup_mock_domain(
hass,
Mock(
async_pre_backup=AsyncMock(),
async_post_backup=AsyncMock(),
async_get_backup_agents=AsyncMock(),
),
)
await manager.async_pre_backup_actions()
assert manager.loaded_platforms
assert len(manager.platforms) == 1
assert "Loaded 1 platforms" in caplog.text
async def test_loading_platforms_when_running_async_post_backup_actions(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test loading backup platforms when running post backup actions."""
manager = BackupManager(hass)
assert not manager.loaded_platforms
assert not manager.platforms
await _setup_mock_domain(
hass,
Mock(
async_pre_backup=AsyncMock(),
async_post_backup=AsyncMock(),
async_get_backup_agents=AsyncMock(),
),
)
await manager.async_post_backup_actions()
assert manager.loaded_platforms
assert len(manager.platforms) == 1
assert "Loaded 1 platforms" in caplog.text
async def test_async_receive_backup(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
@@ -574,6 +402,9 @@ async def test_async_receive_backup(
"""Test receiving a backup file."""
manager = BackupManager(hass)
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
await manager.load_platforms()
size = 2 * 2**16
protocol = Mock(_reading_paused=False)
stream = aiohttp.StreamReader(protocol, 2**16)
@@ -582,8 +413,16 @@ async def test_async_receive_backup(
open_mock = mock_open()
with patch("pathlib.Path.open", open_mock), patch("shutil.move") as mover_mock:
with (
patch("pathlib.Path.open", open_mock),
patch("shutil.copy") as copy_mock,
patch(
"homeassistant.components.backup.manager.read_backup",
return_value=TEST_BASE_BACKUP,
),
):
await manager.async_receive_backup(
agent_ids=[LOCAL_AGENT_ID],
contents=aiohttp.BodyPartReader(
b"--:",
CIMultiDictProxy(
@@ -594,11 +433,11 @@ async def test_async_receive_backup(
)
),
stream,
)
),
)
assert open_mock.call_count == 1
assert mover_mock.call_count == 1
assert mover_mock.mock_calls[0].args[1].name == "abc123.tar"
assert copy_mock.call_count == 1
assert copy_mock.mock_calls[0].args[1].name == "abc123.tar"
async def test_async_trigger_restore(
@@ -607,18 +446,25 @@ async def test_async_trigger_restore(
) -> None:
"""Test trigger restore."""
manager = BackupManager(hass)
manager.loaded_backups = True
manager.backups = {TEST_BACKUP.slug: TEST_BACKUP}
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
await manager.load_platforms()
local_agent = manager.backup_agents[LOCAL_AGENT_ID]
local_agent._backups = {TEST_LOCAL_BACKUP.slug: TEST_LOCAL_BACKUP}
local_agent._loaded_backups = True
with (
patch("pathlib.Path.exists", return_value=True),
patch("pathlib.Path.write_text") as mocked_write_text,
patch("homeassistant.core.ServiceRegistry.async_call") as mocked_service_call,
):
await manager.async_restore_backup(TEST_BACKUP.slug)
await manager.async_restore_backup(
TEST_LOCAL_BACKUP.slug, agent_id=LOCAL_AGENT_ID, password=None
)
assert (
mocked_write_text.call_args[0][0]
== '{"path": "abc123.tar", "password": null}'
== f'{{"path": "{hass.config.path()}/backups/abc123.tar", "password": null}}'
)
assert mocked_service_call.called
@@ -629,18 +475,25 @@ async def test_async_trigger_restore_with_password(
) -> None:
"""Test trigger restore."""
manager = BackupManager(hass)
manager.loaded_backups = True
manager.backups = {TEST_BACKUP.slug: TEST_BACKUP}
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
await manager.load_platforms()
local_agent = manager.backup_agents[LOCAL_AGENT_ID]
local_agent._backups = {TEST_LOCAL_BACKUP.slug: TEST_LOCAL_BACKUP}
local_agent._loaded_backups = True
with (
patch("pathlib.Path.exists", return_value=True),
patch("pathlib.Path.write_text") as mocked_write_text,
patch("homeassistant.core.ServiceRegistry.async_call") as mocked_service_call,
):
await manager.async_restore_backup(slug=TEST_BACKUP.slug, password="abc123")
await manager.async_restore_backup(
slug=TEST_LOCAL_BACKUP.slug, agent_id=LOCAL_AGENT_ID, password="abc123"
)
assert (
mocked_write_text.call_args[0][0]
== '{"path": "abc123.tar", "password": "abc123"}'
== f'{{"path": "{hass.config.path()}/backups/abc123.tar", "password": "abc123"}}'
)
assert mocked_service_call.called
@@ -648,7 +501,14 @@ async def test_async_trigger_restore_with_password(
async def test_async_trigger_restore_missing_backup(hass: HomeAssistant) -> None:
"""Test trigger restore."""
manager = BackupManager(hass)
manager.loaded_backups = True
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
await manager.load_platforms()
local_agent = manager.backup_agents[LOCAL_AGENT_ID]
local_agent._loaded_backups = True
with pytest.raises(HomeAssistantError, match="Backup abc123 not found"):
await manager.async_restore_backup(TEST_BACKUP.slug)
await manager.async_restore_backup(
TEST_LOCAL_BACKUP.slug, agent_id=LOCAL_AGENT_ID, password=None
)

View File

@@ -2,7 +2,7 @@
from pathlib import Path
from typing import Any
from unittest.mock import ANY, AsyncMock, patch
from unittest.mock import ANY, patch
from freezegun.api import FrozenDateTimeFactory
import pytest
@@ -14,7 +14,7 @@ from homeassistant.components.backup.manager import NewBackup
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from .common import TEST_BACKUP, BackupAgentTest, setup_backup_integration
from .common import TEST_LOCAL_BACKUP, BackupAgentTest, setup_backup_integration
from tests.typing import WebSocketGenerator
@@ -45,31 +45,21 @@ async def test_info(
with_hassio: bool,
) -> None:
"""Test getting backup info."""
await setup_backup_integration(hass, with_hassio=with_hassio)
hass.data[DATA_MANAGER].backups = {TEST_BACKUP.slug: TEST_BACKUP}
await setup_backup_integration(
hass, with_hassio=with_hassio, backups=[TEST_LOCAL_BACKUP]
)
client = await hass_ws_client(hass)
await hass.async_block_till_done()
with (
patch(
"homeassistant.components.backup.manager.BackupManager.load_backups",
AsyncMock(),
),
patch(
"homeassistant.components.backup.manager.BackupManager.async_get_backups",
return_value={TEST_BACKUP.slug: TEST_BACKUP},
),
):
await client.send_json_auto_id({"type": "backup/info"})
assert await client.receive_json() == snapshot
await client.send_json_auto_id({"type": "backup/info"})
assert await client.receive_json() == snapshot
@pytest.mark.parametrize(
"backup_content",
[
pytest.param(TEST_BACKUP, id="with_backup_content"),
pytest.param([TEST_LOCAL_BACKUP], id="with_backup_content"),
pytest.param(None, id="without_backup_content"),
],
)
@@ -88,15 +78,14 @@ async def test_details(
backup_content: BaseBackup | None,
) -> None:
"""Test getting backup info."""
await setup_backup_integration(hass, with_hassio=with_hassio)
await setup_backup_integration(
hass, with_hassio=with_hassio, backups=backup_content
)
client = await hass_ws_client(hass)
await hass.async_block_till_done()
with patch(
"homeassistant.components.backup.manager.BackupManager.async_get_backup",
return_value=backup_content,
):
with patch("pathlib.Path.exists", return_value=True):
await client.send_json_auto_id({"type": "backup/details", "slug": "abc123"})
assert await client.receive_json() == snapshot
@@ -159,7 +148,9 @@ async def test_generate(
freezer.move_to("2024-11-13 12:01:00+01:00")
await hass.async_block_till_done()
await client.send_json_auto_id({"type": "backup/generate", **(data or {})})
await client.send_json_auto_id(
{"type": "backup/generate", **{"agent_ids": ["backup.local"]} | (data or {})}
)
for _ in range(number_of_messages):
assert await client.receive_json() == snapshot
@@ -168,16 +159,18 @@ async def test_generate(
@pytest.mark.parametrize(
("params", "expected_extra_call_params"),
[
({}, {}),
({"agent_ids": ["backup.local"]}, {"agent_ids": ["backup.local"]}),
(
{
"addons_included": ["ssl"],
"agent_ids": ["backup.local"],
"database_included": False,
"folders_included": ["media"],
"name": "abc123",
},
{
"addons_included": ["ssl"],
"agent_ids": ["backup.local"],
"database_included": False,
"folders_included": ["media"],
"name": "abc123",
@@ -241,7 +234,9 @@ async def test_restore(
with patch(
"homeassistant.components.backup.manager.BackupManager.async_restore_backup",
):
await client.send_json_auto_id({"type": "backup/restore", "slug": "abc123"})
await client.send_json_auto_id(
{"type": "backup/restore", "slug": "abc123", "agent_id": "backup.local"}
)
assert await client.receive_json() == snapshot
@@ -384,7 +379,7 @@ async def test_agents_info(
) -> None:
"""Test getting backup agents info."""
await setup_backup_integration(hass, with_hassio=with_hassio)
hass.data[DATA_MANAGER].backup_agents = {"domain.test": BackupAgentTest("test")}
hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test")
client = await hass_ws_client(hass)
await hass.async_block_till_done()
@@ -408,7 +403,7 @@ async def test_agents_list_backups(
) -> None:
"""Test backup agents list backups details."""
await setup_backup_integration(hass, with_hassio=with_hassio)
hass.data[DATA_MANAGER].backup_agents = {"domain.test": BackupAgentTest("test")}
hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test")
client = await hass_ws_client(hass)
await hass.async_block_till_done()
@@ -432,7 +427,7 @@ async def test_agents_download(
) -> None:
"""Test WS command to start downloading a backup."""
await setup_backup_integration(hass, with_hassio=with_hassio)
hass.data[DATA_MANAGER].backup_agents = {"domain.test": BackupAgentTest("test")}
hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test")
client = await hass_ws_client(hass)
await hass.async_block_till_done()
@@ -449,7 +444,7 @@ async def test_agents_download(
assert await client.receive_json() == snapshot
assert download_mock.call_args[1] == {
"id": "abc123",
"path": Path(hass.config.path("backup"), "abc123.tar"),
"path": Path(hass.config.path("tmp_backups"), "abc123.tar"),
}
@@ -460,7 +455,7 @@ async def test_agents_download_exception(
) -> None:
"""Test WS command to start downloading a backup throwing an exception."""
await setup_backup_integration(hass)
hass.data[DATA_MANAGER].backup_agents = {"domain.test": BackupAgentTest("test")}
hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test")
client = await hass_ws_client(hass)
await hass.async_block_till_done()

View File

@@ -1,18 +1,18 @@
"""Test the Kitchen Sink backup platform."""
from collections.abc import AsyncGenerator
from pathlib import Path
from io import StringIO
from unittest.mock import patch
from uuid import UUID
import pytest
from homeassistant.components.backup import DOMAIN as BACKUP_DOMAIN, Backup
from homeassistant.components.backup import DOMAIN as BACKUP_DOMAIN, BaseBackup
from homeassistant.components.kitchen_sink import DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.typing import WebSocketGenerator
from tests.typing import ClientSessionGenerator, WebSocketGenerator
@pytest.fixture(autouse=True)
@@ -31,7 +31,7 @@ async def backup_only() -> AsyncGenerator[None]:
@pytest.fixture(autouse=True)
async def setup_integration(hass: HomeAssistant) -> AsyncGenerator[None]:
"""Set up Kitchen Sink integration."""
with patch("homeassistant.components.backup.is_hassio", return_value=True):
with patch("homeassistant.components.backup.is_hassio", return_value=False):
assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}})
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
await hass.async_block_till_done()
@@ -50,7 +50,7 @@ async def test_agents_info(
assert response["success"]
assert response["result"] == {
"agents": [{"agent_id": "kitchen_sink.syncer"}],
"agents": [{"agent_id": "backup.local"}, {"agent_id": "kitchen_sink.syncer"}],
"syncing": False,
}
@@ -100,25 +100,25 @@ async def test_agents_download(
response = await client.receive_json()
assert response["success"]
path = hass.config.path(f"backup/{slug}.tar")
path = hass.config.path(f"tmp_backups/{slug}.tar")
assert f"Downloading backup {backup_id} to {path}" in caplog.text
@pytest.mark.xfail(reason="Disabled until /api/backup/upload accepts a list of agents")
async def test_agents_upload(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
hass_ws_client: WebSocketGenerator,
caplog: pytest.LogCaptureFixture,
hass_supervisor_access_token: str,
) -> None:
"""Test backup agents upload."""
client = await hass_ws_client(hass, hass_supervisor_access_token)
ws_client = await hass_ws_client(hass, hass_supervisor_access_token)
client = await hass_client()
slug = "test-backup"
test_backup = Backup(
test_backup = BaseBackup(
slug=slug,
name="Test",
date="1970-01-01T00:00:00.000Z",
path=Path(hass.config.path(f"backups/{slug}.tar")),
size=0.0,
protected=False,
)
@@ -129,25 +129,24 @@ async def test_agents_upload(
patch(
"homeassistant.components.backup.manager.BackupManager.async_get_backup",
) as fetch_backup,
patch(
"homeassistant.components.backup.manager.read_backup",
return_value=test_backup,
),
):
fetch_backup.return_value = test_backup
await client.send_json_auto_id(
{
"type": "backup/upload",
"data": {
"slug": slug,
},
}
resp = await client.post(
"/api/backup/upload?agent_id=kitchen_sink.syncer",
data={"file": StringIO("test")},
)
response = await client.receive_json()
assert response["success"]
assert resp.status == 201
backup_name = f"{slug}.tar"
assert f"Uploading backup {backup_name}" in caplog.text
with patch("homeassistant.components.kitchen_sink.backup.uuid4", return_value=uuid):
await client.send_json_auto_id({"type": "backup/agents/list_backups"})
response = await client.receive_json()
await ws_client.send_json_auto_id({"type": "backup/agents/list_backups"})
response = await ws_client.receive_json()
assert response["success"]
backup_list = response["result"]