Merge remote-tracking branch 'upstream/allthebackupchanges' into backup_hassio

Erik committed on 2024-11-20 20:10:26 +01:00
8 changed files with 17 additions and 16 deletions

View File

@@ -42,7 +42,7 @@ env:
MYPY_CACHE_VERSION: 9
HA_SHORT_VERSION: "2024.12"
DEFAULT_PYTHON: "3.12"
ALL_PYTHON_VERSIONS: "['3.12', '3.13']"
ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version
# - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
# 10.6 is the current long-term-support
@@ -76,20 +76,21 @@ jobs:
# In case of issues with the partial run, use the following line instead:
# test_full_suite: 'true'
core: ${{ steps.core.outputs.changes }}
-integrations_glob: ${{ steps.info.outputs.integrations_glob }}
integrations: ${{ steps.integrations.outputs.changes }}
pre-commit_cache_key: ${{ steps.generate_pre-commit_cache_key.outputs.key }}
python_cache_key: ${{ steps.generate_python_cache_key.outputs.key }}
requirements: ${{ steps.core.outputs.requirements }}
-mariadb_groups: ${{ steps.info.outputs.mariadb_groups }}
-postgresql_groups: ${{ steps.info.outputs.postgresql_groups }}
python_versions: ${{ steps.info.outputs.python_versions }}
-test_full_suite: ${{ steps.info.outputs.test_full_suite }}
test_group_count: ${{ steps.info.outputs.test_group_count }}
-test_groups: ${{ steps.info.outputs.test_groups }}
-tests_glob: ${{ steps.info.outputs.tests_glob }}
-tests: ${{ steps.info.outputs.tests }}
skip_coverage: ${{ steps.info.outputs.skip_coverage }}
mariadb_groups: "[]"
postgresql_groups: "[]"
test_full_suite: "false"
integrations_glob: "{backup,cloud,hassio,kitchen_sink,demo}"
test_groups: '["backup", "cloud", "hassio", "kitchen_sink", "demo"]'
tests: '["backup", "cloud", "hassio", "kitchen_sink", "demo"]'
tests_glob: "{backup,cloud,hassio,kitchen_sink,demo}"
runs-on: ubuntu-24.04
steps:
- name: Check out code from GitHub
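
Note on the hardcoded outputs above: test_groups and tests stay valid JSON array strings (presumably expanded with fromJSON elsewhere in the workflow), while the *_glob values carry the same names as a brace glob. A quick Python sketch of that relationship; the variable names are illustrative, not taken from the workflow:

    import json

    # The hardcoded output has to remain a parseable JSON array,
    # and the glob form is the same list joined inside braces.
    test_groups = json.loads('["backup", "cloud", "hassio", "kitchen_sink", "demo"]')
    tests_glob = "{" + ",".join(test_groups) + "}"
    assert tests_glob == "{backup,cloud,hassio,kitchen_sink,demo}"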

View File

@@ -56,7 +56,7 @@ class CoreLocalBackupAgent(LocalBackupAgent):
backup_id=base_backup.backup_id,
name=base_backup.name,
date=base_backup.date,
-size=round(backup_path.stat().st_size / 1_048_576, 2),
+size=backup_path.stat().st_size,
protected=base_backup.protected,
)
backups[backup.backup_id] = backup
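
With this change, size is the raw st_size in bytes; the rounding to mebibytes that used to happen here (and in the hunks below) is left to whatever renders the value. A hypothetical display helper, not part of this commit:

    def format_size_mib(size_in_bytes: int) -> str:
        """Render a raw byte count as mebibytes, using the same 1_048_576 divisor."""
        return f"{size_in_bytes / 1_048_576:.2f} MiB"

    # format_size_mib(52_428_800) -> "50.00 MiB"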

View File

@@ -613,7 +613,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
date=date_str,
name=backup_name,
protected=password is not None,
-size=round(size_in_bytes / 1_048_576, 2),
+size=size_in_bytes,
)
success = True
return (backup, tar_file_path)

View File

@@ -11,7 +11,7 @@ class BaseBackup:
date: str
name: str
protected: bool
-size: float
+size: int
def as_dict(self) -> dict:
"""Return a dict representation of this backup."""

View File

@@ -24,5 +24,5 @@ def read_backup(backup_path: Path) -> BaseBackup:
date=cast(str, data["date"]),
name=cast(str, data["name"]),
protected=cast(bool, data.get("protected", False)),
-size=round(backup_path.stat().st_size / 1_048_576, 2),
+size=backup_path.stat().st_size,
)
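
A hypothetical caller after this change; the backups directory path is an assumption made up for the example:

    from pathlib import Path

    # List local backup archives with their raw sizes in bytes.
    for tar_path in Path("/config/backups").glob("*.tar"):
        backup = read_backup(tar_path)
        print(backup.backup_id, backup.name, backup.size)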

View File

@@ -86,7 +86,7 @@ async def handle_details(
@websocket_api.require_admin
@websocket_api.websocket_command(
{
vol.Required("type"): "backup/remove",
vol.Required("type"): "backup/delete",
vol.Required("backup_id"): str,
}
)
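
After the rename, clients delete a backup by sending the new command type over the WebSocket API. A minimal sketch of the message shape; the id value 42 is arbitrary (the connection assigns the next free id), and abc123 is the backup id used in the tests below:

    # Request as a WebSocket API client would send it.
    delete_request = {
        "id": 42,
        "type": "backup/delete",
        "backup_id": "abc123",
    }
    # The reply is the standard result envelope, e.g. {"id": 42, "type": "result", "success": True}.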

View File

@@ -193,7 +193,7 @@ async def test_delete(
await client.send_json_auto_id({"type": "backup/info"})
assert await client.receive_json() == snapshot
await client.send_json_auto_id({"type": "backup/remove", "backup_id": "abc123"})
await client.send_json_auto_id({"type": "backup/delete", "backup_id": "abc123"})
assert await client.receive_json() == snapshot
await client.send_json_auto_id({"type": "backup/info"})
@@ -215,7 +215,7 @@ async def test_agent_delete_backup(
with patch.object(BackupAgentTest, "async_delete_backup") as delete_mock:
await client.send_json_auto_id(
{
"type": "backup/remove",
"type": "backup/delete",
"backup_id": "abc123",
}
)

View File

@@ -165,7 +165,7 @@ async def test_agent_delete_backup(
await client.send_json_auto_id(
{
"type": "backup/remove",
"type": "backup/delete",
"backup_id": backup_id,
}
)