forked from home-assistant/core
Compare commits: 2025.2.0b1...2025.2.2 (232 commits)
Commits in this range, by short SHA:

2d5a75d4f2, e1ad3f05e6, b9280edbfa, 010993fc5f, 713931661e, af06521f66, c32f57f85a, 171061a778,
476ea35bdb, 00e6866664, 201bf95ab8, ff22bbd0e4, fd8d4e937c, 7903348d79, 090dbba06e, af77e69eb0,
23e7638687, 36b722960a, 3dd241a398, b5a9c3d1f6, eca714a45a, 8049699efb, 7c6afd50dc, 42d8889778,
a4c0304e1f, c63e688ba8, 16298b4195, da23eb22db, 4bd1d0199b, efe7050030, 79ff85f517, 73ad4caf94,
e3d649d349, 657e3488ba, 7508c14a53, ac84970da8, 30073f3493, 3abd7b8ba3, 62bc6e4bf6, 5faa189fef,
e09ae1c83d, 7b20299de7, 81e501aba1, 568ac22ce8, c71ab054f1, bea201f9f6, dda90bc04c, a033e4c88d,
42b6f83e7c, cb937bc115, bec569caf9, 3390fb32a8, 3ebb58f780, 30b131d3b9, cd40232beb, f27fe365c5,
1c769418fb, db7c2dab52, 627377872b, 8504162539, 67c6a1d436, 5c383f3d88, 3a88c9d6f4, 5c7cabed1e,
65fde6042f, d5dd0f6ec1, 95410586b1, d5ad91fce3, 04b0d587c5, 72a3c5296c, d6414b9849, c4e2ddd28b,
5687a4d718, a4474b2794, 72a69d7e41, e8314fb286, 30c099ef4e, c506c9080a, 79563f3746, 0764c7e773,
fa83591148, df2b29aef1, da8d300f29, 2c5fd4ee2a, 16d9270833, d8179dacc6, 3dc075f287, b5e4fee9aa,
1c8ced2c2d, 1a5b8cf854, af40bb39ad, 14034ed7f8, d7f0a55568, 1038a849c4, c4b08d3d57, 0e9658b5ff,
0463b90d36, 37f0832c8b, 2005e14d5f, 99219a9a73, 1f967f7f77, 8de64b8b1f, 48c88d8fa1, d478f906df,
09e02493b7, 55c746f909, 834a04ac49, fa9b4c3524, 13bfa82038, 0766b47161, fa8225d0a2, 623c82e5d1,
728a1a4be5, 4bbb3e351b, 044bafd6aa, 1e1069b647, 455af9179b, 30b309d7a1, 7e32342eb2, bb9740991e,
88e5d1c18f, e960053226, b318fb46a0, 523835080b, 5a63138581, 90ddb6cce1, 81783dcfd3, 405cc47157,
809f5eea49, 63c153d671, c8c6eddc65, ddb40cb4a8, 38975775ac, 4fa043e6ff, 433a51f6d5, 48511986bb,
f1128adec4, 54a718c1d7, 63d1dddc76, 7d1b72a581, 6c172705d1, 505f089a73, dbf9e370a8, dc1c2f24e6,
78dcf8b18e, 613168fd62, 5f28e95bdc, 1db5da4037, 6bf5e95089, 1ea23fda10, 21a85c014a, 4c8f716320,
63bd67f6cd, 73b874c5e6, 3b67dc3651, 434a4ebc9f, cb4b7e71af, 4c6fda2096, 9b5c21524c, 76937541f1,
bad966f3ab, 2d1d9bbe5a, e76ff0a0de, fa8d1b4dc4, b3c44ca03a, 6efa6f9687, 3588b88cbb, a51846a8cd,
ec22479733, 3a11e8df6a, a4eab35e01, 829a6271af, 9935528dd3, df35d226d6, 2b510caa1c, 90c357c01f,
321ce698be, ea519268b6, 4687b2e455, bbb03d6731, b9884f72c3, e1105ef2fa, 5450ed8445, 7deb1715dd,
ca2a555037, ae79b09401, e86a633c23, b412164440, 4fe76ec78c, f4166c5390, 3107b81333, 07b85163d5,
c28d465f3b, 00298db465, 6bab5b2c32, 0272d37e88, 26ae498974, c77bca1e44, ad86f9efd5, 71a40d9234,
eb344ba335, eca30717a9, 6e55ba137a, a391f0a7cc, c9fd27555c, 9cd48dd452, a74328e600, 5cec045cac,
04a7c6f15e, 833b17a8ee, a955901d40, 9a55b5e3f7, 3847057444, 659a0df9ab, 74f0af1ba1, ad6c3f9e10,
252b13e63a, 07acabdb36, f479ed4ff0, b70598673b, 08bb027eac, 613f0add76, 9e23ff9a4d, fad3d5d293,
b300fb1fab, aed779172d, 5e646a3cb6, 0764aca2f1, 8babdc0b71, ff64e5a312, 55ac0b0f37, f391438d0a
```diff
@@ -146,6 +146,7 @@ def _extract_backup(
             config_dir,
             dirs_exist_ok=True,
             ignore=shutil.ignore_patterns(*(keep)),
+            ignore_dangling_symlinks=True,
         )
     elif restore_content.restore_database:
         for entry in KEEP_DATABASE:
```
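The only functional change in this hunk is the new `ignore_dangling_symlinks=True` argument, presumably passed to `shutil.copytree`, so a restore no longer aborts when the backup contains symlinks whose targets are gone. A minimal, self-contained sketch of the same stdlib pattern (paths and patterns here are made up for illustration; POSIX only, since it creates a symlink):

```python
import shutil
import tempfile
from pathlib import Path

src = Path(tempfile.mkdtemp())  # stands in for the extracted backup
dst = Path(tempfile.mkdtemp())  # stands in for the config directory
(src / "configuration.yaml").write_text("# restored\n")
(src / "broken").symlink_to(src / "missing-target")  # dangling symlink

keep = ("backups",)  # patterns to leave untouched, assumed for the example
shutil.copytree(
    src,
    dst,
    dirs_exist_ok=True,  # merge into the existing directory tree
    ignore=shutil.ignore_patterns(*keep),
    ignore_dangling_symlinks=True,  # skip the dangling link instead of raising
)
print(sorted(p.name for p in dst.iterdir()))  # ['configuration.yaml']
```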
```diff
@@ -161,6 +161,16 @@ FRONTEND_INTEGRATIONS = {
     # integrations can be removed and database migration status is
     # visible in frontend
     "frontend",
+    # Hassio is an after dependency of backup, after dependencies
+    # are not promoted from stage 2 to earlier stages, so we need to
+    # add it here. Hassio needs to be setup before backup, otherwise
+    # the backup integration will think we are a container/core install
+    # when using HAOS or Supervised install.
+    "hassio",
+    # Backup is an after dependency of frontend, after dependencies
+    # are not promoted from stage 2 to earlier stages, so we need to
+    # add it here.
+    "backup",
 }
 RECORDER_INTEGRATIONS = {
     # Setup after frontend
```
```diff
@@ -144,7 +144,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
             return self.async_create_entry(title=discovery.name, data={})

-        current_addresses = self._async_current_ids()
+        current_addresses = self._async_current_ids(include_ignore=False)
         for discovery_info in async_discovered_service_info(self.hass):
             address = discovery_info.address
             if address in current_addresses or address in self._discovered_devices:
```
```diff
@@ -92,7 +92,7 @@ class AranetConfigFlow(ConfigFlow, domain=DOMAIN):
                 title=self._discovered_devices[address][0], data={}
             )

-        current_addresses = self._async_current_ids()
+        current_addresses = self._async_current_ids(include_ignore=False)
         for discovery_info in async_discovered_service_info(self.hass, False):
             address = discovery_info.address
             if address in current_addresses or address in self._discovered_devices:
```
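Both config-flow hunks above make the same change: `_async_current_ids(include_ignore=False)` leaves the unique IDs of ignored config entries out of the "already configured" set, so a previously ignored device shows up in discovery again and can be set up manually. A toy model of the effect, assuming `include_ignore` behaves as its name suggests:

```python
# Addresses here are shortened stand-ins for Bluetooth MAC addresses.
configured = {"aa:bb", "cc:dd"}  # active config entries
ignored = {"ee:ff"}              # entries the user chose to ignore

def current_ids(include_ignore: bool) -> set[str]:
    return configured | ignored if include_ignore else set(configured)

discovered = ["aa:bb", "ee:ff", "11:22"]

# Old behaviour: the ignored device never reappears in the picker.
print([a for a in discovered if a not in current_ids(include_ignore=True)])
# ['11:22']

# New behaviour: the ignored device can be picked and set up manually.
print([a for a in discovered if a not in current_ids(include_ignore=False)])
# ['ee:ff', '11:22']
```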
```diff
@@ -19,5 +19,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aranet",
   "integration_type": "device",
   "iot_class": "local_push",
-  "requirements": ["aranet4==2.5.0"]
+  "requirements": ["aranet4==2.5.1"]
 }
```
```diff
@@ -1122,6 +1122,7 @@ class PipelineRun:
                 context=user_input.context,
                 language=user_input.language,
                 agent_id=user_input.agent_id,
+                extra_system_prompt=user_input.extra_system_prompt,
             )
             speech = conversation_result.response.speech.get("plain", {}).get(
                 "speech", ""
```
```diff
@@ -63,6 +63,21 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         "async_internal_announce",
         [AssistSatelliteEntityFeature.ANNOUNCE],
     )
+    component.async_register_entity_service(
+        "start_conversation",
+        vol.All(
+            cv.make_entity_service_schema(
+                {
+                    vol.Optional("start_message"): str,
+                    vol.Optional("start_media_id"): str,
+                    vol.Optional("extra_system_prompt"): str,
+                }
+            ),
+            cv.has_at_least_one_key("start_message", "start_media_id"),
+        ),
+        "async_internal_start_conversation",
+        [AssistSatelliteEntityFeature.START_CONVERSATION],
+    )
     hass.data[CONNECTION_TEST_DATA] = {}
     async_register_websocket_api(hass)
     hass.http.register_view(ConnectionTestView())
```
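The schema is wrapped in `vol.All`, so besides per-field validation the call as a whole must carry `start_message` or `start_media_id`. A self-contained sketch of that composition; `has_at_least_one_key` below is a hand-rolled stand-in for the real helper in `homeassistant.helpers.config_validation`:

```python
import voluptuous as vol

def has_at_least_one_key(*keys: str):
    """Rough stand-in for cv.has_at_least_one_key."""
    def validate(obj: dict) -> dict:
        if any(key in obj for key in keys):
            return obj
        raise vol.Invalid(f"must contain at least one of {', '.join(keys)}")
    return validate

schema = vol.All(
    vol.Schema(
        {
            vol.Optional("start_message"): str,
            vol.Optional("start_media_id"): str,
            vol.Optional("extra_system_prompt"): str,
        }
    ),
    has_at_least_one_key("start_message", "start_media_id"),
)

print(schema({"start_message": "Hi"}))  # passes validation
try:
    schema({"extra_system_prompt": "context only"})
except vol.Invalid as err:
    print(err)  # must contain at least one of start_message, start_media_id
```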
```diff
@@ -26,3 +26,6 @@ class AssistSatelliteEntityFeature(IntFlag):

     ANNOUNCE = 1
     """Device supports remotely triggered announcements."""
+
+    START_CONVERSATION = 2
+    """Device supports starting conversations."""
```
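Because the feature set is an `IntFlag`, the capability checks elsewhere in this diff (the service registration above, the broadcast intent below) are plain bitwise tests:

```python
from enum import IntFlag

class AssistSatelliteEntityFeature(IntFlag):
    """Mirror of the flag class above, for illustration."""
    ANNOUNCE = 1
    START_CONVERSATION = 2

supported = AssistSatelliteEntityFeature.ANNOUNCE  # announce-only satellite

print(bool(supported & AssistSatelliteEntityFeature.ANNOUNCE))            # True
print(bool(supported & AssistSatelliteEntityFeature.START_CONVERSATION))  # False

supported |= AssistSatelliteEntityFeature.START_CONVERSATION  # add the new capability
print(bool(supported & AssistSatelliteEntityFeature.START_CONVERSATION))  # True
```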
```diff
@@ -10,7 +10,7 @@ import logging
 import time
 from typing import Any, Final, Literal, final

-from homeassistant.components import media_source, stt, tts
+from homeassistant.components import conversation, media_source, stt, tts
 from homeassistant.components.assist_pipeline import (
     OPTION_PREFERRED,
     AudioSettings,
```
```diff
@@ -27,6 +27,7 @@ from homeassistant.components.tts import (
     generate_media_source_id as tts_generate_media_source_id,
 )
 from homeassistant.core import Context, callback
+from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import entity
 from homeassistant.helpers.entity import EntityDescription
```
```diff
@@ -117,6 +118,7 @@ class AssistSatelliteEntity(entity.Entity):

     _run_has_tts: bool = False
     _is_announcing = False
+    _extra_system_prompt: str | None = None
     _wake_word_intercept_future: asyncio.Future[str | None] | None = None
     _attr_tts_options: dict[str, Any] | None = None
     _pipeline_task: asyncio.Task | None = None
```
```diff
@@ -216,6 +218,59 @@ class AssistSatelliteEntity(entity.Entity):
         """
         raise NotImplementedError

+    async def async_internal_start_conversation(
+        self,
+        start_message: str | None = None,
+        start_media_id: str | None = None,
+        extra_system_prompt: str | None = None,
+    ) -> None:
+        """Start a conversation from the satellite.
+
+        If start_media_id is not provided, message is synthesized to
+        audio with the selected pipeline.
+
+        If start_media_id is provided, it is played directly. It is possible
+        to omit the message and the satellite will not show any text.
+
+        Calls async_start_conversation.
+        """
+        await self._cancel_running_pipeline()
+
+        # The Home Assistant built-in agent doesn't support conversations.
+        pipeline = async_get_pipeline(self.hass, self._resolve_pipeline())
+        if pipeline.conversation_engine == conversation.HOME_ASSISTANT_AGENT:
+            raise HomeAssistantError(
+                "Built-in conversation agent does not support starting conversations"
+            )
+
+        if start_message is None:
+            start_message = ""
+
+        announcement = await self._resolve_announcement_media_id(
+            start_message, start_media_id
+        )
+
+        if self._is_announcing:
+            raise SatelliteBusyError
+
+        self._is_announcing = True
+        # Provide our start info to the LLM so it understands context of incoming message
+        if extra_system_prompt is not None:
+            self._extra_system_prompt = extra_system_prompt
+        else:
+            self._extra_system_prompt = start_message or None
+
+        try:
+            await self.async_start_conversation(announcement)
+        finally:
+            self._is_announcing = False
+
+    async def async_start_conversation(
+        self, start_announcement: AssistSatelliteAnnouncement
+    ) -> None:
+        """Start a conversation from the satellite."""
+        raise NotImplementedError
+
     async def async_accept_pipeline_from_satellite(
         self,
         audio_stream: AsyncIterable[bytes],
```
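Worth noting in the method above is the busy-flag handshake: `_is_announcing` is set before delegating to `async_start_conversation` and always cleared in `finally`, so a concurrent caller gets `SatelliteBusyError` rather than a second audio stream. A runnable toy model of just that handshake (the class names are stand-ins, not the real entity):

```python
import asyncio

class SatelliteBusyError(Exception):
    """Stand-in for the error raised when an announcement is in flight."""

class ToySatellite:
    def __init__(self) -> None:
        self._is_announcing = False

    async def start_conversation(self, message: str) -> None:
        if self._is_announcing:
            raise SatelliteBusyError
        self._is_announcing = True
        try:
            await asyncio.sleep(0.01)  # pretend to play the announcement
        finally:
            self._is_announcing = False  # always cleared, even on failure

async def main() -> None:
    sat = ToySatellite()
    first = asyncio.create_task(sat.start_conversation("Hello?"))
    await asyncio.sleep(0)  # let the first call set the busy flag
    try:
        await sat.start_conversation("Second call")
    except SatelliteBusyError:
        print("busy, try again later")
    await first

asyncio.run(main())
```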
```diff
@@ -226,6 +281,10 @@ class AssistSatelliteEntity(entity.Entity):
         """Triggers an Assist pipeline in Home Assistant from a satellite."""
         await self._cancel_running_pipeline()

+        # Consume system prompt in first pipeline
+        extra_system_prompt = self._extra_system_prompt
+        self._extra_system_prompt = None
+
         if self._wake_word_intercept_future and start_stage in (
             PipelineStage.WAKE_WORD,
             PipelineStage.STT,
```
```diff
@@ -302,6 +361,7 @@ class AssistSatelliteEntity(entity.Entity):
                 ),
                 start_stage=start_stage,
                 end_stage=end_stage,
+                conversation_extra_system_prompt=extra_system_prompt,
             ),
             f"{self.entity_id}_pipeline",
         )
```
```diff
@@ -7,6 +7,9 @@
   "services": {
     "announce": {
       "service": "mdi:bullhorn"
     },
+    "start_conversation": {
+      "service": "mdi:forum"
+    }
   }
 }
```
```diff
@@ -1,5 +1,7 @@
 """Assist Satellite intents."""

+from typing import Final
+
 import voluptuous as vol

 from homeassistant.core import HomeAssistant
@@ -7,6 +9,8 @@ from homeassistant.helpers import entity_registry as er, intent

 from .const import DOMAIN, AssistSatelliteEntityFeature

+EXCLUDED_DOMAINS: Final[set[str]] = {"voip"}
+

 async def async_setup_intents(hass: HomeAssistant) -> None:
     """Set up the intents."""
```
```diff
@@ -30,19 +34,36 @@ class BroadcastIntentHandler(intent.IntentHandler):
         ent_reg = er.async_get(hass)

         # Find all assist satellite entities that are not the one invoking the intent
-        entities = {
-            entity: entry
-            for entity in hass.states.async_entity_ids(DOMAIN)
-            if (entry := ent_reg.async_get(entity))
-            and entry.supported_features & AssistSatelliteEntityFeature.ANNOUNCE
-        }
+        entities: dict[str, er.RegistryEntry] = {}
+        for entity in hass.states.async_entity_ids(DOMAIN):
+            entry = ent_reg.async_get(entity)
+            if (
+                (entry is None)
+                or (
+                    # Supports announce
+                    not (
+                        entry.supported_features & AssistSatelliteEntityFeature.ANNOUNCE
+                    )
+                )
+                # Not the invoking device
+                or (intent_obj.device_id and (entry.device_id == intent_obj.device_id))
+            ):
+                # Skip satellite
+                continue

-        if intent_obj.device_id:
-            entities = {
-                entity: entry
-                for entity, entry in entities.items()
-                if entry.device_id != intent_obj.device_id
-            }
+            # Check domain of config entry against excluded domains
+            if (
+                entry.config_entry_id
+                and (
+                    config_entry := hass.config_entries.async_get_entry(
+                        entry.config_entry_id
+                    )
+                )
+                and (config_entry.domain in EXCLUDED_DOMAINS)
+            ):
+                continue
+
+            entities[entity] = entry

         await hass.services.async_call(
             DOMAIN,
```
```diff
@@ -54,7 +75,6 @@ class BroadcastIntentHandler(intent.IntentHandler):
         )

         response = intent_obj.create_response()
-        response.async_set_speech("Done")
         response.response_type = intent.IntentResponseType.ACTION_DONE
         response.async_set_results(
             success_results=[
```
```diff
@@ -14,3 +14,23 @@ announce:
     required: false
     selector:
       text:
+start_conversation:
+  target:
+    entity:
+      domain: assist_satellite
+      supported_features:
+        - assist_satellite.AssistSatelliteEntityFeature.START_CONVERSATION
+  fields:
+    start_message:
+      required: false
+      example: "You left the lights on in the living room. Turn them off?"
+      selector:
+        text:
+    start_media_id:
+      required: false
+      selector:
+        text:
+    extra_system_prompt:
+      required: false
+      selector:
+        text:
```
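With the registration and schema above in place, the action can be invoked like any other entity service. A hypothetical call from Python code running inside Home Assistant (the entity ID is made up):

```python
# Fragment assuming a `hass` instance is in scope, e.g. inside an integration.
await hass.services.async_call(
    "assist_satellite",
    "start_conversation",
    {
        "entity_id": "assist_satellite.kitchen",
        "start_message": "You left the lights on in the living room. Turn them off?",
    },
    blocking=True,
)
```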
```diff
@@ -25,6 +25,24 @@
           "description": "The media ID to announce instead of using text-to-speech."
         }
       }
     },
+    "start_conversation": {
+      "name": "Start Conversation",
+      "description": "Start a conversation from a satellite.",
+      "fields": {
+        "start_message": {
+          "name": "Message",
+          "description": "The message to start with."
+        },
+        "start_media_id": {
+          "name": "Media ID",
+          "description": "The media ID to start with instead of using text-to-speech."
+        },
+        "extra_system_prompt": {
+          "name": "Extra system prompt",
+          "description": "Provide background information to the AI about the request."
+        }
+      }
+    }
   }
 }
```
```diff
@@ -26,15 +26,19 @@ from .manager import (
     BackupReaderWriterError,
     CoreBackupReaderWriter,
     CreateBackupEvent,
+    CreateBackupStage,
+    CreateBackupState,
     IdleEvent,
     IncorrectPasswordError,
     ManagerBackup,
     NewBackup,
     RestoreBackupEvent,
+    RestoreBackupStage,
     RestoreBackupState,
     WrittenBackup,
 )
-from .models import AddonInfo, AgentBackup, Folder
+from .models import AddonInfo, AgentBackup, BackupNotFound, Folder
+from .util import suggested_filename, suggested_filename_from_name_date
 from .websocket import async_register_websocket_handlers

 __all__ = [
@@ -44,10 +48,13 @@ __all__ = [
     "BackupAgentError",
     "BackupAgentPlatformProtocol",
     "BackupManagerError",
+    "BackupNotFound",
     "BackupPlatformProtocol",
     "BackupReaderWriter",
     "BackupReaderWriterError",
     "CreateBackupEvent",
+    "CreateBackupStage",
+    "CreateBackupState",
     "Folder",
     "IdleEvent",
     "IncorrectPasswordError",
@@ -55,9 +62,12 @@ __all__ = [
     "ManagerBackup",
     "NewBackup",
     "RestoreBackupEvent",
+    "RestoreBackupStage",
     "RestoreBackupState",
     "WrittenBackup",
     "async_get_manager",
+    "suggested_filename",
+    "suggested_filename_from_name_date",
 ]

 CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
```
```diff
@@ -11,13 +11,7 @@ from propcache.api import cached_property

 from homeassistant.core import HomeAssistant, callback

-from .models import AgentBackup, BackupError
-
-
-class BackupAgentError(BackupError):
-    """Base class for backup agent errors."""
-
-    error_code = "backup_agent_error"
+from .models import AgentBackup, BackupAgentError


 class BackupAgentUnreachableError(BackupAgentError):
@@ -94,11 +88,16 @@ class LocalBackupAgent(BackupAgent):

     @abc.abstractmethod
     def get_backup_path(self, backup_id: str) -> Path:
-        """Return the local path to a backup.
+        """Return the local path to an existing backup.

         The method should return the path to the backup file with the specified id.
+        Raises BackupAgentError if the backup does not exist.
         """

+    @abc.abstractmethod
+    def get_new_backup_path(self, backup: AgentBackup) -> Path:
+        """Return the local path to a new backup."""
+

 class BackupAgentPlatformProtocol(Protocol):
     """Define the format of backup platforms which implement backup agents."""
```
```diff
@@ -13,8 +13,8 @@ from homeassistant.helpers.hassio import is_hassio

 from .agent import BackupAgent, LocalBackupAgent
 from .const import DOMAIN, LOGGER
-from .models import AgentBackup
-from .util import read_backup
+from .models import AgentBackup, BackupNotFound
+from .util import read_backup, suggested_filename


 async def async_get_backup_agents(
@@ -39,7 +39,7 @@ class CoreLocalBackupAgent(LocalBackupAgent):
         super().__init__()
         self._hass = hass
         self._backup_dir = Path(hass.config.path("backups"))
-        self._backups: dict[str, AgentBackup] = {}
+        self._backups: dict[str, tuple[AgentBackup, Path]] = {}
         self._loaded_backups = False

     async def _load_backups(self) -> None:
@@ -49,13 +49,13 @@ class CoreLocalBackupAgent(LocalBackupAgent):
         self._backups = backups
         self._loaded_backups = True

-    def _read_backups(self) -> dict[str, AgentBackup]:
+    def _read_backups(self) -> dict[str, tuple[AgentBackup, Path]]:
         """Read backups from disk."""
-        backups: dict[str, AgentBackup] = {}
+        backups: dict[str, tuple[AgentBackup, Path]] = {}
         for backup_path in self._backup_dir.glob("*.tar"):
             try:
                 backup = read_backup(backup_path)
-                backups[backup.backup_id] = backup
+                backups[backup.backup_id] = (backup, backup_path)
             except (OSError, TarError, json.JSONDecodeError, KeyError) as err:
                 LOGGER.warning("Unable to read backup %s: %s", backup_path, err)
         return backups
@@ -76,13 +76,13 @@ class CoreLocalBackupAgent(LocalBackupAgent):
         **kwargs: Any,
     ) -> None:
         """Upload a backup."""
-        self._backups[backup.backup_id] = backup
+        self._backups[backup.backup_id] = (backup, self.get_new_backup_path(backup))

     async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
         """List backups."""
         if not self._loaded_backups:
             await self._load_backups()
-        return list(self._backups.values())
+        return [backup for backup, _ in self._backups.values()]

     async def async_get_backup(
         self,
@@ -93,10 +93,10 @@ class CoreLocalBackupAgent(LocalBackupAgent):
         if not self._loaded_backups:
             await self._load_backups()

-        if not (backup := self._backups.get(backup_id)):
+        if backup_id not in self._backups:
             return None

-        backup_path = self.get_backup_path(backup_id)
+        backup, backup_path = self._backups[backup_id]
         if not await self._hass.async_add_executor_job(backup_path.exists):
             LOGGER.debug(
                 (
@@ -112,15 +112,28 @@ class CoreLocalBackupAgent(LocalBackupAgent):
         return backup

     def get_backup_path(self, backup_id: str) -> Path:
-        """Return the local path to a backup."""
-        return self._backup_dir / f"{backup_id}.tar"
+        """Return the local path to an existing backup.
+
+        Raises BackupAgentError if the backup does not exist.
+        """
+        try:
+            return self._backups[backup_id][1]
+        except KeyError as err:
+            raise BackupNotFound(f"Backup {backup_id} does not exist") from err
+
+    def get_new_backup_path(self, backup: AgentBackup) -> Path:
+        """Return the local path to a new backup."""
+        return self._backup_dir / suggested_filename(backup)

     async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None:
         """Delete a backup file."""
-        if await self.async_get_backup(backup_id) is None:
-            return
+        if not self._loaded_backups:
+            await self._load_backups()

-        backup_path = self.get_backup_path(backup_id)
+        try:
+            backup_path = self.get_backup_path(backup_id)
+        except BackupNotFound:
+            return
         await self._hass.async_add_executor_job(backup_path.unlink, True)
         LOGGER.debug("Deleted backup located at %s", backup_path)
         self._backups.pop(backup_id)
```
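The cache now maps each backup ID to a `(metadata, path)` tuple, so the agent no longer assumes a `<backup_id>.tar` naming scheme and a missing ID fails loudly instead of yielding a path that does not exist. A toy version of the same shape (`FileNotFoundError` stands in for `BackupNotFound`):

```python
from pathlib import Path

backups: dict[str, tuple[str, Path]] = {
    "abc123": ("My backup", Path("/config/backups/My_backup_2025-02-05_03.04_05000000.tar")),
}

def get_backup_path(backup_id: str) -> Path:
    try:
        return backups[backup_id][1]  # the stored path, whatever its filename
    except KeyError as err:
        raise FileNotFoundError(f"Backup {backup_id} does not exist") from err

print(get_backup_path("abc123").name)
```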
```diff
@@ -2,8 +2,6 @@

 from __future__ import annotations

-import asyncio
-from collections.abc import Callable
 from dataclasses import dataclass, field, replace
 import datetime as dt
 from datetime import datetime, timedelta
@@ -252,7 +250,7 @@ class RetentionConfig:
             """Delete backups older than days."""
             self._schedule_next(manager)

-            def _backups_filter(
+            def _delete_filter(
                 backups: dict[str, ManagerBackup],
             ) -> dict[str, ManagerBackup]:
                 """Return backups older than days to delete."""
@@ -269,7 +267,9 @@ class RetentionConfig:
                     < now
                 }

-            await _delete_filtered_backups(manager, _backups_filter)
+            await manager.async_delete_filtered_backups(
+                include_filter=_automatic_backups_filter, delete_filter=_delete_filter
+            )

         manager.remove_next_delete_event = async_call_later(
             manager.hass, timedelta(days=1), _delete_backups
@@ -521,74 +521,21 @@ class CreateBackupParametersDict(TypedDict, total=False):
     password: str | None


-async def _delete_filtered_backups(
-    manager: BackupManager,
-    backup_filter: Callable[[dict[str, ManagerBackup]], dict[str, ManagerBackup]],
-) -> None:
-    """Delete backups parsed with a filter.
-
-    :param manager: The backup manager.
-    :param backup_filter: A filter that should return the backups to delete.
-    """
-    backups, get_agent_errors = await manager.async_get_backups()
-    if get_agent_errors:
-        LOGGER.debug(
-            "Error getting backups; continuing anyway: %s",
-            get_agent_errors,
-        )
-
-    # only delete backups that are created with the saved automatic settings
-    backups = {
+def _automatic_backups_filter(
+    backups: dict[str, ManagerBackup],
+) -> dict[str, ManagerBackup]:
+    """Return automatic backups."""
+    return {
         backup_id: backup
         for backup_id, backup in backups.items()
         if backup.with_automatic_settings
     }

-    LOGGER.debug("Total automatic backups: %s", backups)
-
-    filtered_backups = backup_filter(backups)
-
-    if not filtered_backups:
-        return
-
-    # always delete oldest backup first
-    filtered_backups = dict(
-        sorted(
-            filtered_backups.items(),
-            key=lambda backup_item: backup_item[1].date,
-        )
-    )
-
-    if len(filtered_backups) >= len(backups):
-        # Never delete the last backup.
-        last_backup = filtered_backups.popitem()
-        LOGGER.debug("Keeping the last backup: %s", last_backup)
-
-    LOGGER.debug("Backups to delete: %s", filtered_backups)
-
-    if not filtered_backups:
-        return
-
-    backup_ids = list(filtered_backups)
-    delete_results = await asyncio.gather(
-        *(manager.async_delete_backup(backup_id) for backup_id in filtered_backups)
-    )
-    agent_errors = {
-        backup_id: error
-        for backup_id, error in zip(backup_ids, delete_results, strict=True)
-        if error
-    }
-    if agent_errors:
-        LOGGER.error(
-            "Error deleting old copies: %s",
-            agent_errors,
-        )
-

 async def delete_backups_exceeding_configured_count(manager: BackupManager) -> None:
     """Delete backups exceeding the configured retention count."""

-    def _backups_filter(
+    def _delete_filter(
         backups: dict[str, ManagerBackup],
     ) -> dict[str, ManagerBackup]:
         """Return oldest backups more numerous than copies to delete."""
@@ -603,4 +550,6 @@ async def delete_backups_exceeding_configured_count(manager: BackupManager) -> None:
         )[: max(len(backups) - manager.config.data.retention.copies, 0)]
     )

-    await _delete_filtered_backups(manager, _backups_filter)
+    await manager.async_delete_filtered_backups(
+        include_filter=_automatic_backups_filter, delete_filter=_delete_filter
+    )
```
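The retention logic is thus split into two composable filters: an include filter that scopes deletion to automatic backups, and a delete filter that picks the actual victims from that scope. A runnable toy demonstration of the composition:

```python
from dataclasses import dataclass

@dataclass
class Backup:
    date: str
    with_automatic_settings: bool

backups = {
    "a": Backup("2025-01-01", True),
    "b": Backup("2025-01-02", True),
    "c": Backup("2025-01-03", False),  # manual backup, never in scope
}

def automatic_filter(candidates: dict[str, Backup]) -> dict[str, Backup]:
    return {k: b for k, b in candidates.items() if b.with_automatic_settings}

def delete_filter(candidates: dict[str, Backup]) -> dict[str, Backup]:
    # e.g. "older than 2025-01-02"
    return {k: b for k, b in candidates.items() if b.date < "2025-01-02"}

considered = automatic_filter(backups)
print(sorted(delete_filter(considered)))  # ['a']; 'c' was never considered
```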
```diff
@@ -21,6 +21,7 @@ from . import util
 from .agent import BackupAgent
 from .const import DATA_MANAGER
 from .manager import BackupManager
+from .models import BackupNotFound


 @callback
@@ -69,13 +70,16 @@ class DownloadBackupView(HomeAssistantView):
             CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
         }

-        if not password or not backup.protected:
-            return await self._send_backup_no_password(
-                request, headers, backup_id, agent_id, agent, manager
-            )
-        return await self._send_backup_with_password(
-            hass, request, headers, backup_id, agent_id, password, agent, manager
-        )
+        try:
+            if not password or not backup.protected:
+                return await self._send_backup_no_password(
+                    request, headers, backup_id, agent_id, agent, manager
+                )
+            return await self._send_backup_with_password(
+                hass, request, headers, backup_id, agent_id, password, agent, manager
+            )
+        except BackupNotFound:
+            return Response(status=HTTPStatus.NOT_FOUND)

     async def _send_backup_no_password(
         self,
```
```diff
@@ -4,11 +4,13 @@

 from __future__ import annotations

 import abc
 import asyncio
+from collections import defaultdict
 from collections.abc import AsyncIterator, Callable, Coroutine
 from dataclasses import dataclass, replace
 from enum import StrEnum
 import hashlib
 import io
+from itertools import chain
 import json
 from pathlib import Path, PurePath
 import shutil
@@ -50,7 +52,14 @@ from .const import (
     EXCLUDE_FROM_BACKUP,
     LOGGER,
 )
-from .models import AgentBackup, BackupError, BackupManagerError, BaseBackup, Folder
+from .models import (
+    AgentBackup,
+    BackupError,
+    BackupManagerError,
+    BackupReaderWriterError,
+    BaseBackup,
+    Folder,
+)
 from .store import BackupStore
 from .util import (
     AsyncIteratorReader,
@@ -274,12 +283,6 @@ class BackupReaderWriter(abc.ABC):
         """Get restore events after core restart."""


-class BackupReaderWriterError(BackupError):
-    """Backup reader/writer error."""
-
-    error_code = "backup_reader_writer_error"
-
-
 class IncorrectPasswordError(BackupReaderWriterError):
     """Raised when the password is incorrect."""
```
```diff
@@ -558,8 +561,15 @@ class BackupManager:
             return_exceptions=True,
         )
         for idx, result in enumerate(list_backups_results):
+            agent_id = agent_ids[idx]
             if isinstance(result, BackupAgentError):
-                agent_errors[agent_ids[idx]] = result
+                agent_errors[agent_id] = result
                 continue
+            if isinstance(result, Exception):
+                agent_errors[agent_id] = result
+                LOGGER.error(
+                    "Unexpected error for %s: %s", agent_id, result, exc_info=result
+                )
+                continue
             if isinstance(result, BaseException):
                 raise result  # unexpected error
@@ -586,7 +596,7 @@ class BackupManager:
                     name=agent_backup.name,
                     with_automatic_settings=with_automatic_settings,
                 )
-            backups[backup_id].agents[agent_ids[idx]] = AgentBackupStatus(
+            backups[backup_id].agents[agent_id] = AgentBackupStatus(
                 protected=agent_backup.protected,
                 size=agent_backup.size,
             )
@@ -609,8 +619,15 @@ class BackupManager:
             return_exceptions=True,
         )
         for idx, result in enumerate(get_backup_results):
+            agent_id = agent_ids[idx]
             if isinstance(result, BackupAgentError):
-                agent_errors[agent_ids[idx]] = result
+                agent_errors[agent_id] = result
                 continue
+            if isinstance(result, Exception):
+                agent_errors[agent_id] = result
+                LOGGER.error(
+                    "Unexpected error for %s: %s", agent_id, result, exc_info=result
+                )
+                continue
             if isinstance(result, BaseException):
                 raise result  # unexpected error
@@ -638,7 +655,7 @@ class BackupManager:
                     name=result.name,
                     with_automatic_settings=with_automatic_settings,
                 )
-            backup.agents[agent_ids[idx]] = AgentBackupStatus(
+            backup.agents[agent_id] = AgentBackupStatus(
                 protected=result.protected,
                 size=result.size,
             )
@@ -661,10 +678,13 @@ class BackupManager:
             return None
         return with_automatic_settings

-    async def async_delete_backup(self, backup_id: str) -> dict[str, Exception]:
+    async def async_delete_backup(
+        self, backup_id: str, *, agent_ids: list[str] | None = None
+    ) -> dict[str, Exception]:
         """Delete a backup."""
         agent_errors: dict[str, Exception] = {}
-        agent_ids = list(self.backup_agents)
+        if agent_ids is None:
+            agent_ids = list(self.backup_agents)

         delete_backup_results = await asyncio.gather(
             *(
@@ -674,8 +694,15 @@ class BackupManager:
             return_exceptions=True,
         )
         for idx, result in enumerate(delete_backup_results):
+            agent_id = agent_ids[idx]
             if isinstance(result, BackupAgentError):
-                agent_errors[agent_ids[idx]] = result
+                agent_errors[agent_id] = result
                 continue
+            if isinstance(result, Exception):
+                agent_errors[agent_id] = result
+                LOGGER.error(
+                    "Unexpected error for %s: %s", agent_id, result, exc_info=result
+                )
+                continue
             if isinstance(result, BaseException):
                 raise result  # unexpected error
```
```diff
@@ -685,6 +712,106 @@ class BackupManager:

         return agent_errors

+    async def async_delete_filtered_backups(
+        self,
+        *,
+        include_filter: Callable[[dict[str, ManagerBackup]], dict[str, ManagerBackup]],
+        delete_filter: Callable[[dict[str, ManagerBackup]], dict[str, ManagerBackup]],
+    ) -> None:
+        """Delete backups parsed with a filter.
+
+        :param include_filter: A filter that should return the backups to consider for
+          deletion. Note: The newest of the backups returned by include_filter will
+          unconditionally be kept, even if delete_filter returns all backups.
+        :param delete_filter: A filter that should return the backups to delete.
+        """
+        backups, get_agent_errors = await self.async_get_backups()
+        if get_agent_errors:
+            LOGGER.debug(
+                "Error getting backups; continuing anyway: %s",
+                get_agent_errors,
+            )
+
+        # Run the include filter first to ensure we only consider backups that
+        # should be included in the deletion process.
+        backups = include_filter(backups)
+        backups_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(dict)
+        for backup_id, backup in backups.items():
+            for agent_id in backup.agents:
+                backups_by_agent[agent_id][backup_id] = backup
+
+        LOGGER.debug("Backups returned by include filter: %s", backups)
+        LOGGER.debug(
+            "Backups returned by include filter by agent: %s",
+            {agent_id: list(backups) for agent_id, backups in backups_by_agent.items()},
+        )
+
+        backups_to_delete = delete_filter(backups)
+
+        LOGGER.debug("Backups returned by delete filter: %s", backups_to_delete)
+
+        if not backups_to_delete:
+            return
+
+        # always delete oldest backup first
+        backups_to_delete_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(
+            dict
+        )
+        for backup_id, backup in sorted(
+            backups_to_delete.items(),
+            key=lambda backup_item: backup_item[1].date,
+        ):
+            for agent_id in backup.agents:
+                backups_to_delete_by_agent[agent_id][backup_id] = backup
+        LOGGER.debug(
+            "Backups returned by delete filter by agent: %s",
+            {
+                agent_id: list(backups)
+                for agent_id, backups in backups_to_delete_by_agent.items()
+            },
+        )
+        for agent_id, to_delete_from_agent in backups_to_delete_by_agent.items():
+            if len(to_delete_from_agent) >= len(backups_by_agent[agent_id]):
+                # Never delete the last backup.
+                last_backup = to_delete_from_agent.popitem()
+                LOGGER.debug(
+                    "Keeping the last backup %s for agent %s", last_backup, agent_id
+                )
+
+        LOGGER.debug(
+            "Backups to delete by agent: %s",
+            {
+                agent_id: list(backups)
+                for agent_id, backups in backups_to_delete_by_agent.items()
+            },
+        )
+
+        backup_ids_to_delete: dict[str, set[str]] = defaultdict(set)
+        for agent_id, to_delete in backups_to_delete_by_agent.items():
+            for backup_id in to_delete:
+                backup_ids_to_delete[backup_id].add(agent_id)
+
+        if not backup_ids_to_delete:
+            return
+
+        backup_ids = list(backup_ids_to_delete)
+        delete_results = await asyncio.gather(
+            *(
+                self.async_delete_backup(backup_id, agent_ids=list(agent_ids))
+                for backup_id, agent_ids in backup_ids_to_delete.items()
+            )
+        )
+        agent_errors = {
+            backup_id: error
+            for backup_id, error in zip(backup_ids, delete_results, strict=True)
+            if error
+        }
+        if agent_errors:
+            LOGGER.error(
+                "Error deleting old copies: %s",
+                agent_errors,
+            )
+
     async def async_receive_backup(
         self,
         *,
```
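The "never delete the last backup" rule now applies per agent: candidates are sorted oldest first, so when every backup of an agent is up for deletion, `popitem()` removes the newest one from the kill list. A small runnable sketch of that trick:

```python
# Values are ISO dates; sorting oldest-first makes the newest the last insert.
to_delete = dict(
    sorted(
        {"old": "2025-01-01", "new": "2025-01-03", "mid": "2025-01-02"}.items(),
        key=lambda item: item[1],
    )
)
all_for_agent = dict(to_delete)  # every backup of this agent is a candidate

if len(to_delete) >= len(all_for_agent):
    kept = to_delete.popitem()  # dicts pop in insertion order: the newest
    print("keeping", kept)      # keeping ('new', '2025-01-03')

print(sorted(to_delete))        # ['mid', 'old']
```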
```diff
@@ -762,7 +889,7 @@ class BackupManager:
             password=None,
         )
         await written_backup.release_stream()
-        self.known_backups.add(written_backup.backup, agent_errors)
+        self.known_backups.add(written_backup.backup, agent_errors, [])
         return written_backup.backup.backup_id

     async def async_create_backup(
```
```diff
@@ -886,19 +1013,30 @@ class BackupManager:
         with_automatic_settings: bool,
     ) -> NewBackup:
         """Initiate generating a backup."""
-        if not agent_ids:
-            raise BackupManagerError("At least one agent must be selected")
-        if invalid_agents := [
+        unavailable_agents = [
             agent_id for agent_id in agent_ids if agent_id not in self.backup_agents
-        ]:
-            raise BackupManagerError(f"Invalid agents selected: {invalid_agents}")
+        ]
+        if not (
+            available_agents := [
+                agent_id for agent_id in agent_ids if agent_id in self.backup_agents
+            ]
+        ):
+            raise BackupManagerError(
+                f"At least one available backup agent must be selected, got {agent_ids}"
+            )
+        if unavailable_agents:
+            LOGGER.warning(
+                "Backup agents %s are not available, will backup to %s",
+                unavailable_agents,
+                available_agents,
+            )
         if include_all_addons and include_addons:
             raise BackupManagerError(
                 "Cannot include all addons and specify specific addons"
             )

         backup_name = (
-            name
+            (name if name is None else name.strip())
             or f"{'Automatic' if with_automatic_settings else 'Custom'} backup {HAVERSION}"
         )
         extra_metadata = extra_metadata or {}
@@ -908,7 +1046,7 @@ class BackupManager:
             new_backup,
             self._backup_task,
         ) = await self._reader_writer.async_create_backup(
-            agent_ids=agent_ids,
+            agent_ids=available_agents,
             backup_name=backup_name,
             extra_metadata=extra_metadata
             | {
@@ -927,7 +1065,9 @@ class BackupManager:
             raise BackupManagerError(str(err)) from err

         backup_finish_task = self._backup_finish_task = self.hass.async_create_task(
-            self._async_finish_backup(agent_ids, with_automatic_settings, password),
+            self._async_finish_backup(
+                available_agents, unavailable_agents, with_automatic_settings, password
+            ),
            name="backup_manager_finish_backup",
        )
        if not raise_task_error:
```
```diff
@@ -944,7 +1084,11 @@ class BackupManager:
         return new_backup

     async def _async_finish_backup(
-        self, agent_ids: list[str], with_automatic_settings: bool, password: str | None
+        self,
+        available_agents: list[str],
+        unavailable_agents: list[str],
+        with_automatic_settings: bool,
+        password: str | None,
     ) -> None:
         """Finish a backup."""
         if TYPE_CHECKING:
@@ -963,7 +1107,7 @@ class BackupManager:
             LOGGER.debug(
                 "Generated new backup with backup_id %s, uploading to agents %s",
                 written_backup.backup.backup_id,
-                agent_ids,
+                available_agents,
             )
             self.async_on_backup_event(
                 CreateBackupEvent(
@@ -976,13 +1120,15 @@ class BackupManager:
             try:
                 agent_errors = await self._async_upload_backup(
                     backup=written_backup.backup,
-                    agent_ids=agent_ids,
+                    agent_ids=available_agents,
                     open_stream=written_backup.open_stream,
                     password=password,
                 )
             finally:
                 await written_backup.release_stream()
-            self.known_backups.add(written_backup.backup, agent_errors)
+            self.known_backups.add(
+                written_backup.backup, agent_errors, unavailable_agents
+            )
             if not agent_errors:
                 if with_automatic_settings:
                     # create backup was successful, update last_completed_automatic_backup
@@ -991,7 +1137,7 @@ class BackupManager:
             backup_success = True

         if with_automatic_settings:
-            self._update_issue_after_agent_upload(agent_errors)
+            self._update_issue_after_agent_upload(agent_errors, unavailable_agents)
         # delete old backups more numerous than copies
         # try this regardless of agent errors above
         await delete_backups_exceeding_configured_count(self)
```
```diff
@@ -1151,10 +1297,10 @@ class BackupManager:
         )

     def _update_issue_after_agent_upload(
-        self, agent_errors: dict[str, Exception]
+        self, agent_errors: dict[str, Exception], unavailable_agents: list[str]
     ) -> None:
         """Update issue registry after a backup is uploaded to agents."""
-        if not agent_errors:
+        if not agent_errors and not unavailable_agents:
             ir.async_delete_issue(self.hass, DOMAIN, "automatic_backup_failed")
             return
         ir.async_create_issue(
@@ -1166,7 +1312,17 @@ class BackupManager:
             learn_more_url="homeassistant://config/backup",
             severity=ir.IssueSeverity.WARNING,
             translation_key="automatic_backup_failed_upload_agents",
-            translation_placeholders={"failed_agents": ", ".join(agent_errors)},
+            translation_placeholders={
+                "failed_agents": ", ".join(
+                    chain(
+                        (
+                            self.backup_agents[agent_id].name
+                            for agent_id in agent_errors
+                        ),
+                        unavailable_agents,
+                    )
+                )
+            },
         )

     async def async_can_decrypt_on_download(
@@ -1233,11 +1389,12 @@ class KnownBackups:
         self,
         backup: AgentBackup,
         agent_errors: dict[str, Exception],
+        unavailable_agents: list[str],
     ) -> None:
         """Add a backup."""
         self._backups[backup.backup_id] = KnownBackup(
             backup_id=backup.backup_id,
-            failed_agent_ids=list(agent_errors),
+            failed_agent_ids=list(chain(agent_errors, unavailable_agents)),
         )
         self._manager.store.save()
```
```diff
@@ -1343,13 +1500,31 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         manager = self._hass.data[DATA_MANAGER]

         agent_config = manager.config.data.agents.get(self._local_agent_id)
-        if agent_config and not agent_config.protected:
+        if (
+            self._local_agent_id in agent_ids
+            and agent_config
+            and not agent_config.protected
+        ):
             password = None

+        backup = AgentBackup(
+            addons=[],
+            backup_id=backup_id,
+            database_included=include_database,
+            date=date_str,
+            extra_metadata=extra_metadata,
+            folders=[],
+            homeassistant_included=True,
+            homeassistant_version=HAVERSION,
+            name=backup_name,
+            protected=password is not None,
+            size=0,
+        )
+
         local_agent_tar_file_path = None
         if self._local_agent_id in agent_ids:
             local_agent = manager.local_backup_agents[self._local_agent_id]
-            local_agent_tar_file_path = local_agent.get_backup_path(backup_id)
+            local_agent_tar_file_path = local_agent.get_new_backup_path(backup)

         on_progress(
             CreateBackupEvent(
@@ -1391,19 +1566,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
                 # ValueError from json_bytes
                 raise BackupReaderWriterError(str(err)) from err
             else:
-                backup = AgentBackup(
-                    addons=[],
-                    backup_id=backup_id,
-                    database_included=include_database,
-                    date=date_str,
-                    extra_metadata=extra_metadata,
-                    folders=[],
-                    homeassistant_included=True,
-                    homeassistant_version=HAVERSION,
-                    name=backup_name,
-                    protected=password is not None,
-                    size=size_in_bytes,
-                )
+                backup = replace(backup, size=size_in_bytes)

                 async_add_executor_job = self._hass.async_add_executor_job

@@ -1517,7 +1680,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         manager = self._hass.data[DATA_MANAGER]
         if self._local_agent_id in agent_ids:
             local_agent = manager.local_backup_agents[self._local_agent_id]
-            tar_file_path = local_agent.get_backup_path(backup.backup_id)
+            tar_file_path = local_agent.get_new_backup_path(backup)
             await async_add_executor_job(make_backup_dir, tar_file_path.parent)
             await async_add_executor_job(shutil.move, temp_file, tar_file_path)
         else:
```
```diff
@@ -41,12 +41,6 @@ class BaseBackup:
     homeassistant_version: str | None  # None if homeassistant_included is False
     name: str

-    def as_frontend_json(self) -> dict:
-        """Return a dict representation of this backup for sending to frontend."""
-        return {
-            key: val for key, val in asdict(self).items() if key != "extra_metadata"
-        }
-

 @dataclass(frozen=True, kw_only=True)
 class AgentBackup(BaseBackup):
@@ -83,7 +77,25 @@ class BackupError(HomeAssistantError):
     error_code = "unknown"


+class BackupAgentError(BackupError):
+    """Base class for backup agent errors."""
+
+    error_code = "backup_agent_error"
+
+
 class BackupManagerError(BackupError):
     """Backup manager error."""

     error_code = "backup_manager_error"
+
+
+class BackupReaderWriterError(BackupError):
+    """Backup reader/writer error."""
+
+    error_code = "backup_reader_writer_error"
+
+
+class BackupNotFound(BackupAgentError, BackupManagerError):
+    """Raised when a backup is not found."""
+
+    error_code = "backup_not_found"
```
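`BackupNotFound` deliberately inherits from both error branches, so call sites that only catch agent errors and call sites that only catch manager errors both handle it. A self-contained mirror of the hierarchy above:

```python
class BackupError(Exception):
    error_code = "unknown"

class BackupAgentError(BackupError):
    error_code = "backup_agent_error"

class BackupManagerError(BackupError):
    error_code = "backup_manager_error"

class BackupNotFound(BackupAgentError, BackupManagerError):
    error_code = "backup_not_found"

err = BackupNotFound("backup abc123 does not exist")
print(isinstance(err, BackupAgentError))    # True
print(isinstance(err, BackupManagerError))  # True
```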
```diff
@@ -4,6 +4,7 @@ from __future__ import annotations

 import asyncio
 from collections.abc import AsyncIterator, Callable, Coroutine
+from concurrent.futures import CancelledError, Future
 import copy
 from dataclasses import dataclass, replace
 from io import BytesIO
@@ -12,6 +13,7 @@ import os
 from pathlib import Path, PurePath
 from queue import SimpleQueue
 import tarfile
+import threading
 from typing import IO, Any, Self, cast

 import aiohttp
@@ -20,8 +22,8 @@ from securetar import SecureTarError, SecureTarFile, SecureTarReadError
 from homeassistant.backup_restore import password_to_key
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
+from homeassistant.util import dt as dt_util
 from homeassistant.util.json import JsonObjectType, json_loads_object
-from homeassistant.util.thread import ThreadWithException

 from .const import BUF_SIZE, LOGGER
 from .models import AddonInfo, AgentBackup, Folder
@@ -117,6 +119,17 @@ def read_backup(backup_path: Path) -> AgentBackup:
     )


+def suggested_filename_from_name_date(name: str, date_str: str) -> str:
+    """Suggest a filename for the backup."""
+    date = dt_util.parse_datetime(date_str, raise_on_error=True)
+    return "_".join(f"{name} {date.strftime('%Y-%m-%d %H.%M %S%f')}.tar".split())
+
+
+def suggested_filename(backup: AgentBackup) -> str:
+    """Suggest a filename for the backup."""
+    return suggested_filename_from_name_date(backup.name, backup.date)
+
+
 def validate_password(path: Path, password: str | None) -> bool:
     """Validate the password."""
     with tarfile.open(path, "r:", bufsize=BUF_SIZE) as backup_file:
```
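The filename helper collapses every whitespace run to a single underscore via the `split()`/`join` round trip. A standalone sketch of the output format (`datetime.fromisoformat` stands in for `dt_util.parse_datetime`):

```python
from datetime import datetime

def suggested_filename_from_name_date(name: str, date_str: str) -> str:
    date = datetime.fromisoformat(date_str)  # stand-in for dt_util.parse_datetime
    return "_".join(f"{name} {date.strftime('%Y-%m-%d %H.%M %S%f')}.tar".split())

print(suggested_filename_from_name_date(
    "Automatic backup 2025.2.1", "2025-02-05T03:04:05+00:00"
))
# Automatic_backup_2025.2.1_2025-02-05_03.04_05000000.tar
```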
```diff
@@ -155,23 +168,38 @@ class AsyncIteratorReader:

     def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None:
         """Initialize the wrapper."""
+        self._aborted = False
         self._hass = hass
         self._stream = stream
         self._buffer: bytes | None = None
+        self._next_future: Future[bytes | None] | None = None
         self._pos: int = 0

     async def _next(self) -> bytes | None:
         """Get the next chunk from the iterator."""
         return await anext(self._stream, None)

+    def abort(self) -> None:
+        """Abort the reader."""
+        self._aborted = True
+        if self._next_future is not None:
+            self._next_future.cancel()
+
     def read(self, n: int = -1, /) -> bytes:
         """Read data from the iterator."""
         result = bytearray()
         while n < 0 or len(result) < n:
             if not self._buffer:
-                self._buffer = asyncio.run_coroutine_threadsafe(
+                self._next_future = asyncio.run_coroutine_threadsafe(
                     self._next(), self._hass.loop
-                ).result()
+                )
+                if self._aborted:
+                    self._next_future.cancel()
+                    raise AbortCipher
+                try:
+                    self._buffer = self._next_future.result()
+                except CancelledError as err:
+                    raise AbortCipher from err
                 self._pos = 0
             if not self._buffer:
                 # The stream is exhausted
@@ -193,9 +221,11 @@ class AsyncIteratorWriter:

     def __init__(self, hass: HomeAssistant) -> None:
         """Initialize the wrapper."""
+        self._aborted = False
         self._hass = hass
         self._pos: int = 0
         self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1)
+        self._write_future: Future[bytes | None] | None = None

     def __aiter__(self) -> Self:
         """Return the iterator."""
@@ -207,13 +237,28 @@ class AsyncIteratorWriter:
             return data
         raise StopAsyncIteration

+    def abort(self) -> None:
+        """Abort the writer."""
+        self._aborted = True
+        if self._write_future is not None:
+            self._write_future.cancel()
+
     def tell(self) -> int:
         """Return the current position in the iterator."""
         return self._pos

     def write(self, s: bytes, /) -> int:
         """Write data to the iterator."""
-        asyncio.run_coroutine_threadsafe(self._queue.put(s), self._hass.loop).result()
+        self._write_future = asyncio.run_coroutine_threadsafe(
+            self._queue.put(s), self._hass.loop
+        )
+        if self._aborted:
+            self._write_future.cancel()
+            raise AbortCipher
+        try:
+            self._write_future.result()
+        except CancelledError as err:
+            raise AbortCipher from err
         self._pos += len(s)
         return len(s)

@@ -403,7 +448,9 @@ def _encrypt_backup(
 class _CipherWorkerStatus:
     done: asyncio.Event
     error: Exception | None = None
-    thread: ThreadWithException
+    reader: AsyncIteratorReader
+    thread: threading.Thread
+    writer: AsyncIteratorWriter


 class _CipherBackupStreamer:
@@ -456,11 +503,13 @@ class _CipherBackupStreamer:
         stream = await self._open_stream()
         reader = AsyncIteratorReader(self._hass, stream)
         writer = AsyncIteratorWriter(self._hass)
-        worker = ThreadWithException(
+        worker = threading.Thread(
             target=self._cipher_func,
             args=[reader, writer, self._password, on_done, self.size(), self._nonces],
         )
-        worker_status = _CipherWorkerStatus(done=asyncio.Event(), thread=worker)
+        worker_status = _CipherWorkerStatus(
+            done=asyncio.Event(), reader=reader, thread=worker, writer=writer
+        )
         self._workers.append(worker_status)
         worker.start()
         return writer

@@ -468,9 +517,8 @@ class _CipherBackupStreamer:
     async def wait(self) -> None:
         """Wait for the worker threads to finish."""
         for worker in self._workers:
-            if not worker.thread.is_alive():
-                continue
-            worker.thread.raise_exc(AbortCipher)
+            worker.reader.abort()
+            worker.writer.abort()
         await asyncio.gather(*(worker.done.wait() for worker in self._workers))
```
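The abort mechanism replaces `ThreadWithException.raise_exc` with something far less exotic: the worker thread blocks on the `concurrent.futures.Future` returned by `run_coroutine_threadsafe`, and the event-loop side unblocks it by cancelling that future. A runnable model of the pattern under that assumption:

```python
import asyncio
import threading
from concurrent.futures import CancelledError, Future

async def main() -> None:
    loop = asyncio.get_running_loop()
    queue: asyncio.Queue[bytes] = asyncio.Queue()
    future_box: list[Future] = []

    def worker() -> None:
        fut = asyncio.run_coroutine_threadsafe(queue.get(), loop)
        future_box.append(fut)
        try:
            fut.result()  # blocks: nothing is ever queued
        except CancelledError:
            print("worker unblocked via abort")

    thread = threading.Thread(target=worker)
    thread.start()
    while not future_box:        # wait until the worker is parked on the future
        await asyncio.sleep(0.01)
    future_box[0].cancel()       # what reader.abort()/writer.abort() boil down to
    await asyncio.to_thread(thread.join)

asyncio.run(main())
```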
```diff
@@ -15,7 +15,7 @@ from .manager import (
     IncorrectPasswordError,
     ManagerStateEvent,
 )
-from .models import Folder
+from .models import BackupNotFound, Folder


 @callback
@@ -57,7 +57,7 @@ async def handle_info(
             "agent_errors": {
                 agent_id: str(err) for agent_id, err in agent_errors.items()
             },
-            "backups": [backup.as_frontend_json() for backup in backups.values()],
+            "backups": list(backups.values()),
             "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup,
             "last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup,
             "last_non_idle_event": manager.last_non_idle_event,
@@ -91,7 +91,7 @@ async def handle_details(
             "agent_errors": {
                 agent_id: str(err) for agent_id, err in agent_errors.items()
             },
-            "backup": backup.as_frontend_json() if backup else None,
+            "backup": backup,
         },
     )

@@ -151,6 +151,8 @@ async def handle_restore(
             restore_folders=msg.get("restore_folders"),
             restore_homeassistant=msg["restore_homeassistant"],
         )
+    except BackupNotFound:
+        connection.send_error(msg["id"], "backup_not_found", "Backup not found")
     except IncorrectPasswordError:
         connection.send_error(msg["id"], "password_incorrect", "Incorrect password")
     else:
@@ -179,6 +181,8 @@ async def handle_can_decrypt_on_download(
             agent_id=msg["agent_id"],
             password=msg.get("password"),
         )
+    except BackupNotFound:
+        connection.send_error(msg["id"], "backup_not_found", "Backup not found")
     except IncorrectPasswordError:
         connection.send_error(msg["id"], "password_incorrect", "Incorrect password")
     except DecryptOnDowloadNotSupported:
@@ -199,7 +203,7 @@ async def handle_can_decrypt_on_download(
             vol.Optional("include_database", default=True): bool,
             vol.Optional("include_folders"): [vol.Coerce(Folder)],
             vol.Optional("include_homeassistant", default=True): bool,
-            vol.Optional("name"): str,
+            vol.Optional("name"): vol.Any(str, None),
             vol.Optional("password"): vol.Any(str, None),
         }
     )
```
```diff
@@ -5,7 +5,7 @@ from __future__ import annotations
 import datetime
 import logging
 import platform
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any

 from bleak_retry_connector import BleakSlotManager
 from bluetooth_adapters import (
@@ -80,6 +80,7 @@ from .const import (
     CONF_DETAILS,
     CONF_PASSIVE,
     CONF_SOURCE_CONFIG_ENTRY_ID,
+    CONF_SOURCE_DEVICE_ID,
     CONF_SOURCE_DOMAIN,
     CONF_SOURCE_MODEL,
     DOMAIN,
@@ -297,7 +298,11 @@ async def async_discover_adapters(


 async def async_update_device(
-    hass: HomeAssistant, entry: ConfigEntry, adapter: str, details: AdapterDetails
+    hass: HomeAssistant,
+    entry: ConfigEntry,
+    adapter: str,
+    details: AdapterDetails,
+    via_device_id: str | None = None,
 ) -> None:
     """Update device registry entry.

@@ -306,7 +311,8 @@ async def async_update_device(
     update the device with the new location so they can
     figure out where the adapter is.
     """
-    dr.async_get(hass).async_get_or_create(
+    device_registry = dr.async_get(hass)
+    device_entry = device_registry.async_get_or_create(
         config_entry_id=entry.entry_id,
         name=adapter_human_name(adapter, details[ADAPTER_ADDRESS]),
         connections={(dr.CONNECTION_BLUETOOTH, details[ADAPTER_ADDRESS])},
@@ -315,6 +321,11 @@ async def async_update_device(
         sw_version=details.get(ADAPTER_SW_VERSION),
         hw_version=details.get(ADAPTER_HW_VERSION),
     )
+    if via_device_id and (via_device_entry := device_registry.async_get(via_device_id)):
+        kwargs: dict[str, Any] = {"via_device_id": via_device_id}
+        if not device_entry.area_id and via_device_entry.area_id:
+            kwargs["area_id"] = via_device_entry.area_id
+        device_registry.async_update_device(device_entry.id, **kwargs)


 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@@ -349,6 +360,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             entry,
             source_entry.title,
             details,
            entry.data.get(CONF_SOURCE_DEVICE_ID),
         )
         return True
     manager = _get_manager(hass)
```
```diff
@@ -181,10 +181,16 @@ def async_register_scanner(
     source_domain: str | None = None,
     source_model: str | None = None,
     source_config_entry_id: str | None = None,
+    source_device_id: str | None = None,
 ) -> CALLBACK_TYPE:
     """Register a BleakScanner."""
     return _get_manager(hass).async_register_hass_scanner(
-        scanner, connection_slots, source_domain, source_model, source_config_entry_id
+        scanner,
+        connection_slots,
+        source_domain,
+        source_model,
+        source_config_entry_id,
+        source_device_id,
     )
```
@@ -37,6 +37,7 @@ from .const import (
|
||||
CONF_PASSIVE,
|
||||
CONF_SOURCE,
|
||||
CONF_SOURCE_CONFIG_ENTRY_ID,
|
||||
CONF_SOURCE_DEVICE_ID,
|
||||
CONF_SOURCE_DOMAIN,
|
||||
CONF_SOURCE_MODEL,
|
||||
DOMAIN,
|
||||
@@ -139,7 +140,7 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
title=adapter_title(adapter, details), data={}
|
||||
)
|
||||
|
||||
configured_addresses = self._async_current_ids()
|
||||
configured_addresses = self._async_current_ids(include_ignore=False)
|
||||
bluetooth_adapters = get_adapters()
|
||||
await bluetooth_adapters.refresh()
|
||||
self._adapters = bluetooth_adapters.adapters
|
||||
@@ -154,12 +155,8 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
and not (system == "Linux" and details[ADAPTER_ADDRESS] == DEFAULT_ADDRESS)
|
||||
]
|
||||
if not unconfigured_adapters:
|
||||
ignored_adapters = len(
|
||||
self._async_current_entries(include_ignore=True)
|
||||
) - len(self._async_current_entries(include_ignore=False))
|
||||
return self.async_abort(
|
||||
reason="no_adapters",
|
||||
description_placeholders={"ignored_adapters": str(ignored_adapters)},
|
||||
)
|
||||
if len(unconfigured_adapters) == 1:
|
||||
self._adapter = list(self._adapters)[0]
|
||||
@@ -194,6 +191,7 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
CONF_SOURCE_MODEL: user_input[CONF_SOURCE_MODEL],
|
||||
CONF_SOURCE_DOMAIN: user_input[CONF_SOURCE_DOMAIN],
|
||||
CONF_SOURCE_CONFIG_ENTRY_ID: user_input[CONF_SOURCE_CONFIG_ENTRY_ID],
|
||||
CONF_SOURCE_DEVICE_ID: user_input[CONF_SOURCE_DEVICE_ID],
|
||||
}
|
||||
self._abort_if_unique_id_configured(updates=data)
|
||||
manager = get_manager()
|
||||
|
||||
@@ -22,7 +22,7 @@ CONF_SOURCE: Final = "source"
|
||||
CONF_SOURCE_DOMAIN: Final = "source_domain"
|
||||
CONF_SOURCE_MODEL: Final = "source_model"
|
||||
CONF_SOURCE_CONFIG_ENTRY_ID: Final = "source_config_entry_id"
|
||||
|
||||
CONF_SOURCE_DEVICE_ID: Final = "source_device_id"
|
||||
|
||||
SOURCE_LOCAL: Final = "local"
|
||||
|
||||
|
||||
@@ -25,6 +25,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from .const import (
|
||||
CONF_SOURCE,
|
||||
CONF_SOURCE_CONFIG_ENTRY_ID,
|
||||
CONF_SOURCE_DEVICE_ID,
|
||||
CONF_SOURCE_DOMAIN,
|
||||
CONF_SOURCE_MODEL,
|
||||
DOMAIN,
|
||||
@@ -254,6 +255,7 @@ class HomeAssistantBluetoothManager(BluetoothManager):
|
||||
source_domain: str | None = None,
|
||||
source_model: str | None = None,
|
||||
source_config_entry_id: str | None = None,
|
||||
source_device_id: str | None = None,
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Register a scanner."""
|
||||
cancel = self.async_register_scanner(scanner, connection_slots)
|
||||
@@ -261,9 +263,6 @@ class HomeAssistantBluetoothManager(BluetoothManager):
|
||||
isinstance(scanner, BaseHaRemoteScanner)
|
||||
and source_domain
|
||||
and source_config_entry_id
|
||||
and not self.hass.config_entries.async_entry_for_domain_unique_id(
|
||||
DOMAIN, scanner.source
|
||||
)
|
||||
):
|
||||
self.hass.async_create_task(
|
||||
self.hass.config_entries.flow.async_init(
|
||||
@@ -274,6 +273,7 @@ class HomeAssistantBluetoothManager(BluetoothManager):
|
||||
CONF_SOURCE_DOMAIN: source_domain,
|
||||
CONF_SOURCE_MODEL: source_model,
|
||||
CONF_SOURCE_CONFIG_ENTRY_ID: source_config_entry_id,
|
||||
CONF_SOURCE_DEVICE_ID: source_device_id,
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
@@ -16,11 +16,11 @@
"quality_scale": "internal",
"requirements": [
"bleak==0.22.3",
"bleak-retry-connector==3.8.0",
"bluetooth-adapters==0.21.1",
"bleak-retry-connector==3.8.1",
"bluetooth-adapters==0.21.4",
"bluetooth-auto-recovery==1.4.2",
"bluetooth-data-tools==1.22.0",
"dbus-fast==2.30.2",
"habluetooth==3.14.0"
"bluetooth-data-tools==1.23.4",
"dbus-fast==2.33.0",
"habluetooth==3.21.1"
]
}

@@ -411,7 +411,7 @@ def ble_device_matches(
) and service_data_uuid not in service_info.service_data:
return False

if manufacturer_id := matcher.get(MANUFACTURER_ID):
if (manufacturer_id := matcher.get(MANUFACTURER_ID)) is not None:
if manufacturer_id not in service_info.manufacturer_data:
return False


@@ -23,7 +23,7 @@
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"no_adapters": "No unconfigured Bluetooth adapters found. There are {ignored_adapters} ignored adapters."
"no_adapters": "No unconfigured Bluetooth adapters found."
}
},
"options": {

@@ -132,7 +132,7 @@ class BTHomeConfigFlow(ConfigFlow, domain=DOMAIN):

return self._async_get_or_create_entry()

current_addresses = self._async_current_ids()
current_addresses = self._async_current_ids(include_ignore=False)
for discovery_info in async_discovered_service_info(self.hass, False):
address = discovery_info.address
if address in current_addresses or address in self._discovered_devices:

@@ -20,5 +20,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/bthome",
"iot_class": "local_push",
"requirements": ["bthome-ble==3.9.1"]
"requirements": ["bthome-ble==3.12.3"]
}

@@ -67,6 +67,16 @@ SENSOR_DESCRIPTIONS = {
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
),
# Conductivity (µS/cm)
(
BTHomeSensorDeviceClass.CONDUCTIVITY,
Units.CONDUCTIVITY,
): SensorEntityDescription(
key=f"{BTHomeSensorDeviceClass.CONDUCTIVITY}_{Units.CONDUCTIVITY}",
device_class=SensorDeviceClass.CONDUCTIVITY,
native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS_PER_CM,
state_class=SensorStateClass.MEASUREMENT,
),
# Count (-)
(BTHomeSensorDeviceClass.COUNT, None): SensorEntityDescription(
key=str(BTHomeSensorDeviceClass.COUNT),
@@ -99,6 +109,12 @@ SENSOR_DESCRIPTIONS = {
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
),
# Directions (°)
(BTHomeExtendedSensorDeviceClass.DIRECTION, Units.DEGREE): SensorEntityDescription(
key=f"{BTHomeExtendedSensorDeviceClass.DIRECTION}_{Units.DEGREE}",
native_unit_of_measurement=DEGREE,
state_class=SensorStateClass.MEASUREMENT,
),
# Distance (mm)
(
BTHomeSensorDeviceClass.DISTANCE,
@@ -221,6 +237,16 @@ SENSOR_DESCRIPTIONS = {
native_unit_of_measurement=UnitOfPower.WATT,
state_class=SensorStateClass.MEASUREMENT,
),
# Precipitation (mm)
(
BTHomeExtendedSensorDeviceClass.PRECIPITATION,
Units.LENGTH_MILLIMETERS,
): SensorEntityDescription(
key=f"{BTHomeExtendedSensorDeviceClass.PRECIPITATION}_{Units.LENGTH_MILLIMETERS}",
device_class=SensorDeviceClass.PRECIPITATION,
native_unit_of_measurement=UnitOfLength.MILLIMETERS,
state_class=SensorStateClass.MEASUREMENT,
),
# Pressure (mbar)
(BTHomeSensorDeviceClass.PRESSURE, Units.PRESSURE_MBAR): SensorEntityDescription(
key=f"{BTHomeSensorDeviceClass.PRESSURE}_{Units.PRESSURE_MBAR}",
@@ -357,16 +383,6 @@ SENSOR_DESCRIPTIONS = {
native_unit_of_measurement=UnitOfVolume.LITERS,
state_class=SensorStateClass.TOTAL,
),
# Conductivity (µS/cm)
(
BTHomeSensorDeviceClass.CONDUCTIVITY,
Units.CONDUCTIVITY,
): SensorEntityDescription(
key=f"{BTHomeSensorDeviceClass.CONDUCTIVITY}_{Units.CONDUCTIVITY}",
device_class=SensorDeviceClass.CONDUCTIVITY,
native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS_PER_CM,
state_class=SensorStateClass.MEASUREMENT,
),
}

@@ -1175,12 +1175,17 @@ async def async_handle_snapshot_service(
f"Cannot write `{snapshot_file}`, no access to path; `allowlist_external_dirs` may need to be adjusted in `configuration.yaml`"
)

async with asyncio.timeout(CAMERA_IMAGE_TIMEOUT):
image = (
await _async_get_stream_image(camera, wait_for_next_keyframe=True)
if camera.use_stream_for_stills
else await camera.async_camera_image()
)
try:
async with asyncio.timeout(CAMERA_IMAGE_TIMEOUT):
image = (
await _async_get_stream_image(camera, wait_for_next_keyframe=True)
if camera.use_stream_for_stills
else await camera.async_camera_image()
)
except TimeoutError as err:
raise HomeAssistantError(
f"Unable to get snapshot: Timed out after {CAMERA_IMAGE_TIMEOUT} seconds"
) from err

if image is None:
return
@@ -1194,7 +1199,7 @@ async def async_handle_snapshot_service(
try:
await hass.async_add_executor_job(_write_image, snapshot_file, image)
except OSError as err:
_LOGGER.error("Can't write image to file: %s", err)
raise HomeAssistantError(f"Can't write image to file: {err}") from err


async def async_handle_play_stream_service(

@@ -29,6 +29,7 @@ from homeassistant.components.google_assistant import helpers as google_helpers
from homeassistant.components.homeassistant import exposed_entities
from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.components.system_health import get_info as get_system_health_info
from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
@@ -107,6 +108,7 @@ def async_setup(hass: HomeAssistant) -> None:
hass.http.register_view(CloudRegisterView)
hass.http.register_view(CloudResendConfirmView)
hass.http.register_view(CloudForgotPasswordView)
hass.http.register_view(DownloadSupportPackageView)

_CLOUD_ERRORS.update(
{
@@ -389,6 +391,59 @@ class CloudForgotPasswordView(HomeAssistantView):
return self.json_message("ok")


class DownloadSupportPackageView(HomeAssistantView):
"""Download support package view."""

url = "/api/cloud/support_package"
name = "api:cloud:support_package"

def _generate_markdown(
self, hass_info: dict[str, Any], domains_info: dict[str, dict[str, str]]
) -> str:
def get_domain_table_markdown(domain_info: dict[str, Any]) -> str:
if len(domain_info) == 0:
return "No information available\n"

markdown = ""
first = True
for key, value in domain_info.items():
markdown += f"{key} | {value}\n"
if first:
markdown += "--- | ---\n"
first = False
return markdown + "\n"

markdown = "## System Information\n\n"
markdown += get_domain_table_markdown(hass_info)

for domain, domain_info in domains_info.items():
domain_info_md = get_domain_table_markdown(domain_info)
markdown += (
f"<details><summary>{domain}</summary>\n\n"
f"{domain_info_md}"
"</details>\n\n"
)

return markdown

async def get(self, request: web.Request) -> web.Response:
"""Download support package file."""

hass = request.app[KEY_HASS]
domain_health = await get_system_health_info(hass)

hass_info = domain_health.pop("homeassistant", {})
markdown = self._generate_markdown(hass_info, domain_health)

return web.Response(
body=markdown,
content_type="text/markdown",
headers={
"Content-Disposition": 'attachment; filename="support_package.md"'
},
)


@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "cloud/remove_data"})
@websocket_api.async_response

@@ -140,8 +140,10 @@ def get_accounts(client, version):
API_ACCOUNT_ID: account[API_V3_ACCOUNT_ID],
API_ACCOUNT_NAME: account[API_ACCOUNT_NAME],
API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY],
API_ACCOUNT_AMOUNT: account[API_ACCOUNT_AVALIABLE][API_ACCOUNT_VALUE]
+ account[API_ACCOUNT_HOLD][API_ACCOUNT_VALUE],
API_ACCOUNT_AMOUNT: (
float(account[API_ACCOUNT_AVALIABLE][API_ACCOUNT_VALUE])
+ float(account[API_ACCOUNT_HOLD][API_ACCOUNT_VALUE])
),
ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_V3_TYPE_VAULT,
}
for account in accounts

@@ -302,7 +302,8 @@ def config_entries_progress(
[
flw
for flw in hass.config_entries.flow.async_progress()
if flw["context"]["source"] != config_entries.SOURCE_USER
if flw["context"]["source"]
not in (config_entries.SOURCE_RECONFIGURE, config_entries.SOURCE_USER)
],
)

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==2.2.0", "home-assistant-intents==2025.1.28"]
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.2.5"]
}

@@ -14,7 +14,7 @@
],
"quality_scale": "internal",
"requirements": [
"aiodhcpwatcher==1.0.2",
"aiodhcpwatcher==1.0.3",
"aiodiscover==2.1.0",
"cached-ipaddress==0.8.0"
]

@@ -44,9 +44,7 @@ class DiscovergyUpdateCoordinator(DataUpdateCoordinator[Reading]):
)
except InvalidLogin as err:
raise ConfigEntryAuthFailed(
f"Auth expired while fetching last reading for meter {self.meter.meter_id}"
"Auth expired while fetching last reading"
) from err
except (HTTPError, DiscovergyClientError) as err:
raise UpdateFailed(
f"Error while fetching last reading for meter {self.meter.meter_id}"
) from err
raise UpdateFailed(f"Error while fetching last reading: {err}") from err

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.10", "deebot-client==11.1.0b1"]
"requirements": ["py-sucks==0.9.10", "deebot-client==12.0.0"]
}

@@ -2,6 +2,7 @@

from typing import Any

from eheimdigital.device import EheimDigitalDevice
from eheimdigital.heater import EheimDigitalHeater
from eheimdigital.types import EheimDigitalClientError, HeaterMode, HeaterUnit

@@ -39,17 +40,23 @@ async def async_setup_entry(
"""Set up the callbacks for the coordinator so climate entities can be added as devices are found."""
coordinator = entry.runtime_data

async def async_setup_device_entities(device_address: str) -> None:
"""Set up the light entities for a device."""
device = coordinator.hub.devices[device_address]
def async_setup_device_entities(
device_address: str | dict[str, EheimDigitalDevice],
) -> None:
"""Set up the climate entities for one or multiple devices."""
entities: list[EheimDigitalHeaterClimate] = []
if isinstance(device_address, str):
device_address = {device_address: coordinator.hub.devices[device_address]}
for device in device_address.values():
if isinstance(device, EheimDigitalHeater):
entities.append(EheimDigitalHeaterClimate(coordinator, device))
coordinator.known_devices.add(device.mac_address)

if isinstance(device, EheimDigitalHeater):
async_add_entities([EheimDigitalHeaterClimate(coordinator, device)])
async_add_entities(entities)

coordinator.add_platform_callback(async_setup_device_entities)

for device_address in entry.runtime_data.hub.devices:
await async_setup_device_entities(device_address)
async_setup_device_entities(coordinator.hub.devices)


class EheimDigitalHeaterClimate(EheimDigitalEntity[EheimDigitalHeater], ClimateEntity):
@@ -69,6 +76,7 @@ class EheimDigitalHeaterClimate(EheimDigitalEntity[EheimDigitalHeater], ClimateE
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_preset_mode = PRESET_NONE
_attr_translation_key = "heater"
_attr_name = None

def __init__(
self, coordinator: EheimDigitalUpdateCoordinator, device: EheimDigitalHeater

@@ -2,8 +2,7 @@

from __future__ import annotations

from collections.abc import Callable, Coroutine
from typing import Any
from collections.abc import Callable

from aiohttp import ClientError
from eheimdigital.device import EheimDigitalDevice
@@ -19,7 +18,9 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda

from .const import DOMAIN, LOGGER

type AsyncSetupDeviceEntitiesCallback = Callable[[str], Coroutine[Any, Any, None]]
type AsyncSetupDeviceEntitiesCallback = Callable[
[str | dict[str, EheimDigitalDevice]], None
]


class EheimDigitalUpdateCoordinator(
@@ -61,7 +62,7 @@ class EheimDigitalUpdateCoordinator(

if device_address not in self.known_devices:
for platform_callback in self.platform_callbacks:
await platform_callback(device_address)
platform_callback(device_address)

async def _async_receive_callback(self) -> None:
self.async_set_updated_data(self.hub.devices)

@@ -3,6 +3,7 @@
from typing import Any

from eheimdigital.classic_led_ctrl import EheimDigitalClassicLEDControl
from eheimdigital.device import EheimDigitalDevice
from eheimdigital.types import EheimDigitalClientError, LightMode

from homeassistant.components.light import (
@@ -37,24 +38,28 @@ async def async_setup_entry(
"""Set up the callbacks for the coordinator so lights can be added as devices are found."""
coordinator = entry.runtime_data

async def async_setup_device_entities(device_address: str) -> None:
"""Set up the light entities for a device."""
device = coordinator.hub.devices[device_address]
def async_setup_device_entities(
device_address: str | dict[str, EheimDigitalDevice],
) -> None:
"""Set up the light entities for one or multiple devices."""
entities: list[EheimDigitalClassicLEDControlLight] = []
if isinstance(device_address, str):
device_address = {device_address: coordinator.hub.devices[device_address]}
for device in device_address.values():
if isinstance(device, EheimDigitalClassicLEDControl):
for channel in range(2):
if len(device.tankconfig[channel]) > 0:
entities.append(
EheimDigitalClassicLEDControlLight(
coordinator, device, channel
)
)
coordinator.known_devices.add(device.mac_address)

if isinstance(device, EheimDigitalClassicLEDControl):
for channel in range(2):
if len(device.tankconfig[channel]) > 0:
entities.append(
EheimDigitalClassicLEDControlLight(coordinator, device, channel)
)
coordinator.known_devices.add(device.mac_address)
async_add_entities(entities)

coordinator.add_platform_callback(async_setup_device_entities)

for device_address in entry.runtime_data.hub.devices:
await async_setup_device_entities(device_address)
async_setup_device_entities(coordinator.hub.devices)


class EheimDigitalClassicLEDControlLight(

@@ -8,7 +8,7 @@
"iot_class": "local_polling",
"loggers": ["eheimdigital"],
"quality_scale": "bronze",
"requirements": ["eheimdigital==1.0.5"],
"requirements": ["eheimdigital==1.0.6"],
"zeroconf": [
{ "type": "_http._tcp.local.", "name": "eheimdigital._http._tcp.local." }
]

@@ -4,12 +4,16 @@ from __future__ import annotations

import aiohttp
from electrickiwi_api import ElectricKiwiApi
from electrickiwi_api.exceptions import ApiException
from electrickiwi_api.exceptions import ApiException, AuthException

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow
from homeassistant.helpers import (
aiohttp_client,
config_entry_oauth2_flow,
entity_registry as er,
)

from . import api
from .coordinator import (
@@ -44,7 +48,9 @@ async def async_setup_entry(
raise ConfigEntryNotReady from err

ek_api = ElectricKiwiApi(
api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
api.ConfigEntryElectricKiwiAuth(
aiohttp_client.async_get_clientsession(hass), session
)
)
hop_coordinator = ElectricKiwiHOPDataCoordinator(hass, entry, ek_api)
account_coordinator = ElectricKiwiAccountDataCoordinator(hass, entry, ek_api)
@@ -53,6 +59,8 @@ async def async_setup_entry(
await ek_api.set_active_session()
await hop_coordinator.async_config_entry_first_refresh()
await account_coordinator.async_config_entry_first_refresh()
except AuthException as err:
raise ConfigEntryAuthFailed from err
except ApiException as err:
raise ConfigEntryNotReady from err

@@ -70,3 +78,53 @@ async def async_unload_entry(
) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_migrate_entry(
hass: HomeAssistant, config_entry: ElectricKiwiConfigEntry
) -> bool:
"""Migrate old entry."""
if config_entry.version == 1 and config_entry.minor_version == 1:
implementation = (
await config_entry_oauth2_flow.async_get_config_entry_implementation(
hass, config_entry
)
)

session = config_entry_oauth2_flow.OAuth2Session(
hass, config_entry, implementation
)

ek_api = ElectricKiwiApi(
api.ConfigEntryElectricKiwiAuth(
aiohttp_client.async_get_clientsession(hass), session
)
)
try:
await ek_api.set_active_session()
connection_details = await ek_api.get_connection_details()
except AuthException:
config_entry.async_start_reauth(hass)
return False
except ApiException:
return False
unique_id = str(ek_api.customer_number)
identifier = ek_api.electricity.identifier
hass.config_entries.async_update_entry(
config_entry, unique_id=unique_id, minor_version=2
)
entity_registry = er.async_get(hass)
entity_entries = er.async_entries_for_config_entry(
entity_registry, config_entry_id=config_entry.entry_id
)

for entity in entity_entries:
assert entity.config_entry_id
entity_registry.async_update_entity(
entity.entity_id,
new_unique_id=entity.unique_id.replace(
f"{unique_id}_{connection_details.id}", f"{unique_id}_{identifier}"
),
)

return True

@@ -2,17 +2,16 @@

from __future__ import annotations

from typing import cast

from aiohttp import ClientSession
from electrickiwi_api import AbstractAuth

from homeassistant.helpers import config_entry_oauth2_flow
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow

from .const import API_BASE_URL


class AsyncConfigEntryAuth(AbstractAuth):
class ConfigEntryElectricKiwiAuth(AbstractAuth):
"""Provide Electric Kiwi authentication tied to an OAuth2 based config entry."""

def __init__(
@@ -29,4 +28,21 @@ class AsyncConfigEntryAuth(AbstractAuth):
"""Return a valid access token."""
await self._oauth_session.async_ensure_token_valid()

return cast(str, self._oauth_session.token["access_token"])
return str(self._oauth_session.token["access_token"])


class ConfigFlowElectricKiwiAuth(AbstractAuth):
"""Provide Electric Kiwi authentication tied to an OAuth2 based config flow."""

def __init__(
self,
hass: HomeAssistant,
token: str,
) -> None:
"""Initialize ConfigFlowFitbitApi."""
super().__init__(aiohttp_client.async_get_clientsession(hass), API_BASE_URL)
self._token = token

async def async_get_access_token(self) -> str:
"""Return the token for the Electric Kiwi API."""
return self._token

@@ -6,9 +6,14 @@ from collections.abc import Mapping
import logging
from typing import Any

from homeassistant.config_entries import ConfigFlowResult
from electrickiwi_api import ElectricKiwiApi
from electrickiwi_api.exceptions import ApiException

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
from homeassistant.const import CONF_NAME
from homeassistant.helpers import config_entry_oauth2_flow

from . import api
from .const import DOMAIN, SCOPE_VALUES


@@ -17,6 +22,8 @@ class ElectricKiwiOauth2FlowHandler(
):
"""Config flow to handle Electric Kiwi OAuth2 authentication."""

VERSION = 1
MINOR_VERSION = 2
DOMAIN = DOMAIN

@property
@@ -40,12 +47,30 @@ class ElectricKiwiOauth2FlowHandler(
) -> ConfigFlowResult:
"""Dialog that informs the user that reauth is required."""
if user_input is None:
return self.async_show_form(step_id="reauth_confirm")
return self.async_show_form(
step_id="reauth_confirm",
description_placeholders={CONF_NAME: self._get_reauth_entry().title},
)
return await self.async_step_user()

async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
"""Create an entry for Electric Kiwi."""
existing_entry = await self.async_set_unique_id(DOMAIN)
if existing_entry:
return self.async_update_reload_and_abort(existing_entry, data=data)
return await super().async_oauth_create_entry(data)
ek_api = ElectricKiwiApi(
api.ConfigFlowElectricKiwiAuth(self.hass, data["token"]["access_token"])
)

try:
session = await ek_api.get_active_session()
except ApiException:
return self.async_abort(reason="connection_error")

unique_id = str(session.data.customer_number)
await self.async_set_unique_id(unique_id)
if self.source == SOURCE_REAUTH:
self._abort_if_unique_id_mismatch(reason="wrong_account")
return self.async_update_reload_and_abort(
self._get_reauth_entry(), data=data
)

self._abort_if_unique_id_configured()
return self.async_create_entry(title=unique_id, data=data)

@@ -8,4 +8,4 @@ OAUTH2_AUTHORIZE = "https://welcome.electrickiwi.co.nz/oauth/authorize"
OAUTH2_TOKEN = "https://welcome.electrickiwi.co.nz/oauth/token"
API_BASE_URL = "https://api.electrickiwi.co.nz"

SCOPE_VALUES = "read_connection_detail read_billing_frequency read_account_running_balance read_consumption_summary read_consumption_averages read_hop_intervals_config read_hop_connection save_hop_connection read_session"
SCOPE_VALUES = "read_customer_details read_connection_detail read_connection read_billing_address get_bill_address read_billing_frequency read_billing_details read_billing_bills read_billing_bill read_billing_bill_id read_billing_bill_file read_account_running_balance read_customer_account_summary read_consumption_summary download_consumption_file read_consumption_averages get_consumption_averages read_hop_intervals_config read_hop_intervals read_hop_connection read_hop_specific_connection save_hop_connection save_hop_specific_connection read_outage_contact get_outage_contact_info_for_icp read_session read_session_data_login"

@@ -10,7 +10,7 @@ import logging

from electrickiwi_api import ElectricKiwiApi
from electrickiwi_api.exceptions import ApiException, AuthException
from electrickiwi_api.model import AccountBalance, Hop, HopIntervals
from electrickiwi_api.model import AccountSummary, Hop, HopIntervals

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
@@ -34,7 +34,7 @@ class ElectricKiwiRuntimeData:
type ElectricKiwiConfigEntry = ConfigEntry[ElectricKiwiRuntimeData]


class ElectricKiwiAccountDataCoordinator(DataUpdateCoordinator[AccountBalance]):
class ElectricKiwiAccountDataCoordinator(DataUpdateCoordinator[AccountSummary]):
"""ElectricKiwi Account Data object."""

def __init__(
@@ -51,13 +51,13 @@ class ElectricKiwiAccountDataCoordinator(DataUpdateCoordinator[AccountBalance]):
name="Electric Kiwi Account Data",
update_interval=ACCOUNT_SCAN_INTERVAL,
)
self._ek_api = ek_api
self.ek_api = ek_api

async def _async_update_data(self) -> AccountBalance:
async def _async_update_data(self) -> AccountSummary:
"""Fetch data from Account balance API endpoint."""
try:
async with asyncio.timeout(60):
return await self._ek_api.get_account_balance()
return await self.ek_api.get_account_summary()
except AuthException as auth_err:
raise ConfigEntryAuthFailed from auth_err
except ApiException as api_err:
@@ -85,7 +85,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
# Polling interval. Will only be polled if there are subscribers.
update_interval=HOP_SCAN_INTERVAL,
)
self._ek_api = ek_api
self.ek_api = ek_api
self.hop_intervals: HopIntervals | None = None

def get_hop_options(self) -> dict[str, int]:
@@ -100,7 +100,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
async def async_update_hop(self, hop_interval: int) -> Hop:
"""Update selected hop and data."""
try:
self.async_set_updated_data(await self._ek_api.post_hop(hop_interval))
self.async_set_updated_data(await self.ek_api.post_hop(hop_interval))
except AuthException as auth_err:
raise ConfigEntryAuthFailed from auth_err
except ApiException as api_err:
@@ -118,7 +118,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
try:
async with asyncio.timeout(60):
if self.hop_intervals is None:
hop_intervals: HopIntervals = await self._ek_api.get_hop_intervals()
hop_intervals: HopIntervals = await self.ek_api.get_hop_intervals()
hop_intervals.intervals = OrderedDict(
filter(
lambda pair: pair[1].active == 1,
@@ -127,7 +127,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
)

self.hop_intervals = hop_intervals
return await self._ek_api.get_hop()
return await self.ek_api.get_hop()
except AuthException as auth_err:
raise ConfigEntryAuthFailed from auth_err
except ApiException as api_err:

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/electric_kiwi",
"integration_type": "hub",
"iot_class": "cloud_polling",
"requirements": ["electrickiwi-api==0.8.5"]
"requirements": ["electrickiwi-api==0.9.14"]
}

@@ -53,8 +53,8 @@ class ElectricKiwiSelectHOPEntity(
"""Initialise the HOP selection entity."""
super().__init__(coordinator)
self._attr_unique_id = (
f"{coordinator._ek_api.customer_number}"  # noqa: SLF001
f"_{coordinator._ek_api.connection_id}_{description.key}"  # noqa: SLF001
f"{coordinator.ek_api.customer_number}"
f"_{coordinator.ek_api.electricity.identifier}_{description.key}"
)
self.entity_description = description
self.values_dict = coordinator.get_hop_options()

@@ -6,7 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime, timedelta

from electrickiwi_api.model import AccountBalance, Hop
from electrickiwi_api.model import AccountSummary, Hop

from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -39,7 +39,15 @@ ATTR_HOP_PERCENTAGE = "hop_percentage"
class ElectricKiwiAccountSensorEntityDescription(SensorEntityDescription):
"""Describes Electric Kiwi sensor entity."""

value_func: Callable[[AccountBalance], float | datetime]
value_func: Callable[[AccountSummary], float | datetime]


def _get_hop_percentage(account_balance: AccountSummary) -> float:
"""Return the hop percentage from account summary."""
if power := account_balance.services.get("power"):
if connection := power.connections[0]:
return float(connection.hop_percentage)
return 0.0


ACCOUNT_SENSOR_TYPES: tuple[ElectricKiwiAccountSensorEntityDescription, ...] = (
@@ -72,9 +80,7 @@ ACCOUNT_SENSOR_TYPES: tuple[ElectricKiwiAccountSensorEntityDescription, ...] = (
translation_key="hop_power_savings",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
value_func=lambda account_balance: float(
account_balance.connections[0].hop_percentage
),
value_func=_get_hop_percentage,
),
)

@@ -165,8 +171,8 @@ class ElectricKiwiAccountEntity(
super().__init__(coordinator)

self._attr_unique_id = (
f"{coordinator._ek_api.customer_number}"  # noqa: SLF001
f"_{coordinator._ek_api.connection_id}_{description.key}"  # noqa: SLF001
f"{coordinator.ek_api.customer_number}"
f"_{coordinator.ek_api.electricity.identifier}_{description.key}"
)
self.entity_description = description

@@ -194,8 +200,8 @@ class ElectricKiwiHOPEntity(
super().__init__(coordinator)

self._attr_unique_id = (
f"{coordinator._ek_api.customer_number}"  # noqa: SLF001
f"_{coordinator._ek_api.connection_id}_{description.key}"  # noqa: SLF001
f"{coordinator.ek_api.customer_number}"
f"_{coordinator.ek_api.electricity.identifier}_{description.key}"
)
self.entity_description = description


@@ -21,7 +21,8 @@
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
"oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]"
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
"connection_error": "[%key:common::config_flow::error::cannot_connect%]"
},
"create_entry": {
"default": "[%key:common::config_flow::create_entry::authenticated%]"

@@ -3,7 +3,7 @@
"config": {
"step": {
"user": {
"title": "Searching for Energenie-Power-Sockets Devices.",
"title": "Searching for Energenie Power Sockets devices",
"description": "Choose a discovered device.",
"data": {
"device": "[%key:common::config_flow::data::device%]"
@@ -13,7 +13,7 @@
"abort": {
"usb_error": "Couldn't access USB devices!",
"no_device": "Unable to discover any (new) supported device.",
"device_not_found": "No device was found for the given id.",
"device_not_found": "No device was found for the given ID.",
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
},

@@ -22,5 +22,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["eq3btsmart"],
"requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.2.0"]
"requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.7.0"]
}

@@ -28,6 +28,7 @@ def async_connect_scanner(
entry_data: RuntimeEntryData,
cli: APIClient,
device_info: DeviceInfo,
device_id: str,
) -> CALLBACK_TYPE:
"""Connect scanner."""
client_data = connect_scanner(cli, device_info, entry_data.available)
@@ -45,6 +46,7 @@ def async_connect_scanner(
source_domain=DOMAIN,
source_model=device_info.model,
source_config_entry_id=entry_data.entry_id,
source_device_id=device_id,
),
scanner.async_setup(),
],

@@ -425,7 +425,9 @@ class ESPHomeManager:

if device_info.bluetooth_proxy_feature_flags_compat(api_version):
entry_data.disconnect_callbacks.add(
async_connect_scanner(hass, entry_data, cli, device_info)
async_connect_scanner(
hass, entry_data, cli, device_info, self.device_id
)
)
else:
bluetooth.async_remove_scanner(hass, device_info.mac_address)
@@ -571,7 +573,9 @@ def _async_setup_device_registry(

configuration_url = None
if device_info.webserver_port > 0:
configuration_url = f"http://{entry.data['host']}:{device_info.webserver_port}"
entry_host = entry.data["host"]
host = f"[{entry_host}]" if ":" in entry_host else entry_host
configuration_url = f"http://{host}:{device_info.webserver_port}"
elif (
(dashboard := async_get_dashboard(hass))
and dashboard.data

@@ -18,7 +18,7 @@
"requirements": [
"aioesphomeapi==29.0.0",
"esphome-dashboard-api==1.2.3",
"bleak-esphome==2.2.0"
"bleak-esphome==2.7.0"
],
"zeroconf": ["_esphomelib._tcp.local."]
}

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/fireservicerota",
"iot_class": "cloud_polling",
"loggers": ["pyfireservicerota"],
"requirements": ["pyfireservicerota==0.0.43"]
"requirements": ["pyfireservicerota==0.0.46"]
}

@@ -21,5 +21,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250129.0"]
"requirements": ["home-assistant-frontend==20250210.0"]
}

@@ -7,7 +7,7 @@ from collections.abc import Callable
from google_drive_api.exceptions import GoogleDriveApiError

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import instance_id
from homeassistant.helpers.aiohttp_client import async_get_clientsession
@@ -49,6 +49,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleDriveConfigEntry)
except GoogleDriveApiError as err:
raise ConfigEntryNotReady from err

_async_notify_backup_listeners_soon(hass)

return True


@@ -56,10 +58,15 @@ async def async_unload_entry(
hass: HomeAssistant, entry: GoogleDriveConfigEntry
) -> bool:
"""Unload a config entry."""
hass.loop.call_soon(_notify_backup_listeners, hass)
_async_notify_backup_listeners_soon(hass)
return True


def _notify_backup_listeners(hass: HomeAssistant) -> None:
def _async_notify_backup_listeners(hass: HomeAssistant) -> None:
for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
listener()


@callback
def _async_notify_backup_listeners_soon(hass: HomeAssistant) -> None:
hass.loop.call_soon(_async_notify_backup_listeners, hass)

@@ -11,7 +11,7 @@ from aiohttp import ClientSession, ClientTimeout, StreamReader
from aiohttp.client_exceptions import ClientError, ClientResponseError
from google_drive_api.api import AbstractAuth, GoogleDriveApi

from homeassistant.components.backup import AgentBackup
from homeassistant.components.backup import AgentBackup, suggested_filename
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.exceptions import (
@@ -132,7 +132,7 @@ class DriveClient:
"""Upload a backup."""
folder_id, _ = await self.async_create_ha_root_folder_if_not_exists()
backup_metadata = {
"name": f"{backup.name} {backup.date}.tar",
"name": suggested_filename(backup),
"description": json.dumps(backup.as_dict()),
"parents": [folder_id],
"properties": {
@@ -146,9 +146,10 @@ class DriveClient:
backup.backup_id,
backup_metadata,
)
await self._api.upload_file(
await self._api.resumable_upload_file(
backup_metadata,
open_stream,
backup.size,
timeout=ClientTimeout(total=_UPLOAD_AND_DOWNLOAD_TIMEOUT),
)
_LOGGER.debug(

@@ -2,6 +2,10 @@

from homeassistant.components.application_credentials import AuthorizationServer
from homeassistant.core import HomeAssistant
from homeassistant.helpers.config_entry_oauth2_flow import (
AUTH_CALLBACK_PATH,
MY_AUTH_CALLBACK_PATH,
)


async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
@@ -14,8 +18,14 @@ async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationSe

async def async_get_description_placeholders(hass: HomeAssistant) -> dict[str, str]:
"""Return description placeholders for the credentials dialog."""
if "my" in hass.config.components:
redirect_url = MY_AUTH_CALLBACK_PATH
else:
ha_host = hass.config.external_url or "https://YOUR_DOMAIN:PORT"
redirect_url = f"{ha_host}{AUTH_CALLBACK_PATH}"
return {
"oauth_consent_url": "https://console.cloud.google.com/apis/credentials/consent",
"more_info_url": "https://www.home-assistant.io/integrations/google_drive/",
"oauth_creds_url": "https://console.cloud.google.com/apis/credentials",
"redirect_url": redirect_url,
}

@@ -80,16 +80,14 @@ class GoogleDriveBackupAgent(BackupAgent):
try:
await self._client.async_upload_backup(open_stream, backup)
except (GoogleDriveApiError, HomeAssistantError, TimeoutError) as err:
_LOGGER.error("Upload backup error: %s", err)
raise BackupAgentError("Failed to upload backup") from err
raise BackupAgentError(f"Failed to upload backup: {err}") from err

async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List backups."""
try:
return await self._client.async_list_backups()
except (GoogleDriveApiError, HomeAssistantError, TimeoutError) as err:
_LOGGER.error("List backups error: %s", err)
raise BackupAgentError("Failed to list backups") from err
raise BackupAgentError(f"Failed to list backups: {err}") from err

async def async_get_backup(
self,
@@ -121,9 +119,7 @@ class GoogleDriveBackupAgent(BackupAgent):
stream = await self._client.async_download(file_id)
return ChunkAsyncStreamIterator(stream)
except (GoogleDriveApiError, HomeAssistantError, TimeoutError) as err:
_LOGGER.error("Download backup error: %s", err)
raise BackupAgentError("Failed to download backup") from err
_LOGGER.error("Download backup_id: %s not found", backup_id)
raise BackupAgentError(f"Failed to download backup: {err}") from err
raise BackupAgentError("Backup not found")

async def async_delete_backup(
@@ -143,5 +139,4 @@ class GoogleDriveBackupAgent(BackupAgent):
await self._client.async_delete(file_id)
_LOGGER.debug("Deleted backup_id: %s", backup_id)
except (GoogleDriveApiError, HomeAssistantError, TimeoutError) as err:
_LOGGER.error("Delete backup error: %s", err)
raise BackupAgentError("Failed to delete backup") from err
raise BackupAgentError(f"Failed to delete backup: {err}") from err

@@ -10,5 +10,5 @@
"iot_class": "cloud_polling",
"loggers": ["google_drive_api"],
"quality_scale": "platinum",
"requirements": ["python-google-drive-api==0.0.2"]
"requirements": ["python-google-drive-api==0.1.0"]
}

@@ -35,6 +35,6 @@
}
},
"application_credentials": {
"description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Drive. You also need to create Application Credentials linked to your account:\n1. Go to [Credentials]({oauth_creds_url}) and select **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type."
"description": "Follow the [instructions]({more_info_url}) to configure the Cloud Console:\n\n1. Go to the [OAuth consent screen]({oauth_consent_url}) and configure\n1. Go to [Credentials]({oauth_creds_url}) and select **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type.\n1. Add `{redirect_url}` under *Authorized redirect URI*."
}
}

@@ -78,7 +78,7 @@ class GoveeConfigFlow(ConfigFlow, domain=DOMAIN):
title=title, data={CONF_DEVICE_TYPE: device.device_type}
)

current_addresses = self._async_current_ids()
current_addresses = self._async_current_ids(include_ignore=False)
for discovery_info in async_discovered_service_info(self.hass, False):
address = discovery_info.address
if address in current_addresses or address in self._discovered_devices:

@@ -38,6 +38,10 @@
"local_name": "GV5126*",
"connectable": false
},
{
"local_name": "GV5179*",
"connectable": false
},
{
"local_name": "GVH5127*",
"connectable": false
@@ -131,5 +135,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/govee_ble",
"iot_class": "local_push",
"requirements": ["govee-ble==0.42.0"]
"requirements": ["govee-ble==0.43.0"]
}

@@ -11,6 +11,7 @@ from typing import Any

from aiohttp import ClientError
from habiticalib import (
Avatar,
ContentData,
Habitica,
HabiticaException,
@@ -19,7 +20,6 @@ from habiticalib import (
TaskFilter,
TooManyRequestsError,
UserData,
UserStyles,
)

from homeassistant.config_entries import ConfigEntry
@@ -159,12 +159,10 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
else:
await self.async_request_refresh()

async def generate_avatar(self, user_styles: UserStyles) -> bytes:
async def generate_avatar(self, avatar: Avatar) -> bytes:
"""Generate Avatar."""

avatar = BytesIO()
await self.habitica.generate_avatar(
fp=avatar, user_styles=user_styles, fmt="PNG"
)
png = BytesIO()
await self.habitica.generate_avatar(fp=png, avatar=avatar, fmt="PNG")

return avatar.getvalue()
return png.getvalue()

@@ -23,5 +23,5 @@ async def async_get_config_entry_diagnostics(
CONF_URL: config_entry.data[CONF_URL],
CONF_API_USER: config_entry.data[CONF_API_USER],
},
"habitica_data": habitica_data.to_dict()["data"],
"habitica_data": habitica_data.to_dict(omit_none=False)["data"],
}

@@ -2,10 +2,9 @@

from __future__ import annotations

from dataclasses import asdict
from enum import StrEnum

from habiticalib import UserStyles
from habiticalib import Avatar, extract_avatar

from homeassistant.components.image import ImageEntity, ImageEntityDescription
from homeassistant.core import HomeAssistant
@@ -45,7 +44,7 @@ class HabiticaImage(HabiticaBase, ImageEntity):
translation_key=HabiticaImageEntity.AVATAR,
)
_attr_content_type = "image/png"
_current_appearance: UserStyles | None = None
_current_appearance: Avatar | None = None
_cache: bytes | None = None

def __init__(
@@ -60,7 +59,7 @@ class HabiticaImage(HabiticaBase, ImageEntity):

def _handle_coordinator_update(self) -> None:
"""Check if equipped gear and other things have changed since last avatar image generation."""
new_appearance = UserStyles.from_dict(asdict(self.coordinator.data.user))
new_appearance = extract_avatar(self.coordinator.data.user)

if self._current_appearance != new_appearance:
self._current_appearance = new_appearance

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/habitica",
"iot_class": "cloud_polling",
"loggers": ["habiticalib"],
"requirements": ["habiticalib==0.3.3"]
"requirements": ["habiticalib==0.3.7"]
}

@@ -77,7 +77,7 @@ SERVICE_API_CALL_SCHEMA = vol.Schema(

SERVICE_CAST_SKILL_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
vol.Required(ATTR_SKILL): cv.string,
vol.Optional(ATTR_TASK): cv.string,
}
@@ -85,12 +85,12 @@ SERVICE_CAST_SKILL_SCHEMA = vol.Schema(

SERVICE_MANAGE_QUEST_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
}
)
SERVICE_SCORE_TASK_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
vol.Required(ATTR_TASK): cv.string,
vol.Optional(ATTR_DIRECTION): cv.string,
}
@@ -98,7 +98,7 @@ SERVICE_SCORE_TASK_SCHEMA = vol.Schema(

SERVICE_TRANSFORMATION_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
vol.Required(ATTR_ITEM): cv.string,
vol.Required(ATTR_TARGET): cv.string,
}
@@ -106,7 +106,7 @@ SERVICE_TRANSFORMATION_SCHEMA = vol.Schema(

SERVICE_GET_TASKS_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
vol.Optional(ATTR_TYPE): vol.All(
cv.ensure_list, [vol.All(vol.Upper, vol.In({x.name for x in TaskType}))]
),
@@ -510,7 +510,10 @@ def async_setup_services(hass: HomeAssistant) -> None:  # noqa: C901
or (task.notes and keyword in task.notes.lower())
or any(keyword in item.text.lower() for item in task.checklist)
]
result: dict[str, Any] = {"tasks": response}
result: dict[str, Any] = {
"tasks": [task.to_dict(omit_none=False) for task in response]
}

return result

hass.services.async_register(

@@ -6,7 +6,7 @@ import asyncio
|
||||
from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from pathlib import Path, PurePath
|
||||
from typing import Any, cast
|
||||
from uuid import UUID
|
||||
|
||||
@@ -20,6 +20,7 @@ from aiohasupervisor.models import (
|
||||
backups as supervisor_backups,
|
||||
mounts as supervisor_mounts,
|
||||
)
|
||||
from aiohasupervisor.models.backups import LOCATION_CLOUD_BACKUP, LOCATION_LOCAL_STORAGE
|
||||
|
||||
from homeassistant.components.backup import (
|
||||
DATA_MANAGER,
|
||||
@@ -27,30 +28,39 @@ from homeassistant.components.backup import (
|
||||
AgentBackup,
|
||||
BackupAgent,
|
||||
BackupManagerError,
|
||||
BackupNotFound,
|
||||
BackupReaderWriter,
|
||||
BackupReaderWriterError,
|
||||
CreateBackupEvent,
|
||||
CreateBackupStage,
|
||||
CreateBackupState,
|
||||
Folder,
|
||||
IdleEvent,
|
||||
IncorrectPasswordError,
|
||||
ManagerBackup,
|
||||
NewBackup,
|
||||
RestoreBackupEvent,
|
||||
RestoreBackupStage,
|
||||
RestoreBackupState,
|
||||
WrittenBackup,
|
||||
async_get_manager as async_get_backup_manager,
|
||||
suggested_filename as suggested_backup_filename,
|
||||
suggested_filename_from_name_date,
|
||||
)
|
||||
from homeassistant.const import __version__ as HAVERSION
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.enum import try_parse_enum
|
||||
|
||||
from .const import DOMAIN, EVENT_SUPERVISOR_EVENT
|
||||
from .handler import get_supervisor_client
|
||||
|
||||
LOCATION_CLOUD_BACKUP = ".cloud_backup"
|
||||
LOCATION_LOCAL = ".local"
|
||||
MOUNT_JOBS = ("mount_manager_create_mount", "mount_manager_remove_mount")
|
||||
RESTORE_JOB_ID_ENV = "SUPERVISOR_RESTORE_JOB_ID"
|
||||
# Set on backups automatically created when updating an addon
|
||||
TAG_ADDON_UPDATE = "supervisor.addon_update"
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -61,7 +71,9 @@ async def async_get_backup_agents(
|
||||
"""Return the hassio backup agents."""
|
||||
client = get_supervisor_client(hass)
|
||||
mounts = await client.mounts.info()
|
||||
agents: list[BackupAgent] = [SupervisorBackupAgent(hass, "local", None)]
|
||||
agents: list[BackupAgent] = [
|
||||
SupervisorBackupAgent(hass, "local", LOCATION_LOCAL_STORAGE)
|
||||
]
|
||||
for mount in mounts.mounts:
|
||||
if mount.usage is not supervisor_mounts.MountUsage.BACKUP:
|
||||
continue
|
||||
@@ -101,7 +113,7 @@ def async_register_backup_agents_listener(


def _backup_details_to_agent_backup(
-details: supervisor_backups.BackupComplete, location: str | None
+details: supervisor_backups.BackupComplete, location: str
) -> AgentBackup:
"""Convert a supervisor backup details object to an agent backup."""
homeassistant_included = details.homeassistant is not None
@@ -113,12 +125,14 @@ def _backup_details_to_agent_backup(
AddonInfo(name=addon.name, slug=addon.slug, version=addon.version)
for addon in details.addons
]
-location = location or LOCATION_LOCAL
+extra_metadata = details.extra or {}
return AgentBackup(
addons=addons,
backup_id=details.slug,
database_included=database_included,
-date=details.date.isoformat(),
+date=extra_metadata.get(
+"supervisor.backup_request_date", details.date.isoformat()
+),
extra_metadata=details.extra or {},
folders=[Folder(folder) for folder in details.folders],
homeassistant_included=homeassistant_included,
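The date change above prefers a request timestamp stashed in the backup's extra metadata and falls back to the date Supervisor assigned to the finished backup. A small sketch of that fallback, with the metadata key taken from the diff:

from datetime import datetime, timezone
from typing import Any

def backup_date(extra_metadata: dict[str, Any], supervisor_date: datetime) -> str:
    # Prefer the timestamp recorded when the backup was requested;
    # fall back to Supervisor's own completion date.
    return extra_metadata.get(
        "supervisor.backup_request_date", supervisor_date.isoformat()
    )

print(backup_date({}, datetime(2025, 2, 1, tzinfo=timezone.utc)))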
@@ -134,7 +148,7 @@ class SupervisorBackupAgent(BackupAgent):

domain = DOMAIN

-def __init__(self, hass: HomeAssistant, name: str, location: str | None) -> None:
+def __init__(self, hass: HomeAssistant, name: str, location: str) -> None:
"""Initialize the backup agent."""
super().__init__()
self._hass = hass
@@ -149,10 +163,15 @@ class SupervisorBackupAgent(BackupAgent):
**kwargs: Any,
) -> AsyncIterator[bytes]:
"""Download a backup file."""
-return await self._client.backups.download_backup(
-backup_id,
-options=supervisor_backups.DownloadBackupOptions(location=self.location),
-)
+try:
+return await self._client.backups.download_backup(
+backup_id,
+options=supervisor_backups.DownloadBackupOptions(
+location=self.location
+),
+)
+except SupervisorNotFoundError as err:
+raise BackupNotFound from err

async def async_upload_backup(
self,
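The download path now translates the client's not-found error into the backup framework's BackupNotFound, keeping Supervisor-specific exceptions out of the manager. The wrapping pattern, sketched with stand-in exception types and a fake client:

import asyncio

class SupervisorNotFoundError(Exception):  # stand-in for aiohasupervisor's error
    pass

class BackupNotFound(Exception):  # stand-in for the backup framework's error
    pass

class FakeBackupsClient:
    async def download_backup(self, backup_id: str):
        raise SupervisorNotFoundError(backup_id)

async def download_backup(client, backup_id: str):
    try:
        return await client.download_backup(backup_id)
    except SupervisorNotFoundError as err:
        # Re-raise as the framework-level error the backup manager handles.
        raise BackupNotFound from err

try:
    asyncio.run(download_backup(FakeBackupsClient(), "abc123"))
except BackupNotFound:
    print("translated to BackupNotFound")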
@@ -174,7 +193,8 @@ class SupervisorBackupAgent(BackupAgent):
return
stream = await open_stream()
upload_options = supervisor_backups.UploadBackupOptions(
-location={self.location}
+location={self.location},
+filename=PurePath(suggested_backup_filename(backup)),
)
await self._client.backups.upload_backup(
stream,
@@ -186,7 +206,7 @@ class SupervisorBackupAgent(BackupAgent):
backup_list = await self._client.backups.list()
result = []
for backup in backup_list:
-if not backup.locations or self.location not in backup.locations:
+if self.location not in backup.location_attributes:
continue
details = await self._client.backups.backup_info(backup.slug)
result.append(_backup_details_to_agent_backup(details, self.location))
@@ -198,8 +218,11 @@ class SupervisorBackupAgent(BackupAgent):
**kwargs: Any,
) -> AgentBackup | None:
"""Return a backup."""
-details = await self._client.backups.backup_info(backup_id)
-if self.location not in details.locations:
+try:
+details = await self._client.backups.backup_info(backup_id)
+except SupervisorNotFoundError:
+return None
+if self.location not in details.location_attributes:
return None
return _backup_details_to_agent_backup(details, self.location)

@@ -212,10 +235,6 @@ class SupervisorBackupAgent(BackupAgent):
location={self.location}
),
)
-except SupervisorBadRequestError as err:
-if err.args[0] != "Backup does not exist":
-raise
-_LOGGER.debug("Backup %s does not exist", backup_id)
+except SupervisorNotFoundError:
+_LOGGER.debug("Backup %s does not exist", backup_id)

@@ -276,8 +295,8 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
# will be handled by async_upload_backup.
# If the lists are the same length, it does not matter which one we send,
# we send the encrypted list to have a well defined behavior.
-encrypted_locations: list[str | None] = []
-decrypted_locations: list[str | None] = []
+encrypted_locations: list[str] = []
+decrypted_locations: list[str] = []
agents_settings = manager.config.data.agents
for hassio_agent in hassio_agents:
if password is not None:
@@ -302,6 +321,9 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
locations = []
locations = locations or [LOCATION_CLOUD_BACKUP]

+date = dt_util.now().isoformat()
+extra_metadata = extra_metadata | {"supervisor.backup_request_date": date}
+filename = suggested_filename_from_name_date(backup_name, date)
try:
backup = await self._client.backups.partial_backup(
supervisor_backups.PartialBackupOptions(
@@ -315,6 +337,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
homeassistant_exclude_database=not include_database,
background=True,
extra=extra_metadata,
+filename=PurePath(filename),
)
)
except SupervisorError as err:
@@ -323,40 +346,56 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
self._async_wait_for_backup(
backup,
locations,
on_progress=on_progress,
remove_after_upload=locations == [LOCATION_CLOUD_BACKUP],
),
name="backup_manager_create_backup",
eager_start=False,  # To ensure the task is not started before we return
)

-return (NewBackup(backup_job_id=backup.job_id), backup_task)
+return (NewBackup(backup_job_id=backup.job_id.hex), backup_task)

async def _async_wait_for_backup(
self,
backup: supervisor_backups.NewBackup,
-locations: list[str | None],
+locations: list[str],
*,
on_progress: Callable[[CreateBackupEvent], None],
remove_after_upload: bool,
) -> WrittenBackup:
"""Wait for a backup to complete."""
backup_complete = asyncio.Event()
backup_id: str | None = None
+create_errors: list[dict[str, str]] = []

@callback
def on_job_progress(data: Mapping[str, Any]) -> None:
"""Handle backup progress."""
nonlocal backup_id
+if not (stage := try_parse_enum(CreateBackupStage, data.get("stage"))):
+_LOGGER.debug("Unknown create stage: %s", data.get("stage"))
+else:
+on_progress(
+CreateBackupEvent(
+reason=None, stage=stage, state=CreateBackupState.IN_PROGRESS
+)
+)
if data.get("done") is True:
backup_id = data.get("reference")
+create_errors.extend(data.get("errors", []))
backup_complete.set()

+unsub = self._async_listen_job_events(backup.job_id, on_job_progress)
try:
-unsub = self._async_listen_job_events(backup.job_id, on_job_progress)
+await self._get_job_state(backup.job_id, on_job_progress)
await backup_complete.wait()
finally:
unsub()
-if not backup_id:
-raise BackupReaderWriterError("Backup failed")
+if not backup_id or create_errors:
+# We should add more specific error handling here in the future
+raise BackupReaderWriterError(
+f"Backup failed: {create_errors or 'no backup_id'}"
+)

async def open_backup() -> AsyncIterator[bytes]:
try:
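The create and restore callbacks in these hunks both funnel raw job events through try_parse_enum, so an unrecognized stage string is logged instead of raising. A self-contained sketch of that guard; the enum values are illustrative, not the integration's full list, and try_parse_enum is re-implemented here to mirror homeassistant.util.enum:

from enum import StrEnum
import logging

logging.basicConfig(level=logging.DEBUG)
_LOGGER = logging.getLogger(__name__)

class CreateBackupStage(StrEnum):  # abbreviated, illustrative values
    ADDONS = "addons"
    FINISHING_FILE = "finishing_file"

def try_parse_enum(cls, value):
    # Mirrors homeassistant.util.enum.try_parse_enum: None on failure.
    try:
        return cls(value)
    except ValueError:
        return None

def on_job_progress(data: dict) -> None:
    if not (stage := try_parse_enum(CreateBackupStage, data.get("stage"))):
        _LOGGER.debug("Unknown create stage: %s", data.get("stage"))
    else:
        print(f"progress: {stage}")

on_job_progress({"stage": "addons"})            # parsed, reported as progress
on_job_progress({"stage": "something_new"})     # unknown, only logged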
@@ -469,7 +508,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
else None
)

-restore_location: str | None
+restore_location: str
if manager.backup_agents[agent_id].domain != DOMAIN:
# Download the backup to the supervisor. Supervisor will clean up the backup
# two days after the restore is done.
@@ -495,6 +534,8 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
location=restore_location,
),
)
+except SupervisorNotFoundError as err:
+raise BackupNotFound from err
except SupervisorBadRequestError as err:
# Supervisor currently does not transmit machine parsable error types
message = err.args[0]
@@ -503,16 +544,30 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
raise HomeAssistantError(message) from err

restore_complete = asyncio.Event()
+restore_errors: list[dict[str, str]] = []

@callback
def on_job_progress(data: Mapping[str, Any]) -> None:
-"""Handle backup progress."""
+"""Handle backup restore progress."""
+if not (stage := try_parse_enum(RestoreBackupStage, data.get("stage"))):
+_LOGGER.debug("Unknown restore stage: %s", data.get("stage"))
+else:
+on_progress(
+RestoreBackupEvent(
+reason=None, stage=stage, state=RestoreBackupState.IN_PROGRESS
+)
+)
if data.get("done") is True:
restore_complete.set()
+restore_errors.extend(data.get("errors", []))

+unsub = self._async_listen_job_events(job.job_id, on_job_progress)
try:
-unsub = self._async_listen_job_events(job.job_id, on_job_progress)
+await self._get_job_state(job.job_id, on_job_progress)
await restore_complete.wait()
+if restore_errors:
+# We should add more specific error handling here in the future
+raise BackupReaderWriterError(f"Restore failed: {restore_errors}")
finally:
unsub()

@@ -522,28 +577,52 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
on_progress: Callable[[RestoreBackupEvent | IdleEvent], None],
) -> None:
"""Check restore status after core restart."""
-if not (restore_job_id := os.environ.get(RESTORE_JOB_ID_ENV)):
+if not (restore_job_str := os.environ.get(RESTORE_JOB_ID_ENV)):
_LOGGER.debug("No restore job ID found in environment")
return

+restore_job_id = UUID(restore_job_str)
_LOGGER.debug("Found restore job ID %s in environment", restore_job_id)

+sent_event = False

@callback
def on_job_progress(data: Mapping[str, Any]) -> None:
"""Handle backup restore progress."""
+nonlocal sent_event

+if not (stage := try_parse_enum(RestoreBackupStage, data.get("stage"))):
+_LOGGER.debug("Unknown restore stage: %s", data.get("stage"))

if data.get("done") is not True:
-on_progress(
-RestoreBackupEvent(
-reason="", stage=None, state=RestoreBackupState.IN_PROGRESS
+if stage or not sent_event:
+sent_event = True
+on_progress(
+RestoreBackupEvent(
+reason=None,
+stage=stage,
+state=RestoreBackupState.IN_PROGRESS,
+)
+)
)
return

-on_progress(
-RestoreBackupEvent(
-reason="", stage=None, state=RestoreBackupState.COMPLETED
+restore_errors = data.get("errors", [])
+if restore_errors:
+_LOGGER.warning("Restore backup failed: %s", restore_errors)
+# We should add more specific error handling here in the future
+on_progress(
+RestoreBackupEvent(
+reason="unknown_error",
+stage=stage,
+state=RestoreBackupState.FAILED,
+)
+)
+else:
+on_progress(
+RestoreBackupEvent(
+reason=None, stage=stage, state=RestoreBackupState.COMPLETED
+)
+)
)
on_progress(IdleEvent())
unsub()

@@ -556,7 +635,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):

@callback
def _async_listen_job_events(
-self, job_id: str, on_event: Callable[[Mapping[str, Any]], None]
+self, job_id: UUID, on_event: Callable[[Mapping[str, Any]], None]
) -> Callable[[], None]:
"""Listen for job events."""

@@ -571,7 +650,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
if (
data.get("event") != "job"
or not (event_data := data.get("data"))
-or event_data.get("uuid") != job_id
+or event_data.get("uuid") != job_id.hex
):
return
on_event(event_data)
@@ -582,10 +661,10 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
return unsub

async def _get_job_state(
-self, job_id: str, on_event: Callable[[Mapping[str, Any]], None]
+self, job_id: UUID, on_event: Callable[[Mapping[str, Any]], None]
) -> None:
"""Poll a job for its state."""
-job = await self._client.jobs.get_job(UUID(job_id))
+job = await self._client.jobs.get_job(job_id)
_LOGGER.debug("Job state: %s", job)
on_event(job.to_dict())

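Several hunks above switch job IDs from str to UUID; since Supervisor's WebSocket events carry the ID as a bare hex string, the comparison uses job_id.hex. A quick illustration (the UUID value is made up):

from uuid import UUID

job_id = UUID("b94cd1ec-6b4e-4f52-8f2a-62b1f64d1a69")

# Supervisor job events report the UUID without dashes:
event_uuid = "b94cd1ec6b4e4f528f2a62b1f64d1a69"

assert event_uuid == job_id.hex   # matches
assert event_uuid != str(job_id)  # str() keeps the dashes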
@@ -613,10 +692,20 @@ async def backup_addon_before_update(
else:
password = None

+def addon_update_backup_filter(
+backups: dict[str, ManagerBackup],
+) -> dict[str, ManagerBackup]:
+"""Return addon update backups."""
+return {
+backup_id: backup
+for backup_id, backup in backups.items()
+if backup.extra_metadata.get(TAG_ADDON_UPDATE) == addon
+}
+
try:
await backup_manager.async_create_backup(
agent_ids=[await _default_agent(client)],
-extra_metadata={"supervisor.addon_update": addon},
+extra_metadata={TAG_ADDON_UPDATE: addon},
include_addons=[addon],
include_all_addons=False,
include_database=False,
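The new addon_update_backup_filter keeps only backups whose metadata tags them as pre-update copies of the same add-on, so the old copies can be pruned once a fresh backup succeeds. A trimmed sketch with plain dicts standing in for ManagerBackup objects:

TAG_ADDON_UPDATE = "supervisor.addon_update"

def addon_update_backup_filter(backups: dict[str, dict], addon: str) -> dict[str, dict]:
    # Keep only backups created automatically before updating this add-on.
    return {
        backup_id: backup
        for backup_id, backup in backups.items()
        if backup.get("extra_metadata", {}).get(TAG_ADDON_UPDATE) == addon
    }

backups = {
    "a1": {"extra_metadata": {TAG_ADDON_UPDATE: "core_mosquitto"}},
    "b2": {"extra_metadata": {}},
}
print(addon_update_backup_filter(backups, "core_mosquitto"))  # only "a1" remains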
@@ -627,6 +716,14 @@ async def backup_addon_before_update(
)
except BackupManagerError as err:
raise HomeAssistantError(f"Error creating backup: {err}") from err
+else:
+try:
+await backup_manager.async_delete_filtered_backups(
+include_filter=addon_update_backup_filter,
+delete_filter=lambda backups: backups,
+)
+except BackupManagerError as err:
+raise HomeAssistantError(f"Error deleting old backups: {err}") from err


async def backup_core_before_update(hass: HomeAssistant) -> None:

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/hassio",
"iot_class": "local_polling",
"quality_scale": "internal",
-"requirements": ["aiohasupervisor==0.2.2b6"],
+"requirements": ["aiohasupervisor==0.3.0"],
"single_config_entry": true
}

@@ -37,11 +37,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool
for device in device_registry.devices.get_devices_for_config_entry_id(
entry.entry_id
):
-for domain, player_id in device.identifiers:
-if domain == DOMAIN and not isinstance(player_id, str):
-device_registry.async_update_device(  # type: ignore[unreachable]
-device.id, new_identifiers={(DOMAIN, str(player_id))}
+for ident in device.identifiers:
+if ident[0] != DOMAIN or isinstance(ident[1], str):
+continue
+
+player_id = int(ident[1])  # type: ignore[unreachable]
+
+# Create set of identifiers excluding this integration
+identifiers = {ident for ident in device.identifiers if ident[0] != DOMAIN}
+migrated_identifiers = {(DOMAIN, str(player_id))}
+# Add migrated if not already present in another device, which occurs if the user downgraded and then upgraded
+if not device_registry.async_get_device(migrated_identifiers):
+identifiers.update(migrated_identifiers)
+if len(identifiers) > 0:
+device_registry.async_update_device(
+device.id, new_identifiers=identifiers
)
+else:
+device_registry.async_remove_device(device.id)
break

coordinator = HeosCoordinator(hass, entry)

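The HEOS hunk migrates integer device identifiers to strings while guarding against collisions with a device already created on an older version. A reduced sketch of the set logic, with plain tuples standing in for registry entries (the migrate helper is hypothetical):

DOMAIN = "heos"

def migrate(identifiers: set[tuple[str, int | str]], taken: set[tuple[str, str]]):
    """Return migrated identifiers, or None if the device should be removed."""
    for domain, value in identifiers:
        if domain != DOMAIN or isinstance(value, str):
            continue
        kept = {ident for ident in identifiers if ident[0] != DOMAIN}
        migrated = (DOMAIN, str(value))
        # Only adopt the migrated identifier if no other device owns it,
        # which can happen after a downgrade followed by an upgrade.
        if migrated not in taken:
            kept.add(migrated)
        return kept or None  # empty set -> remove the duplicate device
    return identifiers

print(migrate({("heos", 1)}, taken=set()))             # {('heos', '1')}
print(migrate({("heos", 1)}, taken={("heos", "1")}))   # None -> remove device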
@@ -8,7 +8,7 @@
"iot_class": "local_push",
"loggers": ["pyheos"],
"quality_scale": "silver",
-"requirements": ["pyheos==1.0.1"],
+"requirements": ["pyheos==1.0.2"],
"single_config_entry": true,
"ssdp": [
{

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
-"requirements": ["holidays==0.65", "babel==2.15.0"]
+"requirements": ["holidays==0.66", "babel==2.15.0"]
}

@@ -1,6 +1,7 @@
"""Constants for the homee integration."""

from homeassistant.const import (
+DEGREE,
LIGHT_LUX,
PERCENTAGE,
REVOLUTIONS_PER_MINUTE,
@@ -32,6 +33,7 @@ HOMEE_UNIT_TO_HA_UNIT = {
"W": UnitOfPower.WATT,
"m/s": UnitOfSpeed.METERS_PER_SECOND,
"km/h": UnitOfSpeed.KILOMETERS_PER_HOUR,
+"°": DEGREE,
"°F": UnitOfTemperature.FAHRENHEIT,
"°C": UnitOfTemperature.CELSIUS,
"K": UnitOfTemperature.KELVIN,
@@ -51,7 +53,7 @@ OPEN_CLOSE_MAP_REVERSED = {
0.0: "closed",
1.0: "open",
2.0: "partial",
-3.0: "cosing",
+3.0: "closing",
4.0: "opening",
}
WINDOW_MAP = {

@@ -78,6 +78,7 @@ from .const import (
CONF_VIDEO_CODEC,
CONF_VIDEO_MAP,
CONF_VIDEO_PACKET_SIZE,
+CONF_VIDEO_PROFILE_NAMES,
DEFAULT_AUDIO_CODEC,
DEFAULT_AUDIO_MAP,
DEFAULT_AUDIO_PACKET_SIZE,
@@ -90,6 +91,7 @@ from .const import (
DEFAULT_VIDEO_CODEC,
DEFAULT_VIDEO_MAP,
DEFAULT_VIDEO_PACKET_SIZE,
+DEFAULT_VIDEO_PROFILE_NAMES,
DOMAIN,
FEATURE_ON_OFF,
FEATURE_PLAY_PAUSE,
@@ -163,6 +165,9 @@ CAMERA_SCHEMA = BASIC_INFO_SCHEMA.extend(
vol.Optional(CONF_VIDEO_CODEC, default=DEFAULT_VIDEO_CODEC): vol.In(
VALID_VIDEO_CODECS
),
+vol.Optional(CONF_VIDEO_PROFILE_NAMES, default=DEFAULT_VIDEO_PROFILE_NAMES): [
+cv.string
+],
vol.Optional(
CONF_AUDIO_PACKET_SIZE, default=DEFAULT_AUDIO_PACKET_SIZE
): cv.positive_int,

@@ -25,7 +25,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeWizardConfigEntry) -

api: HomeWizardEnergy

-if token := entry.data.get(CONF_TOKEN):
+is_battery = entry.unique_id.startswith("HWE-BAT") if entry.unique_id else False
+
+if (token := entry.data.get(CONF_TOKEN)) and is_battery:
api = HomeWizardEnergyV2(
entry.data[CONF_IP_ADDRESS],
token=token,

@@ -37,7 +39,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeWizardConfigEntry) -
clientsession=async_get_clientsession(hass),
)

-await async_check_v2_support_and_create_issue(hass, entry)
+if is_battery:
+await async_check_v2_support_and_create_issue(hass, entry)

coordinator = HWEnergyDeviceUpdateCoordinator(hass, api)
try:

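The HomeWizard change keys the v2 API off the config entry's unique ID, so only battery devices (HWE-BAT…) use the token-authenticated client even when a token is stored. The gating condition, reduced to a sketch:

def use_v2_api(unique_id: str | None, token: str | None) -> bool:
    # Only HWE-BAT devices speak the v2 (token-authenticated) API here;
    # other devices keep the v1 client even if a token is present.
    is_battery = unique_id.startswith("HWE-BAT") if unique_id else False
    return bool(token) and is_battery

print(use_v2_api("HWE-BAT-112233", "abc"))  # True
print(use_v2_api("HWE-P1-445566", "abc"))   # False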
@@ -272,9 +272,14 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle reconfiguration of the integration."""
errors: dict[str, str] = {}
+reconfigure_entry = self._get_reconfigure_entry()

if user_input:
try:
-device_info = await async_try_connect(user_input[CONF_IP_ADDRESS])
+device_info = await async_try_connect(
+user_input[CONF_IP_ADDRESS],
+token=reconfigure_entry.data.get(CONF_TOKEN),
+)

except RecoverableError as ex:
LOGGER.error(ex)

@@ -288,7 +293,6 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
self._get_reconfigure_entry(),
data_updates=user_input,
)
-reconfigure_entry = self._get_reconfigure_entry()
return self.async_show_form(
step_id="reconfigure",
data_schema=vol.Schema(
@@ -306,7 +310,7 @@
)


-async def async_try_connect(ip_address: str) -> Device:
+async def async_try_connect(ip_address: str, token: str | None = None) -> Device:
"""Try to connect.

Make connection with device to test the connection

@@ -317,7 +321,7 @@ async def async_try_connect(ip_address: str) -> Device:

# Determine if device is v1 or v2 capable
if await has_v2_api(ip_address):
-energy_api = HomeWizardEnergyV2(ip_address)
+energy_api = HomeWizardEnergyV2(ip_address, token=token)
else:
energy_api = HomeWizardEnergyV1(ip_address)

@@ -12,6 +12,6 @@
"iot_class": "local_polling",
"loggers": ["homewizard_energy"],
"quality_scale": "platinum",
-"requirements": ["python-homewizard-energy==v8.3.0"],
+"requirements": ["python-homewizard-energy==v8.3.2"],
"zeroconf": ["_hwenergy._tcp.local.", "_homewizard._tcp.local."]
}

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/hydrawise",
"iot_class": "cloud_polling",
"loggers": ["pydrawise"],
-"requirements": ["pydrawise==2025.1.0"]
+"requirements": ["pydrawise==2025.2.0"]
}

@@ -87,7 +87,7 @@ class IdasenDeskConfigFlow(ConfigFlow, domain=DOMAIN):
if discovery := self._discovery_info:
self._discovered_devices[discovery.address] = discovery
else:
-current_addresses = self._async_current_ids()
+current_addresses = self._async_current_ids(include_ignore=False)
for discovery in async_discovered_service_info(self.hass):
if (
discovery.address in current_addresses

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/imap",
"iot_class": "cloud_push",
"loggers": ["aioimaplib"],
-"requirements": ["aioimaplib==2.0.0"]
+"requirements": ["aioimaplib==2.0.1"]
}

@@ -1,6 +1,6 @@
{
"domain": "incomfort",
-"name": "Intergas InComfort/Intouch Lan2RF gateway",
+"name": "Intergas gateway",
"codeowners": ["@jbouwh"],
"config_flow": true,
"dhcp": [

@@ -2,20 +2,20 @@
"config": {
"step": {
"user": {
-"description": "Set up new Intergas InComfort Lan2RF Gateway, some older systems might not need credentials to be set up. For newer devices authentication is required.",
+"description": "Set up new Intergas gateway, some older systems might not need credentials to be set up. For newer devices authentication is required.",
"data": {
"host": "[%key:common::config_flow::data::host%]",
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
-"host": "Hostname or IP-address of the Intergas InComfort Lan2RF Gateway.",
+"host": "Hostname or IP-address of the Intergas gateway.",
"username": "The username to log into the gateway. This is `admin` in most cases.",
-"password": "The password to log into the gateway, is printed at the bottom of the Lan2RF Gateway or is `intergas` for some older devices."
+"password": "The password to log into the gateway, is printed at the bottom of the gateway or is `intergas` for some older devices."
}
},
"dhcp_auth": {
-"title": "Set up Intergas InComfort Lan2RF Gateway",
+"title": "Set up Intergas gateway",
"description": "Please enter authentication details for gateway {host}",
"data": {
"username": "[%key:common::config_flow::data::username%]",

@@ -23,12 +23,12 @@
},
"data_description": {
"username": "The username to log into the gateway. This is `admin` in most cases.",
-"password": "The password to log into the gateway, is printed at the bottom of the Lan2RF Gateway or is `intergas` for some older devices."
+"password": "The password to log into the gateway, is printed at the bottom of the Gateway or is `intergas` for some older devices."
}
},
"dhcp_confirm": {
-"title": "Set up Intergas InComfort Lan2RF Gateway",
-"description": "Do you want to set up the discovered Intergas InComfort Lan2RF Gateway ({host})?"
+"title": "Set up Intergas gateway",
+"description": "Do you want to set up the discovered Intergas gateway ({host})?"
},
"reauth_confirm": {
"data": {

@@ -48,9 +48,9 @@
"error": {
"auth_error": "Invalid credentials.",
"no_heaters": "No heaters found.",
-"not_found": "No Lan2RF gateway found.",
-"timeout_error": "Time out when connecting to Lan2RF gateway.",
-"unknown": "Unknown error when connecting to Lan2RF gateway."
+"not_found": "No gateway found.",
+"timeout_error": "Time out when connecting to the gateway.",
+"unknown": "Unknown error when connecting to the gateway."
}
},
"exceptions": {

@@ -70,7 +70,7 @@
"options": {
"step": {
"init": {
-"title": "Intergas InComfort Lan2RF Gateway options",
+"title": "Intergas gateway options",
"data": {
"legacy_setpoint_status": "Legacy setpoint handling"
},

@@ -72,7 +72,7 @@ class INKBIRDConfigFlow(ConfigFlow, domain=DOMAIN):
title=self._discovered_devices[address], data={}
)

-current_addresses = self._async_current_ids()
+current_addresses = self._async_current_ids(include_ignore=False)
for discovery_info in async_discovered_service_info(self.hass, False):
address = discovery_info.address
if address in current_addresses or address in self._discovered_devices:

@@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "local_polling",
"loggers": ["jellyfin_apiclient_python"],
-"requirements": ["jellyfin-apiclient-python==1.9.2"],
+"requirements": ["jellyfin-apiclient-python==1.10.0"],
"single_config_entry": true
}

@@ -12,7 +12,7 @@
"requirements": [
"xknx==3.5.0",
"xknxproject==3.8.1",
-"knx-frontend==2025.1.28.225404"
+"knx-frontend==2025.1.30.194235"
],
"single_config_entry": true
}

@@ -10,8 +10,8 @@ from lacrosse_view import HTTPError, LaCrosse, Location, LoginError, Sensor

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
-from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
+from homeassistant.exceptions import ConfigEntryAuthFailed
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import SCAN_INTERVAL

@@ -26,6 +26,7 @@ class LaCrosseUpdateCoordinator(DataUpdateCoordinator[list[Sensor]]):
name: str
id: str
hass: HomeAssistant
+devices: list[Sensor] | None = None

def __init__(
self,

@@ -60,24 +61,34 @@ class LaCrosseUpdateCoordinator(DataUpdateCoordinator[list[Sensor]]):
except LoginError as error:
raise ConfigEntryAuthFailed from error

+if self.devices is None:
+_LOGGER.debug("Getting devices")
+try:
+self.devices = await self.api.get_devices(
+location=Location(id=self.id, name=self.name),
+)
+except HTTPError as error:
+raise UpdateFailed from error
+
try:
-# Fetch last hour of data
-sensors = await self.api.get_sensors(
-location=Location(id=self.id, name=self.name),
-tz=self.hass.config.time_zone,
-start=str(now - 3600),
-end=str(now),
-)
-except HTTPError as error:
-raise ConfigEntryNotReady from error
+for sensor in self.devices:
+sensor.data = (
+await self.api.get_sensor_status(
+sensor=sensor,
+tz=self.hass.config.time_zone,
+)
+)["data"]["current"]
+_LOGGER.debug("Got data: %s", sensor.data)

-_LOGGER.debug("Got data: %s", sensors)
+except HTTPError as error:
+raise UpdateFailed from error

# Verify that we have permission to read the sensors
-for sensor in sensors:
+for sensor in self.devices:
if not sensor.permissions.get("read", False):
raise ConfigEntryAuthFailed(
f"This account does not have permission to read {sensor.name}"
)

-return sensors
+return self.devices

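The LaCrosse View rewrite fetches the device list once, caches it on the coordinator, and then polls per-sensor status on every refresh, raising UpdateFailed so the coordinator retries instead of failing setup. A compressed sketch of the caching pattern; FakeApi and its payload shape are assumptions based on the diff, not the library's real client:

import asyncio

class FakeApi:  # stand-in for the lacrosse_view client
    async def get_devices(self):
        return [type("Sensor", (), {"name": "Patio", "data": None})()]

    async def get_sensor_status(self, sensor):
        return {"data": {"current": {"Temperature": {"spot": {"value": "21.5"}}}}}

class Coordinator:
    def __init__(self, api) -> None:
        self.api = api
        self.devices = None  # cached after the first successful fetch

    async def update(self):
        if self.devices is None:
            # Hit the device-list endpoint only once; refreshes reuse the cache.
            self.devices = await self.api.get_devices()
        for sensor in self.devices:
            status = await self.api.get_sensor_status(sensor=sensor)
            sensor.data = status["data"]["current"]
        return self.devices

print(asyncio.run(Coordinator(FakeApi()).update())[0].data)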
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/lacrosse_view",
"iot_class": "cloud_polling",
"loggers": ["lacrosse_view"],
-"requirements": ["lacrosse-view==1.0.3"]
+"requirements": ["lacrosse-view==1.1.1"]
}

@@ -45,10 +45,10 @@ class LaCrosseSensorEntityDescription(SensorEntityDescription):

def get_value(sensor: Sensor, field: str) -> float | int | str | None:
"""Get the value of a sensor field."""
-field_data = sensor.data.get(field)
+field_data = sensor.data.get(field) if sensor.data is not None else None
if field_data is None:
return None
-value = field_data["values"][-1]["s"]
+value = field_data["spot"]["value"]
try:
value = float(value)
except ValueError:
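get_value now guards against sensor.data being unset and reads the new spot.value payload shape. A small sketch with the payload shape assumed from the diff:

def get_value(data: dict | None, field: str):
    field_data = data.get(field) if data is not None else None
    if field_data is None:
        return None
    value = field_data["spot"]["value"]  # new API payload shape
    try:
        return float(value)
    except ValueError:
        return value  # non-numeric readings pass through unchanged

print(get_value({"Temperature": {"spot": {"value": "21.5"}}}, "Temperature"))  # 21.5
print(get_value(None, "Temperature"))  # None, no AttributeError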
@@ -178,7 +178,7 @@ async def async_setup_entry(
continue

# if the API returns a different unit of measurement from the description, update it
-if sensor.data.get(field) is not None:
+if sensor.data is not None and sensor.data.get(field) is not None:
native_unit_of_measurement = UNIT_OF_MEASUREMENT_MAP.get(
sensor.data[field].get("unit")
)
@@ -240,7 +240,9 @@ class LaCrosseViewSensor(
@property
def available(self) -> bool:
"""Return True if entity is available."""
+data = self.coordinator.data[self.index].data
return (
super().available
-and self.entity_description.key in self.coordinator.data[self.index].data
+and data is not None
+and self.entity_description.key in data
)

@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/lcn",
"iot_class": "local_push",
"loggers": ["pypck"],
-"requirements": ["pypck==0.8.3", "lcn-frontend==0.2.3"]
+"requirements": ["pypck==0.8.5", "lcn-frontend==0.2.3"]
}

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/ld2410_ble",
"integration_type": "device",
"iot_class": "local_push",
-"requirements": ["bluetooth-data-tools==1.22.0", "ld2410-ble==0.1.1"]
+"requirements": ["bluetooth-data-tools==1.23.4", "ld2410-ble==0.1.1"]
}

@@ -35,5 +35,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/led_ble",
"iot_class": "local_polling",
-"requirements": ["bluetooth-data-tools==1.22.0", "led-ble==1.1.4"]
+"requirements": ["bluetooth-data-tools==1.23.4", "led-ble==1.1.6"]
}

@@ -113,7 +113,7 @@ def find_hsbk(hass: HomeAssistant, **kwargs: Any) -> list[float | int | None] |
saturation = int(saturation / 100 * 65535)
kelvin = 3500

-if _ATTR_COLOR_TEMP in kwargs:
+if ATTR_COLOR_TEMP_KELVIN not in kwargs and _ATTR_COLOR_TEMP in kwargs:
# added in 2025.1, can be removed in 2026.1
_LOGGER.warning(
"The 'color_temp' parameter is deprecated. Please use 'color_temp_kelvin' for"

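The LIFX fix only emits the deprecation warning when the legacy color_temp key is used without the new color_temp_kelvin, since modern callers may pass both. Reduced to a sketch with warnings standing in for the integration's logger:

import warnings

def check_kwargs(**kwargs) -> None:
    # Warn only when the caller relies on the deprecated key alone.
    if "color_temp_kelvin" not in kwargs and "color_temp" in kwargs:
        warnings.warn("'color_temp' is deprecated; use 'color_temp_kelvin'")

check_kwargs(color_temp=250, color_temp_kelvin=4000)  # no warning
check_kwargs(color_temp=250)                          # warns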
@@ -277,20 +277,6 @@ FOUR_GROUP_REMOTE_TRIGGER_SCHEMA = LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
}
)

-PADDLE_SWITCH_PICO_BUTTON_TYPES_TO_LIP = {
-"button_0": 2,
-"button_2": 4,
-}
-PADDLE_SWITCH_PICO_BUTTON_TYPES_TO_LEAP = {
-"button_0": 0,
-"button_2": 2,
-}
-PADDLE_SWITCH_PICO_TRIGGER_SCHEMA = LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
-{
-vol.Required(CONF_SUBTYPE): vol.In(PADDLE_SWITCH_PICO_BUTTON_TYPES_TO_LIP),
-}
-)
-

DEVICE_TYPE_SCHEMA_MAP = {
"Pico2Button": PICO_2_BUTTON_TRIGGER_SCHEMA,

@@ -302,7 +288,6 @@ DEVICE_TYPE_SCHEMA_MAP = {
"Pico4ButtonZone": PICO_4_BUTTON_ZONE_TRIGGER_SCHEMA,
"Pico4Button2Group": PICO_4_BUTTON_2_GROUP_TRIGGER_SCHEMA,
"FourGroupRemote": FOUR_GROUP_REMOTE_TRIGGER_SCHEMA,
-"PaddleSwitchPico": PADDLE_SWITCH_PICO_TRIGGER_SCHEMA,
}

DEVICE_TYPE_SUBTYPE_MAP_TO_LIP = {

@@ -315,7 +300,6 @@ DEVICE_TYPE_SUBTYPE_MAP_TO_LIP = {
"Pico4ButtonZone": PICO_4_BUTTON_ZONE_BUTTON_TYPES_TO_LIP,
"Pico4Button2Group": PICO_4_BUTTON_2_GROUP_BUTTON_TYPES_TO_LIP,
"FourGroupRemote": FOUR_GROUP_REMOTE_BUTTON_TYPES_TO_LIP,
-"PaddleSwitchPico": PADDLE_SWITCH_PICO_BUTTON_TYPES_TO_LIP,
}

DEVICE_TYPE_SUBTYPE_MAP_TO_LEAP = {

@@ -328,7 +312,6 @@ DEVICE_TYPE_SUBTYPE_MAP_TO_LEAP = {
"Pico4ButtonZone": PICO_4_BUTTON_ZONE_BUTTON_TYPES_TO_LEAP,
"Pico4Button2Group": PICO_4_BUTTON_2_GROUP_BUTTON_TYPES_TO_LEAP,
"FourGroupRemote": FOUR_GROUP_REMOTE_BUTTON_TYPES_TO_LEAP,
-"PaddleSwitchPico": PADDLE_SWITCH_PICO_BUTTON_TYPES_TO_LEAP,
}

LEAP_TO_DEVICE_TYPE_SUBTYPE_MAP: dict[str, dict[int, str]] = {

@@ -343,7 +326,6 @@ TRIGGER_SCHEMA = vol.Any(
PICO_4_BUTTON_ZONE_TRIGGER_SCHEMA,
PICO_4_BUTTON_2_GROUP_TRIGGER_SCHEMA,
FOUR_GROUP_REMOTE_TRIGGER_SCHEMA,
-PADDLE_SWITCH_PICO_TRIGGER_SCHEMA,
)

Some files were not shown because too many files have changed in this diff.