forked from home-assistant/core

Compare commits: energy_sen...2025.2.5 (121 commits)
| SHA1 |
|---|
| cc792403ab |
| 3d2ab3b59e |
| ba1650bd05 |
| df5f6fc1e6 |
| 0dbdb42947 |
| 325022ec77 |
| 3ea1d2823e |
| 83d9c000d3 |
| 266612e4d9 |
| dc7cba60bd |
| d752a3a24c |
| 8c3ee80203 |
| 94555f533b |
| 6da33a8883 |
| d42e31b5e7 |
| 441917706b |
| 12e530dc75 |
| 59651c6f10 |
| ac21d2855c |
| 6070feea73 |
| 167881e434 |
| 35bcf82627 |
| 66bb501621 |
| 179ba8309d |
| 2b7543aca2 |
| 1e49e04491 |
| e60b6482ab |
| 7b82781f4c |
| b40daf0152 |
| 417ac56bd6 |
| c9a0814142 |
| 2d8a619b54 |
| 759cc3303a |
| 5328429b08 |
| 21b98a76cc |
| 95f632a13a |
| 33d4d1f8e5 |
| 72878c18d0 |
| ccd220ad0f |
| f191f6ae22 |
| 28a18e538d |
| c2f6255d16 |
| e5fd08ae76 |
| 4b5633d9d8 |
| a9c6a06704 |
| 0faa8efd5a |
| 5a257b090e |
| 41fb6a537f |
| b166c32eb8 |
| 288acfb511 |
| 2cb9682303 |
| 7e52170789 |
| 979b3d4269 |
| 9772014bce |
| f8763c49ef |
| b4ef00659c |
| df49c53bb6 |
| 8dfe483b38 |
| b45d7cbbc3 |
| 239ba9b1cc |
| 2d5a75d4f2 |
| e1ad3f05e6 |
| b9280edbfa |
| 010993fc5f |
| 713931661e |
| af06521f66 |
| c32f57f85a |
| 171061a778 |
| 476ea35bdb |
| 00e6866664 |
| 201bf95ab8 |
| ff22bbd0e4 |
| fd8d4e937c |
| 7903348d79 |
| 090dbba06e |
| af77e69eb0 |
| 23e7638687 |
| 36b722960a |
| 3dd241a398 |
| b5a9c3d1f6 |
| eca714a45a |
| 8049699efb |
| 7c6afd50dc |
| 42d8889778 |
| a4c0304e1f |
| c63e688ba8 |
| 16298b4195 |
| da23eb22db |
| 4bd1d0199b |
| efe7050030 |
| 79ff85f517 |
| 73ad4caf94 |
| e3d649d349 |
| 657e3488ba |
| 7508c14a53 |
| ac84970da8 |
| 30073f3493 |
| 3abd7b8ba3 |
| 62bc6e4bf6 |
| 5faa189fef |
| e09ae1c83d |
| 7b20299de7 |
| 81e501aba1 |
| 568ac22ce8 |
| c71ab054f1 |
| bea201f9f6 |
| dda90bc04c |
| a033e4c88d |
| 42b6f83e7c |
| cb937bc115 |
| bec569caf9 |
| 3390fb32a8 |
| 3ebb58f780 |
| 30b131d3b9 |
| cd40232beb |
| f27fe365c5 |
| 1c769418fb |
| db7c2dab52 |
| 627377872b |
| 8504162539 |
| 67c6a1d436 |

@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/airgradient",
   "integration_type": "device",
   "iot_class": "local_polling",
-  "requirements": ["airgradient==0.9.1"],
+  "requirements": ["airgradient==0.9.2"],
   "zeroconf": ["_airgradient._tcp.local."]
 }

@@ -19,10 +19,20 @@ class ApSystemsEntity(Entity):
         data: ApSystemsData,
     ) -> None:
         """Initialize the APsystems entity."""
+
+        # Handle device version safely
+        sw_version = None
+        if data.coordinator.device_version:
+            version_parts = data.coordinator.device_version.split(" ")
+            if len(version_parts) > 1:
+                sw_version = version_parts[1]
+            else:
+                sw_version = version_parts[0]
+
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, data.device_id)},
             manufacturer="APsystems",
             model="EZ1-M",
             serial_number=data.device_id,
-            sw_version=data.coordinator.device_version.split(" ")[1],
+            sw_version=sw_version,
         )

@@ -1,5 +1,7 @@
 """Assist Satellite intents."""
 
+from typing import Final
+
 import voluptuous as vol
 
 from homeassistant.core import HomeAssistant
@@ -7,6 +9,8 @@ from homeassistant.helpers import entity_registry as er, intent
 
 from .const import DOMAIN, AssistSatelliteEntityFeature
 
+EXCLUDED_DOMAINS: Final[set[str]] = {"voip"}
+
 
 async def async_setup_intents(hass: HomeAssistant) -> None:
     """Set up the intents."""
@@ -30,19 +34,36 @@ class BroadcastIntentHandler(intent.IntentHandler):
         ent_reg = er.async_get(hass)
 
         # Find all assist satellite entities that are not the one invoking the intent
-        entities = {
-            entity: entry
-            for entity in hass.states.async_entity_ids(DOMAIN)
-            if (entry := ent_reg.async_get(entity))
-            and entry.supported_features & AssistSatelliteEntityFeature.ANNOUNCE
-        }
+        entities: dict[str, er.RegistryEntry] = {}
+        for entity in hass.states.async_entity_ids(DOMAIN):
+            entry = ent_reg.async_get(entity)
+            if (
+                (entry is None)
+                or (
+                    # Supports announce
+                    not (
+                        entry.supported_features & AssistSatelliteEntityFeature.ANNOUNCE
+                    )
+                )
+                # Not the invoking device
+                or (intent_obj.device_id and (entry.device_id == intent_obj.device_id))
+            ):
+                # Skip satellite
                continue
 
-        if intent_obj.device_id:
-            entities = {
-                entity: entry
-                for entity, entry in entities.items()
-                if entry.device_id != intent_obj.device_id
-            }
+            # Check domain of config entry against excluded domains
+            if (
+                entry.config_entry_id
+                and (
+                    config_entry := hass.config_entries.async_get_entry(
+                        entry.config_entry_id
+                    )
+                )
+                and (config_entry.domain in EXCLUDED_DOMAINS)
+            ):
+                continue
+
+            entities[entity] = entry
 
         await hass.services.async_call(
             DOMAIN,
@@ -54,7 +75,6 @@ class BroadcastIntentHandler(intent.IntentHandler):
         )
 
         response = intent_obj.create_response()
-        response.async_set_speech("Done")
         response.response_type = intent.IntentResponseType.ACTION_DONE
         response.async_set_results(
             success_results=[

@@ -16,6 +16,7 @@ from .agent import (
     BackupAgentPlatformProtocol,
     LocalBackupAgent,
 )
+from .config import BackupConfig, CreateBackupParametersDict
 from .const import DATA_MANAGER, DOMAIN
 from .http import async_register_http_views
 from .manager import (
@@ -47,12 +48,14 @@
     "BackupAgent",
     "BackupAgentError",
     "BackupAgentPlatformProtocol",
+    "BackupConfig",
     "BackupManagerError",
     "BackupNotFound",
     "BackupPlatformProtocol",
     "BackupReaderWriter",
     "BackupReaderWriterError",
     "CreateBackupEvent",
+    "CreateBackupParametersDict",
     "CreateBackupStage",
     "CreateBackupState",
     "Folder",

@@ -154,7 +154,8 @@ class BackupConfig:
         self.data.retention.apply(self._manager)
         self.data.schedule.apply(self._manager)
 
-    async def update(
+    @callback
+    def update(
         self,
         *,
         agents: dict[str, AgentParametersDict] | UndefinedType = UNDEFINED,

@@ -4,6 +4,7 @@ from __future__ import annotations
 
 import abc
 import asyncio
+from collections import defaultdict
 from collections.abc import AsyncIterator, Callable, Coroutine
 from dataclasses import dataclass, replace
 from enum import StrEnum
@@ -42,7 +43,11 @@ from .agent import (
     BackupAgentPlatformProtocol,
     LocalBackupAgent,
 )
-from .config import BackupConfig, delete_backups_exceeding_configured_count
+from .config import (
+    BackupConfig,
+    CreateBackupParametersDict,
+    delete_backups_exceeding_configured_count,
+)
 from .const import (
     BUF_SIZE,
     DATA_MANAGER,
@@ -281,6 +286,10 @@ class BackupReaderWriter(abc.ABC):
     ) -> None:
         """Get restore events after core restart."""
 
+    @abc.abstractmethod
+    async def async_validate_config(self, *, config: BackupConfig) -> None:
+        """Validate backup config."""
+
 
 class IncorrectPasswordError(BackupReaderWriterError):
     """Raised when the password is incorrect."""
@@ -332,6 +341,7 @@ class BackupManager:
             self.config.load(stored["config"])
            self.known_backups.load(stored["backups"])
 
+        await self._reader_writer.async_validate_config(config=self.config)
         await self._reader_writer.async_resume_restore_progress_after_restart(
             on_progress=self.async_on_backup_event
         )
@@ -560,8 +570,15 @@ class BackupManager:
             return_exceptions=True,
         )
         for idx, result in enumerate(list_backups_results):
+            agent_id = agent_ids[idx]
             if isinstance(result, BackupAgentError):
-                agent_errors[agent_ids[idx]] = result
+                agent_errors[agent_id] = result
                 continue
+            if isinstance(result, Exception):
+                agent_errors[agent_id] = result
+                LOGGER.error(
+                    "Unexpected error for %s: %s", agent_id, result, exc_info=result
+                )
+                continue
             if isinstance(result, BaseException):
                 raise result  # unexpected error
@@ -588,7 +605,7 @@
                     name=agent_backup.name,
                     with_automatic_settings=with_automatic_settings,
                 )
-            backups[backup_id].agents[agent_ids[idx]] = AgentBackupStatus(
+            backups[backup_id].agents[agent_id] = AgentBackupStatus(
                 protected=agent_backup.protected,
                 size=agent_backup.size,
             )
@@ -611,8 +628,15 @@
             return_exceptions=True,
         )
         for idx, result in enumerate(get_backup_results):
+            agent_id = agent_ids[idx]
             if isinstance(result, BackupAgentError):
-                agent_errors[agent_ids[idx]] = result
+                agent_errors[agent_id] = result
                 continue
+            if isinstance(result, Exception):
+                agent_errors[agent_id] = result
+                LOGGER.error(
+                    "Unexpected error for %s: %s", agent_id, result, exc_info=result
+                )
+                continue
             if isinstance(result, BaseException):
                 raise result  # unexpected error
@@ -640,7 +664,7 @@
                     name=result.name,
                     with_automatic_settings=with_automatic_settings,
                 )
-            backup.agents[agent_ids[idx]] = AgentBackupStatus(
+            backup.agents[agent_id] = AgentBackupStatus(
                 protected=result.protected,
                 size=result.size,
             )
@@ -663,21 +687,31 @@
             return None
         return with_automatic_settings
 
-    async def async_delete_backup(self, backup_id: str) -> dict[str, Exception]:
+    async def async_delete_backup(
+        self, backup_id: str, *, agent_ids: list[str] | None = None
+    ) -> dict[str, Exception]:
         """Delete a backup."""
         agent_errors: dict[str, Exception] = {}
-        agent_ids = list(self.backup_agents)
+        if agent_ids is None:
+            agent_ids = list(self.backup_agents)
 
         delete_backup_results = await asyncio.gather(
             *(
-                agent.async_delete_backup(backup_id)
-                for agent in self.backup_agents.values()
+                self.backup_agents[agent_id].async_delete_backup(backup_id)
+                for agent_id in agent_ids
             ),
             return_exceptions=True,
         )
         for idx, result in enumerate(delete_backup_results):
+            agent_id = agent_ids[idx]
             if isinstance(result, BackupAgentError):
-                agent_errors[agent_ids[idx]] = result
+                agent_errors[agent_id] = result
                 continue
+            if isinstance(result, Exception):
+                agent_errors[agent_id] = result
+                LOGGER.error(
+                    "Unexpected error for %s: %s", agent_id, result, exc_info=result
+                )
+                continue
             if isinstance(result, BaseException):
                 raise result  # unexpected error
@@ -710,35 +744,71 @@
         # Run the include filter first to ensure we only consider backups that
         # should be included in the deletion process.
         backups = include_filter(backups)
+        backups_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(dict)
+        for backup_id, backup in backups.items():
+            for agent_id in backup.agents:
+                backups_by_agent[agent_id][backup_id] = backup
 
-        LOGGER.debug("Total automatic backups: %s", backups)
+        LOGGER.debug("Backups returned by include filter: %s", backups)
+        LOGGER.debug(
+            "Backups returned by include filter by agent: %s",
+            {agent_id: list(backups) for agent_id, backups in backups_by_agent.items()},
+        )
 
         backups_to_delete = delete_filter(backups)
 
+        LOGGER.debug("Backups returned by delete filter: %s", backups_to_delete)
+
         if not backups_to_delete:
             return
 
         # always delete oldest backup first
-        backups_to_delete = dict(
-            sorted(
-                backups_to_delete.items(),
-                key=lambda backup_item: backup_item[1].date,
-            )
+        backups_to_delete_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(
+            dict
         )
+        for backup_id, backup in sorted(
+            backups_to_delete.items(),
+            key=lambda backup_item: backup_item[1].date,
+        ):
+            for agent_id in backup.agents:
+                backups_to_delete_by_agent[agent_id][backup_id] = backup
+        LOGGER.debug(
+            "Backups returned by delete filter by agent: %s",
+            {
+                agent_id: list(backups)
+                for agent_id, backups in backups_to_delete_by_agent.items()
+            },
+        )
+        for agent_id, to_delete_from_agent in backups_to_delete_by_agent.items():
+            if len(to_delete_from_agent) >= len(backups_by_agent[agent_id]):
+                # Never delete the last backup.
+                last_backup = to_delete_from_agent.popitem()
+                LOGGER.debug(
+                    "Keeping the last backup %s for agent %s", last_backup, agent_id
+                )
 
-        if len(backups_to_delete) >= len(backups):
-            # Never delete the last backup.
-            last_backup = backups_to_delete.popitem()
-            LOGGER.debug("Keeping the last backup: %s", last_backup)
+        LOGGER.debug(
+            "Backups to delete by agent: %s",
+            {
+                agent_id: list(backups)
+                for agent_id, backups in backups_to_delete_by_agent.items()
+            },
+        )
+
+        backup_ids_to_delete: dict[str, set[str]] = defaultdict(set)
+        for agent_id, to_delete in backups_to_delete_by_agent.items():
+            for backup_id in to_delete:
+                backup_ids_to_delete[backup_id].add(agent_id)
 
-        LOGGER.debug("Backups to delete: %s", backups_to_delete)
-
-        if not backups_to_delete:
+        if not backup_ids_to_delete:
             return
 
-        backup_ids = list(backups_to_delete)
+        backup_ids = list(backup_ids_to_delete)
         delete_results = await asyncio.gather(
-            *(self.async_delete_backup(backup_id) for backup_id in backups_to_delete)
+            *(
+                self.async_delete_backup(backup_id, agent_ids=list(agent_ids))
+                for backup_id, agent_ids in backup_ids_to_delete.items()
+            )
         )
         agent_errors = {
             backup_id: error
@@ -1771,6 +1841,44 @@ class CoreBackupReaderWriter(BackupReaderWriter):
             )
             on_progress(IdleEvent())
 
+    async def async_validate_config(self, *, config: BackupConfig) -> None:
+        """Validate backup config.
+
+        Update automatic backup settings to not include addons or folders and remove
+        hassio agents in case a backup created by supervisor was restored.
+        """
+        create_backup = config.data.create_backup
+        if (
+            not create_backup.include_addons
+            and not create_backup.include_all_addons
+            and not create_backup.include_folders
+            and not any(a_id.startswith("hassio.") for a_id in create_backup.agent_ids)
+        ):
+            LOGGER.debug("Backup settings don't need to be adjusted")
+            return
+
+        LOGGER.info(
+            "Adjusting backup settings to not include addons, folders or supervisor locations"
+        )
+        automatic_agents = [
+            agent_id
+            for agent_id in create_backup.agent_ids
+            if not agent_id.startswith("hassio.")
+        ]
+        if (
+            self._local_agent_id not in automatic_agents
+            and "hassio.local" in create_backup.agent_ids
+        ):
+            automatic_agents = [self._local_agent_id, *automatic_agents]
+        config.update(
+            create_backup=CreateBackupParametersDict(
+                agent_ids=automatic_agents,
+                include_addons=None,
+                include_all_addons=False,
+                include_folders=None,
+            )
+        )
+
 
 def _generate_backup_id(date: str, name: str) -> str:
     """Generate a backup ID."""

@@ -16,7 +16,7 @@ if TYPE_CHECKING:
 STORE_DELAY_SAVE = 30
 STORAGE_KEY = DOMAIN
 STORAGE_VERSION = 1
-STORAGE_VERSION_MINOR = 3
+STORAGE_VERSION_MINOR = 4
 
 
 class StoredBackupData(TypedDict):
@@ -60,6 +60,13 @@ class _BackupStore(Store[StoredBackupData]):
                 else:
                     data["config"]["schedule"]["days"] = [state]
                     data["config"]["schedule"]["recurrence"] = "custom_days"
+            if old_minor_version < 4:
+                # Workaround for a bug in frontend which incorrectly set days to 0
+                # instead of to None for unlimited retention.
+                if data["config"]["retention"]["copies"] == 0:
+                    data["config"]["retention"]["copies"] = None
+                if data["config"]["retention"]["days"] == 0:
+                    data["config"]["retention"]["days"] = None
 
         # Note: We allow reading data with major version 2.
         # Reject if major version is higher than 2.

@@ -104,12 +104,15 @@ def read_backup(backup_path: Path) -> AgentBackup:
         bool, homeassistant.get("exclude_database", False)
     )
 
+    extra_metadata = cast(dict[str, bool | str], data.get("extra", {}))
+    date = extra_metadata.get("supervisor.backup_request_date", data["date"])
+
     return AgentBackup(
         addons=addons,
         backup_id=cast(str, data["slug"]),
         database_included=database_included,
-        date=cast(str, data["date"]),
-        extra_metadata=cast(dict[str, bool | str], data.get("extra", {})),
+        date=cast(str, date),
+        extra_metadata=extra_metadata,
         folders=folders,
         homeassistant_included=homeassistant_included,
         homeassistant_version=homeassistant_version,

@@ -346,6 +346,7 @@ async def handle_config_info(
 )
 
 
+@callback
 @websocket_api.require_admin
 @websocket_api.websocket_command(
     {
@@ -368,8 +369,10 @@ async def handle_config_info(
         ),
         vol.Optional("retention"): vol.Schema(
             {
-                vol.Optional("copies"): vol.Any(int, None),
-                vol.Optional("days"): vol.Any(int, None),
+                # Note: We can't use cv.positive_int because it allows 0 even
+                # though 0 is not positive.
+                vol.Optional("copies"): vol.Any(vol.All(int, vol.Range(min=1)), None),
+                vol.Optional("days"): vol.Any(vol.All(int, vol.Range(min=1)), None),
             },
         ),
         vol.Optional("schedule"): vol.Schema(
@@ -385,8 +388,7 @@ async def handle_config_info(
         ),
     }
 )
-@websocket_api.async_response
-async def handle_config_update(
+def handle_config_update(
     hass: HomeAssistant,
     connection: websocket_api.ActiveConnection,
     msg: dict[str, Any],
@@ -396,7 +398,7 @@ async def handle_config_update(
     changes = dict(msg)
     changes.pop("id")
     changes.pop("type")
-    await manager.config.update(**changes)
+    manager.config.update(**changes)
     connection.send_result(msg["id"])
 
 

@@ -411,7 +411,7 @@ def ble_device_matches(
     ) and service_data_uuid not in service_info.service_data:
         return False
 
-    if manufacturer_id := matcher.get(MANUFACTURER_ID):
+    if (manufacturer_id := matcher.get(MANUFACTURER_ID)) is not None:
         if manufacturer_id not in service_info.manufacturer_data:
             return False
 

@@ -3,21 +3,20 @@
 from __future__ import annotations
 
 import asyncio
-import base64
 from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
-import hashlib
 import logging
 import random
 from typing import Any
 
-from aiohttp import ClientError, ClientTimeout
+from aiohttp import ClientError
 from hass_nabucasa import Cloud, CloudError
+from hass_nabucasa.api import CloudApiNonRetryableError
 from hass_nabucasa.cloud_api import (
+    FilesHandlerListEntry,
     async_files_delete_file,
-    async_files_download_details,
     async_files_list,
-    async_files_upload_details,
 )
+from hass_nabucasa.files import FilesError, StorageType, calculate_b64md5
 
 from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
 from homeassistant.core import HomeAssistant, callback
@@ -28,20 +27,11 @@ from .client import CloudClient
 from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT
 
 _LOGGER = logging.getLogger(__name__)
-_STORAGE_BACKUP = "backup"
 _RETRY_LIMIT = 5
 _RETRY_SECONDS_MIN = 60
 _RETRY_SECONDS_MAX = 600
 
 
-async def _b64md5(stream: AsyncIterator[bytes]) -> str:
-    """Calculate the MD5 hash of a file."""
-    file_hash = hashlib.md5()
-    async for chunk in stream:
-        file_hash.update(chunk)
-    return base64.b64encode(file_hash.digest()).decode()
-
-
 async def async_get_backup_agents(
     hass: HomeAssistant,
     **kwargs: Any,
@@ -90,11 +80,6 @@ class CloudBackupAgent(BackupAgent):
         self._cloud = cloud
         self._hass = hass
 
-    @callback
-    def _get_backup_filename(self) -> str:
-        """Return the backup filename."""
-        return f"{self._cloud.client.prefs.instance_id}.tar"
-
     async def async_download_backup(
         self,
         backup_id: str,
@@ -105,67 +90,18 @@ class CloudBackupAgent(BackupAgent):
         :param backup_id: The ID of the backup that was returned in async_list_backups.
         :return: An async iterator that yields bytes.
         """
-        if not await self.async_get_backup(backup_id):
+        if not (backup := await self._async_get_backup(backup_id)):
             raise BackupAgentError("Backup not found")
 
         try:
-            details = await async_files_download_details(
-                self._cloud,
-                storage_type=_STORAGE_BACKUP,
-                filename=self._get_backup_filename(),
+            content = await self._cloud.files.download(
+                storage_type=StorageType.BACKUP,
+                filename=backup["Key"],
             )
-        except (ClientError, CloudError) as err:
-            raise BackupAgentError("Failed to get download details") from err
+        except CloudError as err:
+            raise BackupAgentError(f"Failed to download backup: {err}") from err
 
-        try:
-            resp = await self._cloud.websession.get(
-                details["url"],
-                timeout=ClientTimeout(connect=10.0, total=43200.0),  # 43200s == 12h
-            )
-
-            resp.raise_for_status()
-        except ClientError as err:
-            raise BackupAgentError("Failed to download backup") from err
-
-        return ChunkAsyncStreamIterator(resp.content)
-
-    async def _async_do_upload_backup(
-        self,
-        *,
-        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
-        filename: str,
-        base64md5hash: str,
-        metadata: dict[str, Any],
-        size: int,
-    ) -> None:
-        """Upload a backup."""
-        try:
-            details = await async_files_upload_details(
-                self._cloud,
-                storage_type=_STORAGE_BACKUP,
-                filename=filename,
-                metadata=metadata,
-                size=size,
-                base64md5hash=base64md5hash,
-            )
-        except (ClientError, CloudError) as err:
-            raise BackupAgentError("Failed to get upload details") from err
-
-        try:
-            upload_status = await self._cloud.websession.put(
-                details["url"],
-                data=await open_stream(),
-                headers=details["headers"] | {"content-length": str(size)},
-                timeout=ClientTimeout(connect=10.0, total=43200.0),  # 43200s == 12h
-            )
-            _LOGGER.log(
-                logging.DEBUG if upload_status.status < 400 else logging.WARNING,
-                "Backup upload status: %s",
-                upload_status.status,
-            )
-            upload_status.raise_for_status()
-        except (TimeoutError, ClientError) as err:
-            raise BackupAgentError("Failed to upload backup") from err
+        return ChunkAsyncStreamIterator(content)
 
     async def async_upload_backup(
         self,
@@ -182,15 +118,19 @@ class CloudBackupAgent(BackupAgent):
         if not backup.protected:
             raise BackupAgentError("Cloud backups must be protected")
 
-        base64md5hash = await _b64md5(await open_stream())
-        filename = self._get_backup_filename()
-        metadata = backup.as_dict()
         size = backup.size
+        try:
+            base64md5hash = await calculate_b64md5(open_stream, size)
+        except FilesError as err:
+            raise BackupAgentError(err) from err
+        filename = f"{self._cloud.client.prefs.instance_id}.tar"
+        metadata = backup.as_dict()
 
         tries = 1
         while tries <= _RETRY_LIMIT:
             try:
-                await self._async_do_upload_backup(
+                await self._cloud.files.upload(
+                    storage_type=StorageType.BACKUP,
                     open_stream=open_stream,
                     filename=filename,
                     base64md5hash=base64md5hash,
@@ -198,9 +138,19 @@ class CloudBackupAgent(BackupAgent):
                     size=size,
                 )
                 break
-            except BackupAgentError as err:
+            except CloudApiNonRetryableError as err:
+                if err.code == "NC-SH-FH-03":
+                    raise BackupAgentError(
+                        translation_domain=DOMAIN,
+                        translation_key="backup_size_too_large",
+                        translation_placeholders={
+                            "size": str(round(size / (1024**3), 2))
+                        },
+                    ) from err
+                raise BackupAgentError(f"Failed to upload backup {err}") from err
+            except CloudError as err:
                 if tries == _RETRY_LIMIT:
-                    raise
+                    raise BackupAgentError(f"Failed to upload backup {err}") from err
                 tries += 1
                 retry_timer = random.randint(_RETRY_SECONDS_MIN, _RETRY_SECONDS_MAX)
                 _LOGGER.info(
@@ -221,27 +171,34 @@ class CloudBackupAgent(BackupAgent):
 
         :param backup_id: The ID of the backup that was returned in async_list_backups.
         """
-        if not await self.async_get_backup(backup_id):
+        if not (backup := await self._async_get_backup(backup_id)):
             return
 
         try:
             await async_files_delete_file(
                 self._cloud,
-                storage_type=_STORAGE_BACKUP,
-                filename=self._get_backup_filename(),
+                storage_type=StorageType.BACKUP,
+                filename=backup["Key"],
             )
         except (ClientError, CloudError) as err:
             raise BackupAgentError("Failed to delete backup") from err
 
     async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
         """List backups."""
+        backups = await self._async_list_backups()
+        return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups]
+
+    async def _async_list_backups(self) -> list[FilesHandlerListEntry]:
+        """List backups."""
         try:
-            backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP)
-            _LOGGER.debug("Cloud backups: %s", backups)
+            backups = await async_files_list(
+                self._cloud, storage_type=StorageType.BACKUP
+            )
         except (ClientError, CloudError) as err:
             raise BackupAgentError("Failed to list backups") from err
 
-        return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups]
+        _LOGGER.debug("Cloud backups: %s", backups)
+        return backups
 
     async def async_get_backup(
         self,
@@ -249,10 +206,19 @@ class CloudBackupAgent(BackupAgent):
         **kwargs: Any,
     ) -> AgentBackup | None:
         """Return a backup."""
-        backups = await self.async_list_backups()
+        if not (backup := await self._async_get_backup(backup_id)):
+            return None
+        return AgentBackup.from_dict(backup["Metadata"])
+
+    async def _async_get_backup(
+        self,
+        backup_id: str,
+    ) -> FilesHandlerListEntry | None:
+        """Return a backup."""
+        backups = await self._async_list_backups()
 
         for backup in backups:
-            if backup.backup_id == backup_id:
+            if backup["Metadata"]["backup_id"] == backup_id:
                 return backup
 
         return None

@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["hass_nabucasa"],
-  "requirements": ["hass-nabucasa==0.88.1"],
+  "requirements": ["hass-nabucasa==0.92.0"],
   "single_config_entry": true
 }

@@ -17,6 +17,11 @@
       "subscription_expiration": "Subscription expiration"
     }
   },
+  "exceptions": {
+    "backup_size_too_large": {
+      "message": "The backup size of {size}GB is too large to be uploaded to Home Assistant Cloud."
+    }
+  },
   "issues": {
     "deprecated_gender": {
       "title": "The {deprecated_option} text-to-speech option is deprecated",

@@ -140,8 +140,10 @@ def get_accounts(client, version):
             API_ACCOUNT_ID: account[API_V3_ACCOUNT_ID],
             API_ACCOUNT_NAME: account[API_ACCOUNT_NAME],
             API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY],
-            API_ACCOUNT_AMOUNT: account[API_ACCOUNT_AVALIABLE][API_ACCOUNT_VALUE]
-            + account[API_ACCOUNT_HOLD][API_ACCOUNT_VALUE],
+            API_ACCOUNT_AMOUNT: (
+                float(account[API_ACCOUNT_AVALIABLE][API_ACCOUNT_VALUE])
+                + float(account[API_ACCOUNT_HOLD][API_ACCOUNT_VALUE])
+            ),
             ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_V3_TYPE_VAULT,
         }
         for account in accounts

@@ -44,9 +44,7 @@ class DiscovergyUpdateCoordinator(DataUpdateCoordinator[Reading]):
             )
         except InvalidLogin as err:
             raise ConfigEntryAuthFailed(
-                f"Auth expired while fetching last reading for meter {self.meter.meter_id}"
+                "Auth expired while fetching last reading"
             ) from err
         except (HTTPError, DiscovergyClientError) as err:
-            raise UpdateFailed(
-                f"Error while fetching last reading for meter {self.meter.meter_id}"
-            ) from err
+            raise UpdateFailed(f"Error while fetching last reading: {err}") from err

@@ -23,7 +23,7 @@ from homeassistant.components.climate import (
 from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
+from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
 
 from . import EconetConfigEntry
 from .const import DOMAIN
@@ -35,8 +35,13 @@ ECONET_STATE_TO_HA = {
     ThermostatOperationMode.OFF: HVACMode.OFF,
     ThermostatOperationMode.AUTO: HVACMode.HEAT_COOL,
     ThermostatOperationMode.FAN_ONLY: HVACMode.FAN_ONLY,
+    ThermostatOperationMode.EMERGENCY_HEAT: HVACMode.HEAT,
 }
+HA_STATE_TO_ECONET = {
+    value: key
+    for key, value in ECONET_STATE_TO_HA.items()
+    if key != ThermostatOperationMode.EMERGENCY_HEAT
+}
-HA_STATE_TO_ECONET = {value: key for key, value in ECONET_STATE_TO_HA.items()}
 
 ECONET_FAN_STATE_TO_HA = {
     ThermostatFanMode.AUTO: FAN_AUTO,
@@ -209,7 +214,7 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):
 
     def turn_aux_heat_on(self) -> None:
         """Turn auxiliary heater on."""
-        async_create_issue(
+        create_issue(
             self.hass,
             DOMAIN,
             "migrate_aux_heat",
@@ -223,7 +228,7 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):
 
     def turn_aux_heat_off(self) -> None:
         """Turn auxiliary heater off."""
-        async_create_issue(
+        create_issue(
             self.hass,
             DOMAIN,
             "migrate_aux_heat",

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ecovacs",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.10", "deebot-client==12.0.0"]
+  "requirements": ["py-sucks==0.9.10", "deebot-client==12.2.0"]
 }

@@ -8,7 +8,7 @@
   "iot_class": "local_polling",
   "loggers": ["eheimdigital"],
   "quality_scale": "bronze",
-  "requirements": ["eheimdigital==1.0.5"],
+  "requirements": ["eheimdigital==1.0.6"],
   "zeroconf": [
     { "type": "_http._tcp.local.", "name": "eheimdigital._http._tcp.local." }
   ]

@@ -4,12 +4,16 @@ from __future__ import annotations
 
 import aiohttp
 from electrickiwi_api import ElectricKiwiApi
-from electrickiwi_api.exceptions import ApiException
+from electrickiwi_api.exceptions import ApiException, AuthException
 
 from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
-from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow
+from homeassistant.helpers import (
+    aiohttp_client,
+    config_entry_oauth2_flow,
+    entity_registry as er,
+)
 
 from . import api
 from .coordinator import (
@@ -44,7 +48,9 @@ async def async_setup_entry(
         raise ConfigEntryNotReady from err
 
     ek_api = ElectricKiwiApi(
-        api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
+        api.ConfigEntryElectricKiwiAuth(
+            aiohttp_client.async_get_clientsession(hass), session
+        )
     )
     hop_coordinator = ElectricKiwiHOPDataCoordinator(hass, entry, ek_api)
     account_coordinator = ElectricKiwiAccountDataCoordinator(hass, entry, ek_api)
@@ -53,6 +59,8 @@ async def async_setup_entry(
         await ek_api.set_active_session()
         await hop_coordinator.async_config_entry_first_refresh()
         await account_coordinator.async_config_entry_first_refresh()
+    except AuthException as err:
+        raise ConfigEntryAuthFailed from err
     except ApiException as err:
         raise ConfigEntryNotReady from err
 
@@ -70,3 +78,53 @@ async def async_unload_entry(
 ) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
+
+
+async def async_migrate_entry(
+    hass: HomeAssistant, config_entry: ElectricKiwiConfigEntry
+) -> bool:
+    """Migrate old entry."""
+    if config_entry.version == 1 and config_entry.minor_version == 1:
+        implementation = (
+            await config_entry_oauth2_flow.async_get_config_entry_implementation(
+                hass, config_entry
+            )
+        )
+
+        session = config_entry_oauth2_flow.OAuth2Session(
+            hass, config_entry, implementation
+        )
+
+        ek_api = ElectricKiwiApi(
+            api.ConfigEntryElectricKiwiAuth(
+                aiohttp_client.async_get_clientsession(hass), session
+            )
+        )
+        try:
+            await ek_api.set_active_session()
+            connection_details = await ek_api.get_connection_details()
+        except AuthException:
+            config_entry.async_start_reauth(hass)
+            return False
+        except ApiException:
+            return False
+        unique_id = str(ek_api.customer_number)
+        identifier = ek_api.electricity.identifier
+        hass.config_entries.async_update_entry(
+            config_entry, unique_id=unique_id, minor_version=2
+        )
+        entity_registry = er.async_get(hass)
+        entity_entries = er.async_entries_for_config_entry(
+            entity_registry, config_entry_id=config_entry.entry_id
+        )
+
+        for entity in entity_entries:
+            assert entity.config_entry_id
+            entity_registry.async_update_entity(
+                entity.entity_id,
+                new_unique_id=entity.unique_id.replace(
+                    f"{unique_id}_{connection_details.id}", f"{unique_id}_{identifier}"
+                ),
+            )
+
+    return True

@@ -2,17 +2,16 @@
 
 from __future__ import annotations
 
-from typing import cast
-
 from aiohttp import ClientSession
 from electrickiwi_api import AbstractAuth
 
-from homeassistant.helpers import config_entry_oauth2_flow
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow
 
 from .const import API_BASE_URL
 
 
-class AsyncConfigEntryAuth(AbstractAuth):
+class ConfigEntryElectricKiwiAuth(AbstractAuth):
     """Provide Electric Kiwi authentication tied to an OAuth2 based config entry."""
 
     def __init__(
@@ -29,4 +28,21 @@ class AsyncConfigEntryAuth(AbstractAuth):
         """Return a valid access token."""
         await self._oauth_session.async_ensure_token_valid()
 
-        return cast(str, self._oauth_session.token["access_token"])
+        return str(self._oauth_session.token["access_token"])
+
+
+class ConfigFlowElectricKiwiAuth(AbstractAuth):
+    """Provide Electric Kiwi authentication tied to an OAuth2 based config flow."""
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        token: str,
+    ) -> None:
+        """Initialize ConfigFlowFitbitApi."""
+        super().__init__(aiohttp_client.async_get_clientsession(hass), API_BASE_URL)
+        self._token = token
+
+    async def async_get_access_token(self) -> str:
+        """Return the token for the Electric Kiwi API."""
+        return self._token

@@ -6,9 +6,14 @@ from collections.abc import Mapping
 import logging
 from typing import Any
 
-from homeassistant.config_entries import ConfigFlowResult
+from electrickiwi_api import ElectricKiwiApi
+from electrickiwi_api.exceptions import ApiException
+
+from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
+from homeassistant.const import CONF_NAME
 from homeassistant.helpers import config_entry_oauth2_flow
 
+from . import api
 from .const import DOMAIN, SCOPE_VALUES
 
 
@@ -17,6 +22,8 @@ class ElectricKiwiOauth2FlowHandler(
 ):
     """Config flow to handle Electric Kiwi OAuth2 authentication."""
 
+    VERSION = 1
+    MINOR_VERSION = 2
     DOMAIN = DOMAIN
 
     @property
@@ -40,12 +47,30 @@ class ElectricKiwiOauth2FlowHandler(
     ) -> ConfigFlowResult:
         """Dialog that informs the user that reauth is required."""
         if user_input is None:
-            return self.async_show_form(step_id="reauth_confirm")
+            return self.async_show_form(
+                step_id="reauth_confirm",
+                description_placeholders={CONF_NAME: self._get_reauth_entry().title},
+            )
         return await self.async_step_user()
 
     async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
         """Create an entry for Electric Kiwi."""
-        existing_entry = await self.async_set_unique_id(DOMAIN)
-        if existing_entry:
-            return self.async_update_reload_and_abort(existing_entry, data=data)
-        return await super().async_oauth_create_entry(data)
+        ek_api = ElectricKiwiApi(
+            api.ConfigFlowElectricKiwiAuth(self.hass, data["token"]["access_token"])
+        )
+
+        try:
+            session = await ek_api.get_active_session()
+        except ApiException:
+            return self.async_abort(reason="connection_error")
+
+        unique_id = str(session.data.customer_number)
+        await self.async_set_unique_id(unique_id)
+        if self.source == SOURCE_REAUTH:
+            self._abort_if_unique_id_mismatch(reason="wrong_account")
+            return self.async_update_reload_and_abort(
+                self._get_reauth_entry(), data=data
+            )
+
+        self._abort_if_unique_id_configured()
+        return self.async_create_entry(title=unique_id, data=data)

@@ -8,4 +8,4 @@ OAUTH2_AUTHORIZE = "https://welcome.electrickiwi.co.nz/oauth/authorize"
 OAUTH2_TOKEN = "https://welcome.electrickiwi.co.nz/oauth/token"
 API_BASE_URL = "https://api.electrickiwi.co.nz"
 
-SCOPE_VALUES = "read_connection_detail read_billing_frequency read_account_running_balance read_consumption_summary read_consumption_averages read_hop_intervals_config read_hop_connection save_hop_connection read_session"
+SCOPE_VALUES = "read_customer_details read_connection_detail read_connection read_billing_address get_bill_address read_billing_frequency read_billing_details read_billing_bills read_billing_bill read_billing_bill_id read_billing_bill_file read_account_running_balance read_customer_account_summary read_consumption_summary download_consumption_file read_consumption_averages get_consumption_averages read_hop_intervals_config read_hop_intervals read_hop_connection read_hop_specific_connection save_hop_connection save_hop_specific_connection read_outage_contact get_outage_contact_info_for_icp read_session read_session_data_login"

@@ -10,7 +10,7 @@ import logging
 
 from electrickiwi_api import ElectricKiwiApi
 from electrickiwi_api.exceptions import ApiException, AuthException
-from electrickiwi_api.model import AccountBalance, Hop, HopIntervals
+from electrickiwi_api.model import AccountSummary, Hop, HopIntervals
 
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
@@ -34,7 +34,7 @@ class ElectricKiwiRuntimeData:
 type ElectricKiwiConfigEntry = ConfigEntry[ElectricKiwiRuntimeData]
 
 
-class ElectricKiwiAccountDataCoordinator(DataUpdateCoordinator[AccountBalance]):
+class ElectricKiwiAccountDataCoordinator(DataUpdateCoordinator[AccountSummary]):
     """ElectricKiwi Account Data object."""
 
     def __init__(
@@ -51,13 +51,13 @@ class ElectricKiwiAccountDataCoordinator(DataUpdateCoordinator[AccountBalance]):
             name="Electric Kiwi Account Data",
             update_interval=ACCOUNT_SCAN_INTERVAL,
         )
-        self._ek_api = ek_api
+        self.ek_api = ek_api
 
-    async def _async_update_data(self) -> AccountBalance:
+    async def _async_update_data(self) -> AccountSummary:
         """Fetch data from Account balance API endpoint."""
         try:
             async with asyncio.timeout(60):
-                return await self._ek_api.get_account_balance()
+                return await self.ek_api.get_account_summary()
         except AuthException as auth_err:
             raise ConfigEntryAuthFailed from auth_err
         except ApiException as api_err:
@@ -85,7 +85,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
             # Polling interval. Will only be polled if there are subscribers.
             update_interval=HOP_SCAN_INTERVAL,
         )
-        self._ek_api = ek_api
+        self.ek_api = ek_api
         self.hop_intervals: HopIntervals | None = None
 
     def get_hop_options(self) -> dict[str, int]:
@@ -100,7 +100,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
     async def async_update_hop(self, hop_interval: int) -> Hop:
         """Update selected hop and data."""
         try:
-            self.async_set_updated_data(await self._ek_api.post_hop(hop_interval))
+            self.async_set_updated_data(await self.ek_api.post_hop(hop_interval))
         except AuthException as auth_err:
             raise ConfigEntryAuthFailed from auth_err
         except ApiException as api_err:
@@ -118,7 +118,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
         try:
             async with asyncio.timeout(60):
                 if self.hop_intervals is None:
-                    hop_intervals: HopIntervals = await self._ek_api.get_hop_intervals()
+                    hop_intervals: HopIntervals = await self.ek_api.get_hop_intervals()
                     hop_intervals.intervals = OrderedDict(
                         filter(
                             lambda pair: pair[1].active == 1,
@@ -127,7 +127,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
                     )
 
             self.hop_intervals = hop_intervals
-            return await self._ek_api.get_hop()
+            return await self.ek_api.get_hop()
         except AuthException as auth_err:
             raise ConfigEntryAuthFailed from auth_err
         except ApiException as api_err:

@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/electric_kiwi",
   "integration_type": "hub",
   "iot_class": "cloud_polling",
-  "requirements": ["electrickiwi-api==0.8.5"]
+  "requirements": ["electrickiwi-api==0.9.14"]
 }

@@ -53,8 +53,8 @@ class ElectricKiwiSelectHOPEntity(
         """Initialise the HOP selection entity."""
         super().__init__(coordinator)
         self._attr_unique_id = (
-            f"{coordinator._ek_api.customer_number}"  # noqa: SLF001
-            f"_{coordinator._ek_api.connection_id}_{description.key}"  # noqa: SLF001
+            f"{coordinator.ek_api.customer_number}"
+            f"_{coordinator.ek_api.electricity.identifier}_{description.key}"
         )
         self.entity_description = description
         self.values_dict = coordinator.get_hop_options()

@@ -6,7 +6,7 @@ from collections.abc import Callable
 from dataclasses import dataclass
 from datetime import datetime, timedelta
 
-from electrickiwi_api.model import AccountBalance, Hop
+from electrickiwi_api.model import AccountSummary, Hop
 
 from homeassistant.components.sensor import (
     SensorDeviceClass,
@@ -39,7 +39,15 @@ ATTR_HOP_PERCENTAGE = "hop_percentage"
 class ElectricKiwiAccountSensorEntityDescription(SensorEntityDescription):
     """Describes Electric Kiwi sensor entity."""
 
-    value_func: Callable[[AccountBalance], float | datetime]
+    value_func: Callable[[AccountSummary], float | datetime]
 
 
+def _get_hop_percentage(account_balance: AccountSummary) -> float:
+    """Return the hop percentage from account summary."""
+    if power := account_balance.services.get("power"):
+        if connection := power.connections[0]:
+            return float(connection.hop_percentage)
+    return 0.0
+
+
 ACCOUNT_SENSOR_TYPES: tuple[ElectricKiwiAccountSensorEntityDescription, ...] = (
@@ -72,9 +80,7 @@ ACCOUNT_SENSOR_TYPES: tuple[ElectricKiwiAccountSensorEntityDescription, ...] = (
         translation_key="hop_power_savings",
         native_unit_of_measurement=PERCENTAGE,
         state_class=SensorStateClass.MEASUREMENT,
-        value_func=lambda account_balance: float(
-            account_balance.connections[0].hop_percentage
-        ),
+        value_func=_get_hop_percentage,
     ),
 )
 
@@ -165,8 +171,8 @@ class ElectricKiwiAccountEntity(
         super().__init__(coordinator)
 
         self._attr_unique_id = (
-            f"{coordinator._ek_api.customer_number}"  # noqa: SLF001
-            f"_{coordinator._ek_api.connection_id}_{description.key}"  # noqa: SLF001
+            f"{coordinator.ek_api.customer_number}"
+            f"_{coordinator.ek_api.electricity.identifier}_{description.key}"
         )
         self.entity_description = description
 
@@ -194,8 +200,8 @@ class ElectricKiwiHOPEntity(
         super().__init__(coordinator)
 
         self._attr_unique_id = (
-            f"{coordinator._ek_api.customer_number}"  # noqa: SLF001
-            f"_{coordinator._ek_api.connection_id}_{description.key}"  # noqa: SLF001
+            f"{coordinator.ek_api.customer_number}"
+            f"_{coordinator.ek_api.electricity.identifier}_{description.key}"
        )
         self.entity_description = description
 

@@ -21,7 +21,8 @@
       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
       "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
       "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
-      "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]"
+      "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
+      "connection_error": "[%key:common::config_flow::error::cannot_connect%]"
     },
     "create_entry": {
       "default": "[%key:common::config_flow::create_entry::authenticated%]"

@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/enphase_envoy",
   "iot_class": "local_polling",
   "loggers": ["pyenphase"],
-  "requirements": ["pyenphase==1.23.1"],
+  "requirements": ["pyenphase==1.25.1"],
   "zeroconf": [
     {
       "type": "_enphase-envoy._tcp.local."

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/fireservicerota",
   "iot_class": "cloud_polling",
   "loggers": ["pyfireservicerota"],
-  "requirements": ["pyfireservicerota==0.0.43"]
+  "requirements": ["pyfireservicerota==0.0.46"]
 }

@@ -85,6 +85,8 @@ async def async_setup_entry(
 class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity):
     """The thermostat class for FRITZ!SmartHome thermostats."""
 
+    _attr_max_temp = MAX_TEMPERATURE
+    _attr_min_temp = MIN_TEMPERATURE
     _attr_precision = PRECISION_HALVES
     _attr_temperature_unit = UnitOfTemperature.CELSIUS
     _attr_translation_key = "thermostat"
@@ -135,11 +137,13 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity):
 
     async def async_set_temperature(self, **kwargs: Any) -> None:
         """Set new target temperature."""
-        target_temp = kwargs.get(ATTR_TEMPERATURE)
-        hvac_mode = kwargs.get(ATTR_HVAC_MODE)
-        if hvac_mode == HVACMode.OFF:
+        if (hvac_mode := kwargs.get(ATTR_HVAC_MODE)) is HVACMode.OFF:
             await self.async_set_hvac_mode(hvac_mode)
-        elif target_temp is not None:
+        elif (target_temp := kwargs.get(ATTR_TEMPERATURE)) is not None:
             if target_temp == OFF_API_TEMPERATURE:
                 target_temp = OFF_REPORT_SET_TEMPERATURE
             elif target_temp == ON_API_TEMPERATURE:
                 target_temp = ON_REPORT_SET_TEMPERATURE
             await self.hass.async_add_executor_job(
                 self.data.set_target_temperature, target_temp, True
             )
@@ -169,12 +173,12 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity):
                 translation_domain=DOMAIN,
                 translation_key="change_hvac_while_active_mode",
             )
-        if self.hvac_mode == hvac_mode:
+        if self.hvac_mode is hvac_mode:
             LOGGER.debug(
                 "%s is already in requested hvac mode %s", self.name, hvac_mode
             )
             return
-        if hvac_mode == HVACMode.OFF:
+        if hvac_mode is HVACMode.OFF:
             await self.async_set_temperature(temperature=OFF_REPORT_SET_TEMPERATURE)
         else:
             if value_scheduled_preset(self.data) == PRESET_ECO:
@@ -208,16 +212,6 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity):
         elif preset_mode == PRESET_ECO:
             await self.async_set_temperature(temperature=self.data.eco_temperature)
 
-    @property
-    def min_temp(self) -> int:
-        """Return the minimum temperature."""
-        return MIN_TEMPERATURE
-
-    @property
-    def max_temp(self) -> int:
-        """Return the maximum temperature."""
-        return MAX_TEMPERATURE
-
     @property
     def extra_state_attributes(self) -> ClimateExtraAttributes:
         """Return the device specific state attributes."""

@@ -7,7 +7,7 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["pyfritzhome"],
-  "requirements": ["pyfritzhome==0.6.14"],
+  "requirements": ["pyfritzhome==0.6.15"],
   "ssdp": [
     {
       "st": "urn:schemas-upnp-org:device:fritzbox:1"

@@ -21,5 +21,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20250205.0"]
+  "requirements": ["home-assistant-frontend==20250221.0"]
 }

@@ -7,7 +7,7 @@ from collections.abc import Callable
 from google_drive_api.exceptions import GoogleDriveApiError
 
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.core import HomeAssistant
+from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import instance_id
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
@@ -49,6 +49,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleDriveConfigEntry)
     except GoogleDriveApiError as err:
         raise ConfigEntryNotReady from err
 
+    _async_notify_backup_listeners_soon(hass)
+
     return True
 
 
@@ -56,10 +58,15 @@ async def async_unload_entry(
     hass: HomeAssistant, entry: GoogleDriveConfigEntry
 ) -> bool:
     """Unload a config entry."""
-    hass.loop.call_soon(_notify_backup_listeners, hass)
+    _async_notify_backup_listeners_soon(hass)
     return True
 
 
-def _notify_backup_listeners(hass: HomeAssistant) -> None:
+def _async_notify_backup_listeners(hass: HomeAssistant) -> None:
     for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
         listener()
+
+
+@callback
+def _async_notify_backup_listeners_soon(hass: HomeAssistant) -> None:
+    hass.loop.call_soon(_async_notify_backup_listeners, hass)

@@ -146,9 +146,10 @@ class DriveClient:
             backup.backup_id,
             backup_metadata,
         )
-        await self._api.upload_file(
+        await self._api.resumable_upload_file(
             backup_metadata,
             open_stream,
+            backup.size,
             timeout=ClientTimeout(total=_UPLOAD_AND_DOWNLOAD_TIMEOUT),
         )
         _LOGGER.debug(

@@ -2,7 +2,10 @@
 
 from homeassistant.components.application_credentials import AuthorizationServer
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers import config_entry_oauth2_flow
+from homeassistant.helpers.config_entry_oauth2_flow import (
+    AUTH_CALLBACK_PATH,
+    MY_AUTH_CALLBACK_PATH,
+)
 
 
 async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
@@ -15,9 +18,14 @@ async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
 
 async def async_get_description_placeholders(hass: HomeAssistant) -> dict[str, str]:
     """Return description placeholders for the credentials dialog."""
+    if "my" in hass.config.components:
+        redirect_url = MY_AUTH_CALLBACK_PATH
+    else:
+        ha_host = hass.config.external_url or "https://YOUR_DOMAIN:PORT"
+        redirect_url = f"{ha_host}{AUTH_CALLBACK_PATH}"
     return {
         "oauth_consent_url": "https://console.cloud.google.com/apis/credentials/consent",
         "more_info_url": "https://www.home-assistant.io/integrations/google_drive/",
         "oauth_creds_url": "https://console.cloud.google.com/apis/credentials",
-        "redirect_url": config_entry_oauth2_flow.async_get_redirect_uri(hass),
+        "redirect_url": redirect_url,
     }

@@ -10,5 +10,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["google_drive_api"],
   "quality_scale": "platinum",
-  "requirements": ["python-google-drive-api==0.0.2"]
+  "requirements": ["python-google-drive-api==0.1.0"]
 }

@@ -38,6 +38,10 @@
       "local_name": "GV5126*",
       "connectable": false
     },
+    {
+      "local_name": "GV5179*",
+      "connectable": false
+    },
     {
       "local_name": "GVH5127*",
       "connectable": false
@@ -131,5 +135,5 @@
   "dependencies": ["bluetooth_adapters"],
   "documentation": "https://www.home-assistant.io/integrations/govee_ble",
   "iot_class": "local_push",
-  "requirements": ["govee-ble==0.42.0"]
+  "requirements": ["govee-ble==0.43.0"]
 }

@@ -11,6 +11,7 @@ from typing import Any

from aiohttp import ClientError
from habiticalib import (
    Avatar,
    ContentData,
    Habitica,
    HabiticaException,
@@ -19,7 +20,6 @@ from habiticalib import (
    TaskFilter,
    TooManyRequestsError,
    UserData,
    UserStyles,
)

from homeassistant.config_entries import ConfigEntry
@@ -159,12 +159,10 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
        else:
            await self.async_request_refresh()

    async def generate_avatar(self, user_styles: UserStyles) -> bytes:
    async def generate_avatar(self, avatar: Avatar) -> bytes:
        """Generate Avatar."""

        avatar = BytesIO()
        await self.habitica.generate_avatar(
            fp=avatar, user_styles=user_styles, fmt="PNG"
        )
        png = BytesIO()
        await self.habitica.generate_avatar(fp=png, avatar=avatar, fmt="PNG")

        return avatar.getvalue()
        return png.getvalue()
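Interleaved above; the consolidated new method reads roughly as follows (a sketch, assuming habiticalib's Avatar type imported earlier in the diff):

from io import BytesIO


async def generate_avatar(self, avatar: Avatar) -> bytes:
    """Generate Avatar."""
    # Render the avatar PNG into an in-memory buffer and return raw bytes.
    png = BytesIO()
    await self.habitica.generate_avatar(fp=png, avatar=avatar, fmt="PNG")
    return png.getvalue()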
@@ -23,5 +23,5 @@ async def async_get_config_entry_diagnostics(
            CONF_URL: config_entry.data[CONF_URL],
            CONF_API_USER: config_entry.data[CONF_API_USER],
        },
        "habitica_data": habitica_data.to_dict()["data"],
        "habitica_data": habitica_data.to_dict(omit_none=False)["data"],
    }
@@ -2,10 +2,9 @@

from __future__ import annotations

from dataclasses import asdict
from enum import StrEnum

from habiticalib import UserStyles
from habiticalib import Avatar, extract_avatar

from homeassistant.components.image import ImageEntity, ImageEntityDescription
from homeassistant.core import HomeAssistant
@@ -45,7 +44,7 @@ class HabiticaImage(HabiticaBase, ImageEntity):
        translation_key=HabiticaImageEntity.AVATAR,
    )
    _attr_content_type = "image/png"
    _current_appearance: UserStyles | None = None
    _current_appearance: Avatar | None = None
    _cache: bytes | None = None

    def __init__(
@@ -60,7 +59,7 @@ class HabiticaImage(HabiticaBase, ImageEntity):

    def _handle_coordinator_update(self) -> None:
        """Check if equipped gear and other things have changed since last avatar image generation."""
        new_appearance = UserStyles.from_dict(asdict(self.coordinator.data.user))
        new_appearance = extract_avatar(self.coordinator.data.user)

        if self._current_appearance != new_appearance:
            self._current_appearance = new_appearance
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/habitica",
  "iot_class": "cloud_polling",
  "loggers": ["habiticalib"],
  "requirements": ["habiticalib==0.3.4"]
  "requirements": ["habiticalib==0.3.7"]
}
@@ -77,7 +77,7 @@ SERVICE_API_CALL_SCHEMA = vol.Schema(

SERVICE_CAST_SKILL_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
        vol.Required(ATTR_SKILL): cv.string,
        vol.Optional(ATTR_TASK): cv.string,
    }
@@ -85,12 +85,12 @@ SERVICE_CAST_SKILL_SCHEMA = vol.Schema(

SERVICE_MANAGE_QUEST_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
    }
)
SERVICE_SCORE_TASK_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
        vol.Required(ATTR_TASK): cv.string,
        vol.Optional(ATTR_DIRECTION): cv.string,
    }
@@ -98,7 +98,7 @@ SERVICE_SCORE_TASK_SCHEMA = vol.Schema(

SERVICE_TRANSFORMATION_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
        vol.Required(ATTR_ITEM): cv.string,
        vol.Required(ATTR_TARGET): cv.string,
    }
@@ -106,7 +106,7 @@ SERVICE_TRANSFORMATION_SCHEMA = vol.Schema(

SERVICE_GET_TASKS_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
        vol.Optional(ATTR_TYPE): vol.All(
            cv.ensure_list, [vol.All(vol.Upper, vol.In({x.name for x in TaskType}))]
        ),
@@ -510,7 +510,10 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
            or (task.notes and keyword in task.notes.lower())
            or any(keyword in item.text.lower() for item in task.checklist)
        ]
        result: dict[str, Any] = {"tasks": response}
        result: dict[str, Any] = {
            "tasks": [task.to_dict(omit_none=False) for task in response]
        }

        return result

    hass.services.async_register(
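Two changes run through this file: every service schema restricts the config-entry picker to this integration, and get_tasks now serializes tasks to plain dicts so the service response is JSON-safe. Consolidated (a sketch of the new lines only; names as in the diff):

SERVICE_SCORE_TASK_SCHEMA = vol.Schema(
    {
        # Only offer config entries that belong to this integration.
        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
        vol.Required(ATTR_TASK): cv.string,
        vol.Optional(ATTR_DIRECTION): cv.string,
    }
)

# ...and in the get_tasks handler, task objects become plain dicts:
result: dict[str, Any] = {
    "tasks": [task.to_dict(omit_none=False) for task in response]
}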
@@ -27,11 +27,13 @@ from homeassistant.components.backup import (
    AddonInfo,
    AgentBackup,
    BackupAgent,
    BackupConfig,
    BackupManagerError,
    BackupNotFound,
    BackupReaderWriter,
    BackupReaderWriterError,
    CreateBackupEvent,
    CreateBackupParametersDict,
    CreateBackupStage,
    CreateBackupState,
    Folder,
@@ -633,6 +635,27 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
            _LOGGER.debug("Could not get restore job %s: %s", restore_job_id, err)
            unsub()

    async def async_validate_config(self, *, config: BackupConfig) -> None:
        """Validate backup config.

        Replace the core backup agent with the hassio default agent.
        """
        core_agent_id = "backup.local"
        create_backup = config.data.create_backup
        if core_agent_id not in create_backup.agent_ids:
            _LOGGER.debug("Backup settings don't need to be adjusted")
            return

        default_agent = await _default_agent(self._client)
        _LOGGER.info("Adjusting backup settings to not include core backup location")
        automatic_agents = [
            agent_id if agent_id != core_agent_id else default_agent
            for agent_id in create_backup.agent_ids
        ]
        config.update(
            create_backup=CreateBackupParametersDict(agent_ids=automatic_agents)
        )

    @callback
    def _async_listen_job_events(
        self, job_id: UUID, on_event: Callable[[Mapping[str, Any]], None]
@@ -37,11 +37,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool
    for device in device_registry.devices.get_devices_for_config_entry_id(
        entry.entry_id
    ):
        for domain, player_id in device.identifiers:
            if domain == DOMAIN and not isinstance(player_id, str):
                device_registry.async_update_device(  # type: ignore[unreachable]
                    device.id, new_identifiers={(DOMAIN, str(player_id))}
        for ident in device.identifiers:
            if ident[0] != DOMAIN or isinstance(ident[1], str):
                continue

            player_id = int(ident[1])  # type: ignore[unreachable]

            # Create set of identifiers excluding this integration
            identifiers = {ident for ident in device.identifiers if ident[0] != DOMAIN}
            migrated_identifiers = {(DOMAIN, str(player_id))}
            # Add migrated if not already present in another device, which occurs if the user downgraded and then upgraded
            if not device_registry.async_get_device(migrated_identifiers):
                identifiers.update(migrated_identifiers)
            if len(identifiers) > 0:
                device_registry.async_update_device(
                    device.id, new_identifiers=identifiers
                )
            else:
                device_registry.async_remove_device(device.id)
            break

    coordinator = HeosCoordinator(hass, entry)
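The migration block interleaves removed and added lines; consolidated, the new logic reads roughly as follows (a sketch, not the verbatim commit):

for device in device_registry.devices.get_devices_for_config_entry_id(entry.entry_id):
    for ident in device.identifiers:
        # Only migrate legacy non-string player ids owned by this integration.
        if ident[0] != DOMAIN or isinstance(ident[1], str):
            continue
        player_id = int(ident[1])
        # Keep identifiers that belong to other integrations.
        identifiers = {i for i in device.identifiers if i[0] != DOMAIN}
        migrated_identifiers = {(DOMAIN, str(player_id))}
        # Skip adding if another device already owns the migrated identifier,
        # which can happen after a downgrade/upgrade cycle.
        if not device_registry.async_get_device(migrated_identifiers):
            identifiers.update(migrated_identifiers)
        if identifiers:
            device_registry.async_update_device(device.id, new_identifiers=identifiers)
        else:
            device_registry.async_remove_device(device.id)
        break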
@@ -8,7 +8,7 @@
  "iot_class": "local_push",
  "loggers": ["pyheos"],
  "quality_scale": "silver",
  "requirements": ["pyheos==1.0.1"],
  "requirements": ["pyheos==1.0.2"],
  "single_config_entry": true,
  "ssdp": [
    {

@@ -9,5 +9,5 @@
  },
  "iot_class": "cloud_polling",
  "loggers": ["apyhiveapi"],
  "requirements": ["pyhive-integration==1.0.1"]
  "requirements": ["pyhive-integration==1.0.2"]
}

@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/holiday",
  "iot_class": "local_polling",
  "requirements": ["holidays==0.65", "babel==2.15.0"]
  "requirements": ["holidays==0.66", "babel==2.15.0"]
}
@@ -432,6 +432,7 @@ def ws_expose_entity(
@websocket_api.websocket_command(
    {
        vol.Required("type"): "homeassistant/expose_entity/list",
        vol.Optional("assistant"): vol.In(KNOWN_ASSISTANTS),
    }
)
def ws_list_exposed_entities(
@@ -441,10 +442,18 @@ def ws_list_exposed_entities(
    result: dict[str, Any] = {}

    exposed_entities = hass.data[DATA_EXPOSED_ENTITIES]
    required_assistant = msg.get("assistant")
    entity_registry = er.async_get(hass)
    for entity_id in chain(exposed_entities.entities, entity_registry.entities):
        result[entity_id] = {}
        entity_settings = async_get_entity_settings(hass, entity_id)
        if required_assistant and (
            (required_assistant not in entity_settings)
            or (not entity_settings[required_assistant].get("should_expose"))
        ):
            # Not exposed to required assistant
            continue

        result[entity_id] = {}
        for assistant, settings in entity_settings.items():
            if "should_expose" not in settings:
                continue
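Consolidated, the new listing loop reads roughly as follows (a sketch; names as in the hunk, remainder of the loop unchanged):

required_assistant = msg.get("assistant")
for entity_id in chain(exposed_entities.entities, entity_registry.entities):
    entity_settings = async_get_entity_settings(hass, entity_id)
    # When an assistant filter was given, skip entities that are not
    # exposed to that assistant.
    if required_assistant and (
        required_assistant not in entity_settings
        or not entity_settings[required_assistant].get("should_expose")
    ):
        continue
    result[entity_id] = {}
    for assistant, settings in entity_settings.items():
        if "should_expose" not in settings:
            continue
        ...  # per-assistant settings handling continues past the hunk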
@@ -107,7 +107,9 @@ class HueLight(HueBaseEntity, LightEntity):
        self._attr_effect_list = []
        if effects := resource.effects:
            self._attr_effect_list = [
                x.value for x in effects.status_values if x != EffectStatus.NO_EFFECT
                x.value
                for x in effects.status_values
                if x not in (EffectStatus.NO_EFFECT, EffectStatus.UNKNOWN)
            ]
        if timed_effects := resource.timed_effects:
            self._attr_effect_list += [
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/hydrawise",
  "iot_class": "cloud_polling",
  "loggers": ["pydrawise"],
  "requirements": ["pydrawise==2025.1.0"]
  "requirements": ["pydrawise==2025.2.0"]
}
@@ -10,8 +10,8 @@ from lacrosse_view import HTTPError, LaCrosse, Location, LoginError, Sensor

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import SCAN_INTERVAL

@@ -26,6 +26,7 @@ class LaCrosseUpdateCoordinator(DataUpdateCoordinator[list[Sensor]]):
    name: str
    id: str
    hass: HomeAssistant
    devices: list[Sensor] | None = None

    def __init__(
        self,
@@ -60,24 +61,34 @@ class LaCrosseUpdateCoordinator(DataUpdateCoordinator[list[Sensor]]):
        except LoginError as error:
            raise ConfigEntryAuthFailed from error

        if self.devices is None:
            _LOGGER.debug("Getting devices")
            try:
                self.devices = await self.api.get_devices(
                    location=Location(id=self.id, name=self.name),
                )
            except HTTPError as error:
                raise UpdateFailed from error

        try:
            # Fetch last hour of data
            sensors = await self.api.get_sensors(
                location=Location(id=self.id, name=self.name),
                tz=self.hass.config.time_zone,
                start=str(now - 3600),
                end=str(now),
            )
        except HTTPError as error:
            raise ConfigEntryNotReady from error
            for sensor in self.devices:
                sensor.data = (
                    await self.api.get_sensor_status(
                        sensor=sensor,
                        tz=self.hass.config.time_zone,
                    )
                )["data"]["current"]
                _LOGGER.debug("Got data: %s", sensor.data)

        _LOGGER.debug("Got data: %s", sensors)
        except HTTPError as error:
            raise UpdateFailed from error

        # Verify that we have permission to read the sensors
        for sensor in sensors:
        for sensor in self.devices:
            if not sensor.permissions.get("read", False):
                raise ConfigEntryAuthFailed(
                    f"This account does not have permission to read {sensor.name}"
                )

        return sensors
        return self.devices
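The old and new update paths are heavily interleaved above; consolidated, the new flow reads roughly as follows (a sketch, assuming the coordinator attributes from the surrounding code):

# Device list is fetched once and cached on the coordinator.
if self.devices is None:
    try:
        self.devices = await self.api.get_devices(
            location=Location(id=self.id, name=self.name),
        )
    except HTTPError as error:
        raise UpdateFailed from error

try:
    # Each poll now reads the current status per sensor instead of a
    # one-hour window of history.
    for sensor in self.devices:
        sensor.data = (
            await self.api.get_sensor_status(
                sensor=sensor,
                tz=self.hass.config.time_zone,
            )
        )["data"]["current"]
except HTTPError as error:
    raise UpdateFailed from error

# Verify that we have permission to read the sensors
for sensor in self.devices:
    if not sensor.permissions.get("read", False):
        raise ConfigEntryAuthFailed(
            f"This account does not have permission to read {sensor.name}"
        )

return self.devices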
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/lacrosse_view",
  "iot_class": "cloud_polling",
  "loggers": ["lacrosse_view"],
  "requirements": ["lacrosse-view==1.0.4"]
  "requirements": ["lacrosse-view==1.1.1"]
}
@@ -45,10 +45,10 @@ class LaCrosseSensorEntityDescription(SensorEntityDescription):

def get_value(sensor: Sensor, field: str) -> float | int | str | None:
    """Get the value of a sensor field."""
    field_data = sensor.data.get(field)
    field_data = sensor.data.get(field) if sensor.data is not None else None
    if field_data is None:
        return None
    value = field_data["values"][-1]["s"]
    value = field_data["spot"]["value"]
    try:
        value = float(value)
    except ValueError:
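Consolidated, the new helper reads roughly as follows (a sketch; the ValueError fallback continues past the hunk and is elided):

def get_value(sensor: Sensor, field: str) -> float | int | str | None:
    """Get the value of a sensor field."""
    # Guard sensors that have not reported data yet, then read the "spot"
    # payload shape used by lacrosse-view 1.1.x.
    field_data = sensor.data.get(field) if sensor.data is not None else None
    if field_data is None:
        return None
    value = field_data["spot"]["value"]
    try:
        value = float(value)
    except ValueError:
        ...  # non-numeric values keep the original fallback handling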
@@ -178,7 +178,7 @@ async def async_setup_entry(
            continue

        # if the API returns a different unit of measurement from the description, update it
        if sensor.data.get(field) is not None:
        if sensor.data is not None and sensor.data.get(field) is not None:
            native_unit_of_measurement = UNIT_OF_MEASUREMENT_MAP.get(
                sensor.data[field].get("unit")
            )
@@ -240,7 +240,9 @@ class LaCrosseViewSensor(
    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        data = self.coordinator.data[self.index].data
        return (
            super().available
            and self.entity_description.key in self.coordinator.data[self.index].data
            and data is not None
            and self.entity_description.key in data
        )
@@ -277,20 +277,6 @@ FOUR_GROUP_REMOTE_TRIGGER_SCHEMA = LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
    }
)

PADDLE_SWITCH_PICO_BUTTON_TYPES_TO_LIP = {
    "button_0": 2,
    "button_2": 4,
}
PADDLE_SWITCH_PICO_BUTTON_TYPES_TO_LEAP = {
    "button_0": 0,
    "button_2": 2,
}
PADDLE_SWITCH_PICO_TRIGGER_SCHEMA = LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
    {
        vol.Required(CONF_SUBTYPE): vol.In(PADDLE_SWITCH_PICO_BUTTON_TYPES_TO_LIP),
    }
)


DEVICE_TYPE_SCHEMA_MAP = {
    "Pico2Button": PICO_2_BUTTON_TRIGGER_SCHEMA,
@@ -302,7 +288,6 @@ DEVICE_TYPE_SCHEMA_MAP = {
    "Pico4ButtonZone": PICO_4_BUTTON_ZONE_TRIGGER_SCHEMA,
    "Pico4Button2Group": PICO_4_BUTTON_2_GROUP_TRIGGER_SCHEMA,
    "FourGroupRemote": FOUR_GROUP_REMOTE_TRIGGER_SCHEMA,
    "PaddleSwitchPico": PADDLE_SWITCH_PICO_TRIGGER_SCHEMA,
}

DEVICE_TYPE_SUBTYPE_MAP_TO_LIP = {
@@ -315,7 +300,6 @@ DEVICE_TYPE_SUBTYPE_MAP_TO_LIP = {
    "Pico4ButtonZone": PICO_4_BUTTON_ZONE_BUTTON_TYPES_TO_LIP,
    "Pico4Button2Group": PICO_4_BUTTON_2_GROUP_BUTTON_TYPES_TO_LIP,
    "FourGroupRemote": FOUR_GROUP_REMOTE_BUTTON_TYPES_TO_LIP,
    "PaddleSwitchPico": PADDLE_SWITCH_PICO_BUTTON_TYPES_TO_LIP,
}

DEVICE_TYPE_SUBTYPE_MAP_TO_LEAP = {
@@ -328,7 +312,6 @@ DEVICE_TYPE_SUBTYPE_MAP_TO_LEAP = {
    "Pico4ButtonZone": PICO_4_BUTTON_ZONE_BUTTON_TYPES_TO_LEAP,
    "Pico4Button2Group": PICO_4_BUTTON_2_GROUP_BUTTON_TYPES_TO_LEAP,
    "FourGroupRemote": FOUR_GROUP_REMOTE_BUTTON_TYPES_TO_LEAP,
    "PaddleSwitchPico": PADDLE_SWITCH_PICO_BUTTON_TYPES_TO_LEAP,
}

LEAP_TO_DEVICE_TYPE_SUBTYPE_MAP: dict[str, dict[int, str]] = {
@@ -343,7 +326,6 @@ TRIGGER_SCHEMA = vol.Any(
    PICO_4_BUTTON_ZONE_TRIGGER_SCHEMA,
    PICO_4_BUTTON_2_GROUP_TRIGGER_SCHEMA,
    FOUR_GROUP_REMOTE_TRIGGER_SCHEMA,
    PADDLE_SWITCH_PICO_TRIGGER_SCHEMA,
)
@@ -105,10 +105,8 @@ class MillHeater(MillBaseEntity, ClimateEntity):
        self, coordinator: MillDataUpdateCoordinator, device: mill.Heater
    ) -> None:
        """Initialize the thermostat."""

        super().__init__(coordinator, device)
        self._attr_unique_id = device.device_id
        self._update_attr(device)
        super().__init__(coordinator, device)

    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set new target temperature."""
@@ -4,7 +4,7 @@ from __future__ import annotations

from abc import abstractmethod

from mill import Heater, MillDevice
from mill import MillDevice

from homeassistant.core import callback
from homeassistant.helpers.device_registry import DeviceInfo
@@ -45,7 +45,7 @@ class MillBaseEntity(CoordinatorEntity[MillDataUpdateCoordinator]):

    @abstractmethod
    @callback
    def _update_attr(self, device: MillDevice | Heater) -> None:
    def _update_attr(self, device: MillDevice) -> None:
        """Update the attribute of the entity."""

    @property
@@ -2,7 +2,7 @@

from __future__ import annotations

from mill import MillDevice
from mill import Heater, MillDevice

from homeassistant.components.number import NumberDeviceClass, NumberEntity
from homeassistant.config_entries import ConfigEntry
@@ -27,6 +27,7 @@ async def async_setup_entry(
    async_add_entities(
        MillNumber(mill_data_coordinator, mill_device)
        for mill_device in mill_data_coordinator.data.values()
        if isinstance(mill_device, Heater)
    )

@@ -45,9 +46,8 @@ class MillNumber(MillBaseEntity, NumberEntity):
        mill_device: MillDevice,
    ) -> None:
        """Initialize the number."""
        super().__init__(coordinator, mill_device)
        self._attr_unique_id = f"{mill_device.device_id}_max_heating_power"
        self._update_attr(mill_device)
        super().__init__(coordinator, mill_device)

    @callback
    def _update_attr(self, device: MillDevice) -> None:
@@ -192,9 +192,9 @@ class MillSensor(MillBaseEntity, SensorEntity):
        mill_device: mill.Socket | mill.Heater,
    ) -> None:
        """Initialize the sensor."""
        super().__init__(coordinator, mill_device)
        self.entity_description = entity_description
        self._attr_unique_id = f"{mill_device.device_id}_{entity_description.key}"
        super().__init__(coordinator, mill_device)

    @callback
    def _update_attr(self, device):
@@ -236,7 +236,7 @@ CONFIG_SCHEMA = vol.Schema(
MQTT_PUBLISH_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_TOPIC): valid_publish_topic,
        vol.Required(ATTR_PAYLOAD): cv.string,
        vol.Required(ATTR_PAYLOAD, default=None): vol.Any(cv.string, None),
        vol.Optional(ATTR_EVALUATE_PAYLOAD): cv.boolean,
        vol.Optional(ATTR_QOS, default=DEFAULT_QOS): valid_qos_schema,
        vol.Optional(ATTR_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
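The payload key stays Required but now defaults to None and accepts None, which the publish service treats as an empty message. A self-contained sketch of the voluptuous pattern (simplified, generic names):

import voluptuous as vol

schema = vol.Schema({vol.Required("payload", default=None): vol.Any(str, None)})

print(schema({}))                  # {'payload': None} -> published as empty message
print(schema({"payload": "on"}))   # {'payload': 'on'}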
@@ -8,7 +8,6 @@ publish:
      selector:
        text:
  payload:
    required: true
    example: "The temperature is {{ states('sensor.temperature') }}"
    selector:
      template:

@@ -246,11 +246,7 @@
    },
    "payload": {
      "name": "Payload",
      "description": "The payload to publish."
    },
    "payload_template": {
      "name": "Payload template",
      "description": "Template to render as a payload value. If a payload is provided, the template is ignored."
      "description": "The payload to publish. Publishes an empty message if not provided."
    },
    "qos": {
      "name": "QoS",
@@ -8,7 +8,7 @@ from collections.abc import Awaitable, Callable
from http import HTTPStatus
import logging

from aiohttp import web
from aiohttp import ClientError, ClientResponseError, web
from google_nest_sdm.camera_traits import CameraClipPreviewTrait
from google_nest_sdm.device import Device
from google_nest_sdm.event import EventMessage
@@ -198,7 +198,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: NestConfigEntry) -> bool
        entry, unique_id=entry.data[CONF_PROJECT_ID]
    )

    subscriber = await api.new_subscriber(hass, entry)
    auth = await api.new_auth(hass, entry)
    try:
        await auth.async_get_access_token()
    except ClientResponseError as err:
        if 400 <= err.status < 500:
            raise ConfigEntryAuthFailed from err
        raise ConfigEntryNotReady from err
    except ClientError as err:
        raise ConfigEntryNotReady from err

    subscriber = await api.new_subscriber(hass, entry, auth)
    if not subscriber:
        return False
    # Keep media for last N events in memory
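The startup token check follows the usual Home Assistant mapping: permanent (4xx) OAuth failures trigger a reauth flow, anything transient retries setup later. A minimal standalone sketch of that mapping (the auth object is assumed to expose async_get_access_token, as in the diff):

from aiohttp import ClientError, ClientResponseError

from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady


async def _check_token(auth) -> None:
    try:
        await auth.async_get_access_token()
    except ClientResponseError as err:
        # 4xx: credentials are bad -> ask the user to reauthenticate.
        if 400 <= err.status < 500:
            raise ConfigEntryAuthFailed from err
        # 5xx: likely transient -> retry setup later.
        raise ConfigEntryNotReady from err
    except ClientError as err:
        # Network trouble -> retry setup later.
        raise ConfigEntryNotReady from err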
@@ -50,13 +50,14 @@ class AsyncConfigEntryAuth(AbstractAuth):
        return cast(str, self._oauth_session.token["access_token"])

    async def async_get_creds(self) -> Credentials:
        """Return an OAuth credential for Pub/Sub Subscriber."""
        # We don't have a way for Home Assistant to refresh creds on behalf
        # of the google pub/sub subscriber. Instead, build a full
        # Credentials object with enough information for the subscriber to
        # handle this on its own. We purposely don't refresh the token here
        # even when it is expired to fully hand off this responsibility and
        # know it is working at startup (then if not, fail loudly).
        """Return an OAuth credential for Pub/Sub Subscriber.

        The subscriber will call this when connecting to the stream to refresh
        the token. We construct a credentials object using the underlying
        OAuth2Session since the subscriber may expect the expiry fields to
        be present.
        """
        await self.async_get_access_token()
        token = self._oauth_session.token
        creds = Credentials(  # type: ignore[no-untyped-call]
            token=token["access_token"],
@@ -101,9 +102,7 @@ class AccessTokenAuthImpl(AbstractAuth):
    )


async def new_subscriber(
    hass: HomeAssistant, entry: NestConfigEntry
) -> GoogleNestSubscriber | None:
async def new_auth(hass: HomeAssistant, entry: NestConfigEntry) -> AbstractAuth:
    """Create a GoogleNestSubscriber."""
    implementation = (
        await config_entry_oauth2_flow.async_get_config_entry_implementation(
@@ -114,14 +113,22 @@ async def new_subscriber(
        implementation, config_entry_oauth2_flow.LocalOAuth2Implementation
    ):
        raise TypeError(f"Unexpected auth implementation {implementation}")
    if (subscription_name := entry.data.get(CONF_SUBSCRIPTION_NAME)) is None:
        subscription_name = entry.data[CONF_SUBSCRIBER_ID]
    auth = AsyncConfigEntryAuth(
    return AsyncConfigEntryAuth(
        aiohttp_client.async_get_clientsession(hass),
        config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation),
        implementation.client_id,
        implementation.client_secret,
    )


async def new_subscriber(
    hass: HomeAssistant,
    entry: NestConfigEntry,
    auth: AbstractAuth,
) -> GoogleNestSubscriber:
    """Create a GoogleNestSubscriber."""
    if (subscription_name := entry.data.get(CONF_SUBSCRIPTION_NAME)) is None:
        subscription_name = entry.data[CONF_SUBSCRIBER_ID]
    return GoogleNestSubscriber(auth, entry.data[CONF_PROJECT_ID], subscription_name)
@@ -19,5 +19,5 @@
  "documentation": "https://www.home-assistant.io/integrations/nest",
  "iot_class": "cloud_push",
  "loggers": ["google_nest_sdm"],
  "requirements": ["google-nest-sdm==7.1.1"]
  "requirements": ["google-nest-sdm==7.1.3"]
}
@@ -39,7 +39,7 @@ set_preset_mode_with_end_datetime:
      select:
        options:
          - "away"
          - "Frost Guard"
          - "frost_guard"
  end_datetime:
    required: true
    example: '"2019-04-20 05:04:20"'
@@ -41,7 +41,7 @@ ATTR_CURRENCY = "currency"
SERVICE_GET_PRICES_FOR_DATE = "get_prices_for_date"
SERVICE_GET_PRICES_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
        vol.Required(ATTR_DATE): cv.date,
        vol.Optional(ATTR_AREAS): vol.All(vol.In(list(AREAS)), cv.ensure_list, [str]),
        vol.Optional(ATTR_CURRENCY): vol.All(
@@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "cloud_polling",
  "quality_scale": "silver",
  "requirements": ["ohme==1.2.8"]
  "requirements": ["ohme==1.2.9"]
}
@@ -4,6 +4,8 @@ from __future__ import annotations

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from html import unescape
from json import dumps, loads
import logging
from typing import cast
@@ -13,6 +15,7 @@ from onedrive_personal_sdk.exceptions import (
    HttpRequestException,
    OneDriveException,
)
from onedrive_personal_sdk.models.items import ItemUpdate

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN
@@ -45,7 +48,6 @@ _LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) -> bool:
    """Set up OneDrive from a config entry."""
    implementation = await async_get_config_entry_implementation(hass, entry)

    session = OAuth2Session(hass, entry, implementation)

    async def get_access_token() -> str:
@@ -89,6 +91,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) ->
        backup_folder_id=backup_folder.id,
    )

    try:
        await _migrate_backup_files(client, backup_folder.id)
    except OneDriveException as err:
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="failed_to_migrate_files",
        ) from err

    _async_notify_backup_listeners_soon(hass)

    return True
@@ -108,3 +118,34 @@ def _async_notify_backup_listeners(hass: HomeAssistant) -> None:
@callback
def _async_notify_backup_listeners_soon(hass: HomeAssistant) -> None:
    hass.loop.call_soon(_async_notify_backup_listeners, hass)


async def _migrate_backup_files(client: OneDriveClient, backup_folder_id: str) -> None:
    """Migrate backup files to metadata version 2."""
    files = await client.list_drive_items(backup_folder_id)
    for file in files:
        if file.description and '"metadata_version": 1' in (
            metadata_json := unescape(file.description)
        ):
            metadata = loads(metadata_json)
            del metadata["metadata_version"]
            metadata_filename = file.name.rsplit(".", 1)[0] + ".metadata.json"
            metadata_file = await client.upload_file(
                backup_folder_id,
                metadata_filename,
                dumps(metadata),
            )
            metadata_description = {
                "metadata_version": 2,
                "backup_id": metadata["backup_id"],
                "backup_file_id": file.id,
            }
            await client.update_drive_item(
                path_or_id=metadata_file.id,
                data=ItemUpdate(description=dumps(metadata_description)),
            )
            await client.update_drive_item(
                path_or_id=file.id,
                data=ItemUpdate(description=""),
            )
            _LOGGER.debug("Migrated backup file %s", file.name)
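After migration, each backup occupies two drive items instead of one; roughly (a sketch with illustrative names and ids, not values from the commit):

# my_backup.tar            -> the archive itself; its description is cleared
# my_backup.metadata.json  -> full AgentBackup metadata as the file body
#
# The small pointer kept in the metadata file's OneDrive description field:
pointer = {
    "metadata_version": 2,
    "backup_id": "abc123",          # illustrative backup id
    "backup_file_id": "ITEM!101",   # illustrative drive item id of the archive
}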
@@ -4,8 +4,8 @@ from __future__ import annotations

from collections.abc import AsyncIterator, Callable, Coroutine
from functools import wraps
import html
import json
from html import unescape
from json import dumps, loads
import logging
from typing import Any, Concatenate
@@ -34,6 +34,7 @@ from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN
_LOGGER = logging.getLogger(__name__)
UPLOAD_CHUNK_SIZE = 16 * 320 * 1024  # 5.2MB
TIMEOUT = ClientTimeout(connect=10, total=43200)  # 12 hours
METADATA_VERSION = 2


async def async_get_backup_agents(
@@ -120,11 +121,19 @@ class OneDriveBackupAgent(BackupAgent):
        self, backup_id: str, **kwargs: Any
    ) -> AsyncIterator[bytes]:
        """Download a backup file."""
        item = await self._find_item_by_backup_id(backup_id)
        if item is None:
        metadata_item = await self._find_item_by_backup_id(backup_id)
        if (
            metadata_item is None
            or metadata_item.description is None
            or "backup_file_id" not in metadata_item.description
        ):
            raise BackupAgentError("Backup not found")

        stream = await self._client.download_drive_item(item.id, timeout=TIMEOUT)
        metadata_info = loads(unescape(metadata_item.description))

        stream = await self._client.download_drive_item(
            metadata_info["backup_file_id"], timeout=TIMEOUT
        )
        return stream.iter_chunked(1024)

    @handle_backup_errors
@@ -136,15 +145,15 @@ class OneDriveBackupAgent(BackupAgent):
        **kwargs: Any,
    ) -> None:
        """Upload a backup."""

        filename = suggested_filename(backup)
        file = FileInfo(
            suggested_filename(backup),
            filename,
            backup.size,
            self._folder_id,
            await open_stream(),
        )
        try:
            item = await LargeFileUploadClient.upload(
            backup_file = await LargeFileUploadClient.upload(
                self._token_function, file, session=async_get_clientsession(self._hass)
            )
        except HashMismatchError as err:
@@ -152,15 +161,25 @@ class OneDriveBackupAgent(BackupAgent):
                "Hash validation failed, backup file might be corrupt"
            ) from err

        # store metadata in description
        backup_dict = backup.as_dict()
        backup_dict["metadata_version"] = 1  # version of the backup metadata
        description = json.dumps(backup_dict)
        # store metadata in metadata file
        description = dumps(backup.as_dict())
        _LOGGER.debug("Creating metadata: %s", description)
        metadata_filename = filename.rsplit(".", 1)[0] + ".metadata.json"
        metadata_file = await self._client.upload_file(
            self._folder_id,
            metadata_filename,
            description,
        )

        # add metadata to the metadata file
        metadata_description = {
            "metadata_version": METADATA_VERSION,
            "backup_id": backup.backup_id,
            "backup_file_id": backup_file.id,
        }
        await self._client.update_drive_item(
            path_or_id=item.id,
            data=ItemUpdate(description=description),
            path_or_id=metadata_file.id,
            data=ItemUpdate(description=dumps(metadata_description)),
        )
    @handle_backup_errors
@@ -170,18 +189,28 @@ class OneDriveBackupAgent(BackupAgent):
        **kwargs: Any,
    ) -> None:
        """Delete a backup file."""
        item = await self._find_item_by_backup_id(backup_id)
        if item is None:
        metadata_item = await self._find_item_by_backup_id(backup_id)
        if (
            metadata_item is None
            or metadata_item.description is None
            or "backup_file_id" not in metadata_item.description
        ):
            return
        await self._client.delete_drive_item(item.id)
        metadata_info = loads(unescape(metadata_item.description))

        await self._client.delete_drive_item(metadata_info["backup_file_id"])
        await self._client.delete_drive_item(metadata_item.id)

    @handle_backup_errors
    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List backups."""
        items = await self._client.list_drive_items(self._folder_id)
        return [
            self._backup_from_description(item.description)
            for item in await self._client.list_drive_items(self._folder_id)
            if item.description and "homeassistant_version" in item.description
            await self._download_backup_metadata(item.id)
            for item in items
            if item.description
            and "backup_id" in item.description
            and f'"metadata_version": {METADATA_VERSION}' in unescape(item.description)
        ]

    @handle_backup_errors
@@ -189,19 +218,11 @@ class OneDriveBackupAgent(BackupAgent):
        self, backup_id: str, **kwargs: Any
    ) -> AgentBackup | None:
        """Return a backup."""
        item = await self._find_item_by_backup_id(backup_id)
        return (
            self._backup_from_description(item.description)
            if item and item.description
            else None
        )
        metadata_file = await self._find_item_by_backup_id(backup_id)
        if metadata_file is None or metadata_file.description is None:
            return None

    def _backup_from_description(self, description: str) -> AgentBackup:
        """Create a backup object from a description."""
        description = html.unescape(
            description
        )  # OneDrive encodes the description on save automatically
        return AgentBackup.from_dict(json.loads(description))
        return await self._download_backup_metadata(metadata_file.id)

    async def _find_item_by_backup_id(self, backup_id: str) -> File | Folder | None:
        """Find an item by backup ID."""
@@ -209,7 +230,15 @@ class OneDriveBackupAgent(BackupAgent):
            (
                item
                for item in await self._client.list_drive_items(self._folder_id)
                if item.description and backup_id in item.description
                if item.description
                and backup_id in item.description
                and f'"metadata_version": {METADATA_VERSION}'
                in unescape(item.description)
            ),
            None,
        )

    async def _download_backup_metadata(self, item_id: str) -> AgentBackup:
        metadata_stream = await self._client.download_drive_item(item_id)
        metadata_json = loads(await metadata_stream.read())
        return AgentBackup.from_dict(metadata_json)
@@ -9,5 +9,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["onedrive_personal_sdk"],
  "quality_scale": "bronze",
  "requirements": ["onedrive-personal-sdk==0.0.8"]
  "requirements": ["onedrive-personal-sdk==0.0.10"]
}
@@ -35,6 +35,9 @@
    },
    "failed_to_get_folder": {
      "message": "Failed to get {folder} folder"
    },
    "failed_to_migrate_files": {
      "message": "Failed to migrate metadata to separate files"
    }
  }
}
@@ -92,7 +92,7 @@ SUPPORT_ONKYO = (
DEFAULT_PLAYABLE_SOURCES = (
    InputSource.from_meaning("FM"),
    InputSource.from_meaning("AM"),
    InputSource.from_meaning("TUNER"),
    InputSource.from_meaning("DAB"),
)

ATTR_PRESET = "preset"
@@ -235,7 +235,7 @@ class ONVIFDevice:
        LOGGER.debug("%s: Retrieving current device date/time", self.name)
        try:
            device_time = await device_mgmt.GetSystemDateAndTime()
        except RequestError as err:
        except (RequestError, Fault) as err:
            LOGGER.warning(
                "Couldn't get device '%s' date/time. Error: %s", self.name, err
            )
@@ -4,6 +4,7 @@ from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from datetime import date

from opower import Forecast, MeterType, UnitOfMeasure

@@ -29,7 +30,7 @@ from .coordinator import OpowerCoordinator
class OpowerEntityDescription(SensorEntityDescription):
    """Class describing Opower sensors entities."""

    value_fn: Callable[[Forecast], str | float]
    value_fn: Callable[[Forecast], str | float | date]


# suggested_display_precision=0 for all sensors since
@@ -97,7 +98,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
        device_class=SensorDeviceClass.DATE,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        value_fn=lambda data: str(data.start_date),
        value_fn=lambda data: data.start_date,
    ),
    OpowerEntityDescription(
        key="elec_end_date",
@@ -105,7 +106,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
        device_class=SensorDeviceClass.DATE,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        value_fn=lambda data: str(data.end_date),
        value_fn=lambda data: data.end_date,
    ),
)
GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
@@ -169,7 +170,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
        device_class=SensorDeviceClass.DATE,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        value_fn=lambda data: str(data.start_date),
        value_fn=lambda data: data.start_date,
    ),
    OpowerEntityDescription(
        key="gas_end_date",
@@ -177,7 +178,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
        device_class=SensorDeviceClass.DATE,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        value_fn=lambda data: str(data.end_date),
        value_fn=lambda data: data.end_date,
    ),
)

@@ -247,7 +248,7 @@ class OpowerSensor(CoordinatorEntity[OpowerCoordinator], SensorEntity):
        self.utility_account_id = utility_account_id

    @property
    def native_value(self) -> StateType:
    def native_value(self) -> StateType | date:
        """Return the state."""
        if self.coordinator.data is not None:
            return self.entity_description.value_fn(
@@ -27,7 +27,7 @@ REGISTERED_NOTIFICATIONS = (
JSON_PAYLOAD = (
    '"{\\"notification_type\\":\\"{{notification_type}}\\",\\"subject\\":\\"{{subject}'
    '}\\",\\"message\\":\\"{{message}}\\",\\"image\\":\\"{{image}}\\",\\"{{media}}\\":'
    '{\\"media_type\\":\\"{{media_type}}\\",\\"tmdb_idd\\":\\"{{media_tmdbid}}\\",\\"t'
    '{\\"media_type\\":\\"{{media_type}}\\",\\"tmdb_id\\":\\"{{media_tmdbid}}\\",\\"t'
    'vdb_id\\":\\"{{media_tvdbid}}\\",\\"status\\":\\"{{media_status}}\\",\\"status4k'
    '\\":\\"{{media_status4k}}\\"},\\"{{request}}\\":{\\"request_id\\":\\"{{request_id'
    '}}\\",\\"requested_by_email\\":\\"{{requestedBy_email}}\\",\\"requested_by_userna'
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/prosegur",
  "iot_class": "cloud_polling",
  "loggers": ["pyprosegur"],
  "requirements": ["pyprosegur==0.0.9"]
  "requirements": ["pyprosegur==0.0.13"]
}
@@ -103,10 +103,10 @@ class ReolinkHostCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinator[None]
        """Handle incoming TCP push event."""
        self.async_write_ha_state()

    def register_callback(self, unique_id: str, cmd_id: int) -> None:
    def register_callback(self, callback_id: str, cmd_id: int) -> None:
        """Register callback for TCP push events."""
        self._host.api.baichuan.register_callback(  # pragma: no cover
            unique_id, self._push_callback, cmd_id
            callback_id, self._push_callback, cmd_id
        )

    async def async_added_to_hass(self) -> None:
@@ -114,23 +114,25 @@ class ReolinkHostCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinator[None]
        await super().async_added_to_hass()
        cmd_key = self.entity_description.cmd_key
        cmd_id = self.entity_description.cmd_id
        callback_id = f"{self.platform.domain}_{self._attr_unique_id}"
        if cmd_key is not None:
            self._host.async_register_update_cmd(cmd_key)
        if cmd_id is not None:
            self.register_callback(self._attr_unique_id, cmd_id)
            self.register_callback(callback_id, cmd_id)
        # Privacy mode
        self.register_callback(f"{self._attr_unique_id}_623", 623)
        self.register_callback(f"{callback_id}_623", 623)

    async def async_will_remove_from_hass(self) -> None:
        """Entity removed."""
        cmd_key = self.entity_description.cmd_key
        cmd_id = self.entity_description.cmd_id
        callback_id = f"{self.platform.domain}_{self._attr_unique_id}"
        if cmd_key is not None:
            self._host.async_unregister_update_cmd(cmd_key)
        if cmd_id is not None:
            self._host.api.baichuan.unregister_callback(self._attr_unique_id)
            self._host.api.baichuan.unregister_callback(callback_id)
        # Privacy mode
        self._host.api.baichuan.unregister_callback(f"{self._attr_unique_id}_623")
        self._host.api.baichuan.unregister_callback(f"{callback_id}_623")

        await super().async_will_remove_from_hass()

@@ -189,10 +191,10 @@ class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity):
        """Return True if entity is available."""
        return super().available and self._host.api.camera_online(self._channel)

    def register_callback(self, unique_id: str, cmd_id: int) -> None:
    def register_callback(self, callback_id: str, cmd_id: int) -> None:
        """Register callback for TCP push events."""
        self._host.api.baichuan.register_callback(
            unique_id, self._push_callback, cmd_id, self._channel
            callback_id, self._push_callback, cmd_id, self._channel
        )

    async def async_added_to_hass(self) -> None:
@@ -19,5 +19,5 @@
  "iot_class": "local_push",
  "loggers": ["reolink_aio"],
  "quality_scale": "platinum",
  "requirements": ["reolink-aio==0.11.9"]
  "requirements": ["reolink-aio==0.12.0"]
}
@@ -71,7 +71,7 @@ class ReolinkVODMediaSource(MediaSource):
        host = get_host(self.hass, config_entry_id)

        def get_vod_type() -> VodRequestType:
            if filename.endswith(".mp4"):
            if filename.endswith((".mp4", ".vref")):
                if host.api.is_nvr:
                    return VodRequestType.DOWNLOAD
                return VodRequestType.PLAYBACK
@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/rympro",
  "iot_class": "cloud_polling",
  "requirements": ["pyrympro==0.0.8"]
  "requirements": ["pyrympro==0.0.9"]
}

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/sentry",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "requirements": ["sentry-sdk==1.40.3"]
  "requirements": ["sentry-sdk==1.45.1"]
}
@@ -19,7 +19,7 @@
    "delivered": {
      "default": "mdi:package"
    },
    "returned": {
    "alert": {
      "default": "mdi:package"
    },
    "package": {

@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["pyseventeentrack"],
  "requirements": ["pyseventeentrack==1.0.1"]
  "requirements": ["pyseventeentrack==1.0.2"]
}
@@ -11,7 +11,7 @@ get_packages:
          - "ready_to_be_picked_up"
          - "undelivered"
          - "delivered"
          - "returned"
          - "alert"
        translation_key: package_state
  config_entry_id:
    required: true

@@ -57,8 +57,8 @@
    "delivered": {
      "name": "Delivered"
    },
    "returned": {
      "name": "Returned"
    "alert": {
      "name": "Alert"
    },
    "package": {
      "name": "Package {name}"
@@ -104,7 +104,7 @@
        "ready_to_be_picked_up": "[%key:component::seventeentrack::entity::sensor::ready_to_be_picked_up::name%]",
        "undelivered": "[%key:component::seventeentrack::entity::sensor::undelivered::name%]",
        "delivered": "[%key:component::seventeentrack::entity::sensor::delivered::name%]",
        "returned": "[%key:component::seventeentrack::entity::sensor::returned::name%]"
        "alert": "[%key:component::seventeentrack::entity::sensor::alert::name%]"
      }
    }
  }
@@ -8,7 +8,7 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["aioshelly"],
  "requirements": ["aioshelly==12.3.2"],
  "requirements": ["aioshelly==12.4.2"],
  "zeroconf": [
    {
      "type": "_http._tcp.local.",
@@ -139,6 +139,24 @@ class RpcBluTrvNumber(RpcNumber):
    )


class RpcBluTrvExtTempNumber(RpcBluTrvNumber):
    """Represent a RPC BluTrv External Temperature number."""

    _reported_value: float | None = None

    @property
    def native_value(self) -> float | None:
        """Return value of number."""
        return self._reported_value

    async def async_set_native_value(self, value: float) -> None:
        """Change the value."""
        await super().async_set_native_value(value)

        self._reported_value = value
        self.async_write_ha_state()


NUMBERS: dict[tuple[str, str], BlockNumberDescription] = {
    ("device", "valvePos"): BlockNumberDescription(
        key="device|valvepos",
@@ -175,7 +193,7 @@ RPC_NUMBERS: Final = {
            "method": "Trv.SetExternalTemperature",
            "params": {"id": 0, "t_C": value},
        },
        entity_class=RpcBluTrvNumber,
        entity_class=RpcBluTrvExtTempNumber,
    ),
    "number": RpcNumberDescription(
        key="number",
@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["pymodbus", "pysmarty2"],
  "requirements": ["pysmarty2==0.10.1"]
  "requirements": ["pysmarty2==0.10.2"]
}
@@ -170,6 +170,7 @@ MODELS_TV_ONLY = (
    "BEAM",
    "PLAYBAR",
    "PLAYBASE",
    "ULTRA",
)
MODELS_LINEIN_AND_TV = ("AMP",)
@@ -7,7 +7,7 @@
  "documentation": "https://www.home-assistant.io/integrations/synology_dsm",
  "iot_class": "local_polling",
  "loggers": ["synology_dsm"],
  "requirements": ["py-synologydsm-api==2.6.0"],
  "requirements": ["py-synologydsm-api==2.6.3"],
  "ssdp": [
    {
      "manufacturer": "Synology",
@@ -175,6 +175,7 @@ BASE_SERVICE_SCHEMA = vol.Schema(
        vol.Optional(ATTR_KEYBOARD_INLINE): cv.ensure_list,
        vol.Optional(ATTR_TIMEOUT): cv.positive_int,
        vol.Optional(ATTR_MESSAGE_TAG): cv.string,
        vol.Optional(ATTR_MESSAGE_THREAD_ID): vol.Coerce(int),
    },
    extra=vol.ALLOW_EXTRA,
)
@@ -216,6 +217,7 @@ SERVICE_SCHEMA_SEND_POLL = vol.Schema(
        vol.Optional(ATTR_ALLOWS_MULTIPLE_ANSWERS, default=False): cv.boolean,
        vol.Optional(ATTR_DISABLE_NOTIF): cv.boolean,
        vol.Optional(ATTR_TIMEOUT): cv.positive_int,
        vol.Optional(ATTR_MESSAGE_THREAD_ID): vol.Coerce(int),
    }
)

@@ -754,7 +756,8 @@ class TelegramNotificationService:
                message_thread_id=params[ATTR_MESSAGE_THREAD_ID],
                context=context,
            )
            msg_ids[chat_id] = msg.id
            if msg is not None:
                msg_ids[chat_id] = msg.id
        return msg_ids

    async def delete_message(self, chat_id=None, context=None, **kwargs):
@@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/tesla_fleet",
  "iot_class": "cloud_polling",
  "loggers": ["tesla-fleet-api"],
  "requirements": ["tesla-fleet-api==0.9.8"]
  "requirements": ["tesla-fleet-api==0.9.10"]
}

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/teslemetry",
  "iot_class": "cloud_polling",
  "loggers": ["tesla-fleet-api"],
  "requirements": ["tesla-fleet-api==0.9.8", "teslemetry-stream==0.6.6"]
  "requirements": ["tesla-fleet-api==0.9.10", "teslemetry-stream==0.6.6"]
}

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/tessie",
  "iot_class": "cloud_polling",
  "loggers": ["tessie", "tesla-fleet-api"],
  "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.9.8"]
  "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.9.10"]
}
@@ -9,6 +9,7 @@ import logging
from kasa import AuthenticationError, Credentials, Device, KasaException
from kasa.iot import IotStrip

from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
@@ -123,11 +124,14 @@ class TPLinkDataUpdateCoordinator(DataUpdateCoordinator[None]):
    def get_child_coordinator(
        self,
        child: Device,
        platform_domain: str,
    ) -> TPLinkDataUpdateCoordinator:
        """Get separate child coordinator for a device or self if not needed."""
        # The iot HS300 allows a limited number of concurrent requests and fetching the
        # emeter information requires separate ones so create child coordinators here.
        if isinstance(self.device, IotStrip):
        # This does not happen for switches as the state is available on the
        # parent device info.
        if isinstance(self.device, IotStrip) and platform_domain != SWITCH_DOMAIN:
            if not (child_coordinator := self._child_coordinators.get(child.device_id)):
                # The child coordinators only update energy data so we can
                # set a longer update interval to avoid flooding the device
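Consolidated, the new guard reads roughly as follows (a sketch; the per-child coordinator creation inside the condition is unchanged and elided, and the final return-self fallback is implied by the docstring rather than shown in the hunk):

def get_child_coordinator(
    self,
    child: Device,
    platform_domain: str,
) -> TPLinkDataUpdateCoordinator:
    """Get separate child coordinator for a device or self if not needed."""
    # The iot HS300 tolerates few concurrent requests, so energy data goes
    # through per-child coordinators -- except for switches, whose state is
    # already available on the parent device info.
    if isinstance(self.device, IotStrip) and platform_domain != SWITCH_DOMAIN:
        ...  # create or return the cached child coordinator (unchanged)
    return self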
@@ -508,7 +508,9 @@ class CoordinatedTPLinkFeatureEntity(CoordinatedTPLinkEntity, ABC):
        )

        for child in children:
            child_coordinator = coordinator.get_child_coordinator(child)
            child_coordinator = coordinator.get_child_coordinator(
                child, platform_domain
            )

            child_entities = cls._entities_for_device(
                hass,
@@ -651,7 +653,9 @@ class CoordinatedTPLinkModuleEntity(CoordinatedTPLinkEntity, ABC):
            device.host,
        )
        for child in children:
            child_coordinator = coordinator.get_child_coordinator(child)
            child_coordinator = coordinator.get_child_coordinator(
                child, platform_domain
            )

            child_entities: list[_E] = cls._entities_for_device(
                hass,
|
||||
@@ -301,5 +301,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["kasa"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["python-kasa[speedups]==0.10.1"]
|
||||
"requirements": ["python-kasa[speedups]==0.10.2"]
|
||||
}
|
||||
|
||||
@@ -78,7 +78,9 @@ MIGRATION_NAME_TO_KEY = {
|
||||
|
||||
SERVICE_BASE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ENTRY_ID): selector.ConfigEntrySelector(),
|
||||
vol.Required(CONF_ENTRY_ID): selector.ConfigEntrySelector(
|
||||
{"integration": DOMAIN}
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@@ -256,7 +256,7 @@ BINARY_SENSORS: dict[str, tuple[TuyaBinarySensorEntityDescription, ...]] = {
        TuyaBinarySensorEntityDescription(
            key=DPCode.WATERSENSOR_STATE,
            device_class=BinarySensorDeviceClass.MOISTURE,
            on_value="alarm",
            on_value={"1", "alarm"},
        ),
        TAMPER_BINARY_SENSOR,
    ),
@@ -12,5 +12,5 @@
  "documentation": "https://www.home-assistant.io/integrations/vesync",
  "iot_class": "cloud_polling",
  "loggers": ["pyvesync"],
  "requirements": ["pyvesync==2.1.17"]
  "requirements": ["pyvesync==2.1.18"]
}
Some files were not shown because too many files have changed in this diff.