forked from home-assistant/core
Compare commits
115 Commits
2025.5.0b1...2025.5.3
| SHA1 |
|---|
| 3e6473d130 |
| 9a183bc16a |
| e540247c14 |
| 0aef8b58d8 |
| f0501f917b |
| 97004e13cb |
| f867a0af24 |
| d3b3839ffa |
| 1a227d6a10 |
| fc8c403a3a |
| c1bf596eba |
| 63f69a9e3d |
| e13b014b6f |
| be0d4d926c |
| 2403fff81f |
| 8c475787cc |
| d9fe1edd82 |
| f5cf64700a |
| 777b04d7a5 |
| 9fc78ed4e2 |
| d03af549d4 |
| d91f01243c |
| 5094208db6 |
| 006f66a841 |
| 64b7d77840 |
| abf6a809b8 |
| 1b7dd205c7 |
| 3e00366a61 |
| a17275b559 |
| 9534a919ce |
| 422dbfef88 |
| 8e44684a61 |
| 642e7fd487 |
| 9bb9132e7b |
| 41be82f167 |
| 47140e14d9 |
| 926502b0f1 |
| 78351ff7a7 |
| c333726867 |
| f66feabaaf |
| 0ef098a9f3 |
| 02b028add3 |
| 34455f9743 |
| 8c4eec231f |
| 621a14d7cc |
| 4906e78a5c |
| 146e440d59 |
| e2ede3ed19 |
| b76ac68fb1 |
| 0691ad9362 |
| 715f116954 |
| 9f0db98745 |
| 0ba55c31e8 |
| 19b7cfbd4a |
| a9520888cf |
| f086f4a955 |
| a657964c25 |
| 543104b36c |
| bf1d2069e4 |
| e5e1c9fb05 |
| 4c4be88323 |
| 5a83627dc5 |
| 3123a7b168 |
| 8161ce6ea8 |
| d9cbd1b65f |
| b7c07209b8 |
| 6c3a4f17f0 |
| d82feb807f |
| c373fa9296 |
| 139b48440f |
| 9de1d3b143 |
| b69ebdaecb |
| f25e50b017 |
| a4a7601f9f |
| 41a503f76f |
| f1a3d62db2 |
| e465276464 |
| 47b45444eb |
| cf0911cc56 |
| da79d5b2e3 |
| 358b0c1c17 |
| 543348fe58 |
| 0635856761 |
| 081afe6034 |
| ca14322227 |
| a54816a6e5 |
| 27db4e90b5 |
| e9cc624d93 |
| 5a95f43992 |
| 36a35132c0 |
| 2fbc75f89b |
| 48aa6be889 |
| bde04bc47b |
| 7d163aa659 |
| 010b044379 |
| 00627b82e0 |
| 13aba6201e |
| f392e0c1c7 |
| 181eca6c82 |
| 196d923ac6 |
| 4ad387c967 |
| cb475bf153 |
| 47acceea08 |
| fd6fb7e3bc |
| 30f7e9b441 |
| a8beec2691 |
| 23244fb79f |
| e5c56629e2 |
| a793503c8a |
| 054c7a0adc |
| 6eb2d1aa7c |
| 619fdea5df |
| e8bdc7286e |
| 18f2b120ef |
| 43d8345821 |
@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["pyaprilaire"],
-  "requirements": ["pyaprilaire==0.8.1"]
+  "requirements": ["pyaprilaire==0.9.0"]
 }

@@ -2,8 +2,8 @@

 from aiohttp import ClientTimeout
 from azure.core.exceptions import (
+    AzureError,
     ClientAuthenticationError,
     HttpResponseError,
     ResourceNotFoundError,
 )
 from azure.core.pipeline.transport._aiohttp import (
@@ -39,11 +39,20 @@ async def async_setup_entry(
     session = async_create_clientsession(
         hass, timeout=ClientTimeout(connect=10, total=12 * 60 * 60)
     )
-    container_client = ContainerClient(
-        account_url=f"https://{entry.data[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
-        container_name=entry.data[CONF_CONTAINER_NAME],
-        credential=entry.data[CONF_STORAGE_ACCOUNT_KEY],
-        transport=AioHttpTransport(session=session),
-    )
+
+    def create_container_client() -> ContainerClient:
+        """Create a ContainerClient."""
+
+        return ContainerClient(
+            account_url=f"https://{entry.data[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
+            container_name=entry.data[CONF_CONTAINER_NAME],
+            credential=entry.data[CONF_STORAGE_ACCOUNT_KEY],
+            transport=AioHttpTransport(session=session),
+        )
+
+    # has a blocking call to open in cpython
+    container_client: ContainerClient = await hass.async_add_executor_job(
+        create_container_client
+    )

     try:
@@ -61,7 +70,7 @@ async def async_setup_entry(
             translation_key="invalid_auth",
             translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
         ) from err
-    except HttpResponseError as err:
+    except AzureError as err:
         raise ConfigEntryNotReady(
             translation_domain=DOMAIN,
             translation_key="cannot_connect",

@@ -8,7 +8,7 @@ import json
 import logging
 from typing import Any, Concatenate

-from azure.core.exceptions import HttpResponseError
+from azure.core.exceptions import AzureError, HttpResponseError, ServiceRequestError
 from azure.storage.blob import BlobProperties

 from homeassistant.components.backup import (
@@ -80,6 +80,20 @@ def handle_backup_errors[_R, **P](
                 f"Error during backup operation in {func.__name__}:"
                 f" Status {err.status_code}, message: {err.message}"
             ) from err
+        except ServiceRequestError as err:
+            raise BackupAgentError(
+                f"Timeout during backup operation in {func.__name__}"
+            ) from err
+        except AzureError as err:
+            _LOGGER.debug(
+                "Error during backup in %s: %s",
+                func.__name__,
+                err,
+                exc_info=True,
+            )
+            raise BackupAgentError(
+                f"Error during backup operation in {func.__name__}: {err}"
+            ) from err

     return wrapper

@@ -27,9 +27,25 @@ _LOGGER = logging.getLogger(__name__)
 class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for azure storage."""

-    def get_account_url(self, account_name: str) -> str:
-        """Get the account URL."""
-        return f"https://{account_name}.blob.core.windows.net/"
+    async def get_container_client(
+        self, account_name: str, container_name: str, storage_account_key: str
+    ) -> ContainerClient:
+        """Get the container client.
+
+        ContainerClient has a blocking call to open in cpython
+        """
+
+        session = async_get_clientsession(self.hass)
+
+        def create_container_client() -> ContainerClient:
+            return ContainerClient(
+                account_url=f"https://{account_name}.blob.core.windows.net/",
+                container_name=container_name,
+                credential=storage_account_key,
+                transport=AioHttpTransport(session=session),
+            )
+
+        return await self.hass.async_add_executor_job(create_container_client)

     async def validate_config(
         self, container_client: ContainerClient
@@ -58,11 +74,10 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
             self._async_abort_entries_match(
                 {CONF_ACCOUNT_NAME: user_input[CONF_ACCOUNT_NAME]}
             )
-            container_client = ContainerClient(
-                account_url=self.get_account_url(user_input[CONF_ACCOUNT_NAME]),
+            container_client = await self.get_container_client(
+                account_name=user_input[CONF_ACCOUNT_NAME],
                 container_name=user_input[CONF_CONTAINER_NAME],
-                credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
-                transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
+                storage_account_key=user_input[CONF_STORAGE_ACCOUNT_KEY],
             )
             errors = await self.validate_config(container_client)

@@ -99,12 +114,12 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
         reauth_entry = self._get_reauth_entry()

         if user_input is not None:
-            container_client = ContainerClient(
-                account_url=self.get_account_url(reauth_entry.data[CONF_ACCOUNT_NAME]),
+            container_client = await self.get_container_client(
+                account_name=reauth_entry.data[CONF_ACCOUNT_NAME],
                 container_name=reauth_entry.data[CONF_CONTAINER_NAME],
-                credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
-                transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
+                storage_account_key=user_input[CONF_STORAGE_ACCOUNT_KEY],
            )

             errors = await self.validate_config(container_client)
             if not errors:
                 return self.async_update_reload_and_abort(
@@ -129,13 +144,10 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
         reconfigure_entry = self._get_reconfigure_entry()

         if user_input is not None:
-            container_client = ContainerClient(
-                account_url=self.get_account_url(
-                    reconfigure_entry.data[CONF_ACCOUNT_NAME]
-                ),
+            container_client = await self.get_container_client(
+                account_name=reconfigure_entry.data[CONF_ACCOUNT_NAME],
                 container_name=user_input[CONF_CONTAINER_NAME],
-                credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
-                transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
+                storage_account_key=user_input[CONF_STORAGE_ACCOUNT_KEY],
             )
             errors = await self.validate_config(container_client)
             if not errors:

@@ -22,7 +22,7 @@ from . import util
 from .agent import BackupAgent
 from .const import DATA_MANAGER
 from .manager import BackupManager
-from .models import BackupNotFound
+from .models import AgentBackup, BackupNotFound


 @callback
@@ -85,7 +85,15 @@ class DownloadBackupView(HomeAssistantView):
                     request, headers, backup_id, agent_id, agent, manager
                 )
             return await self._send_backup_with_password(
-                hass, request, headers, backup_id, agent_id, password, agent, manager
+                hass,
+                backup,
+                request,
+                headers,
+                backup_id,
+                agent_id,
+                password,
+                agent,
+                manager,
             )
         except BackupNotFound:
             return Response(status=HTTPStatus.NOT_FOUND)
@@ -116,6 +124,7 @@ class DownloadBackupView(HomeAssistantView):
     async def _send_backup_with_password(
         self,
         hass: HomeAssistant,
+        backup: AgentBackup,
         request: Request,
         headers: dict[istr, str],
         backup_id: str,
@@ -144,7 +153,8 @@ class DownloadBackupView(HomeAssistantView):

         stream = util.AsyncIteratorWriter(hass)
         worker = threading.Thread(
-            target=util.decrypt_backup, args=[reader, stream, password, on_done, 0, []]
+            target=util.decrypt_backup,
+            args=[backup, reader, stream, password, on_done, 0, []],
         )
         try:
             worker.start()

@@ -295,13 +295,26 @@ def validate_password_stream(
     raise BackupEmpty


+def _get_expected_archives(backup: AgentBackup) -> set[str]:
+    """Get the expected archives in the backup."""
+    expected_archives = set()
+    if backup.homeassistant_included:
+        expected_archives.add("homeassistant")
+    for addon in backup.addons:
+        expected_archives.add(addon.slug)
+    for folder in backup.folders:
+        expected_archives.add(folder.value)
+    return expected_archives
+
+
 def decrypt_backup(
+    backup: AgentBackup,
     input_stream: IO[bytes],
     output_stream: IO[bytes],
     password: str | None,
     on_done: Callable[[Exception | None], None],
     minimum_size: int,
-    nonces: list[bytes],
+    nonces: NonceGenerator,
 ) -> None:
     """Decrypt a backup."""
     error: Exception | None = None
@@ -315,7 +328,7 @@ def decrypt_backup(
                 fileobj=output_stream, mode="w|", bufsize=BUF_SIZE
             ) as output_tar,
         ):
-            _decrypt_backup(input_tar, output_tar, password)
+            _decrypt_backup(backup, input_tar, output_tar, password)
     except (DecryptError, SecureTarError, tarfile.TarError) as err:
         LOGGER.warning("Error decrypting backup: %s", err)
         error = err
@@ -333,15 +346,18 @@ def decrypt_backup(


 def _decrypt_backup(
+    backup: AgentBackup,
     input_tar: tarfile.TarFile,
     output_tar: tarfile.TarFile,
     password: str | None,
 ) -> None:
     """Decrypt a backup."""
+    expected_archives = _get_expected_archives(backup)
     for obj in input_tar:
         # We compare with PurePath to avoid issues with different path separators,
         # for example when backup.json is added as "./backup.json"
-        if PurePath(obj.name) == PurePath("backup.json"):
+        object_path = PurePath(obj.name)
+        if object_path == PurePath("backup.json"):
             # Rewrite the backup.json file to indicate that the backup is decrypted
             if not (reader := input_tar.extractfile(obj)):
                 raise DecryptError
@@ -352,7 +368,13 @@ def _decrypt_backup(
             metadata_obj.size = len(updated_metadata_b)
             output_tar.addfile(metadata_obj, BytesIO(updated_metadata_b))
             continue
-        if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
+        prefix, _, suffix = object_path.name.partition(".")
+        if suffix not in ("tar", "tgz", "tar.gz"):
             LOGGER.debug("Unknown file %s will not be decrypted", obj.name)
             output_tar.addfile(obj, input_tar.extractfile(obj))
             continue
+        if prefix not in expected_archives:
+            LOGGER.debug("Unknown inner tar file %s will not be decrypted", obj.name)
+            output_tar.addfile(obj, input_tar.extractfile(obj))
+            continue
         istf = SecureTarFile(
@@ -371,12 +393,13 @@


 def encrypt_backup(
+    backup: AgentBackup,
     input_stream: IO[bytes],
     output_stream: IO[bytes],
     password: str | None,
     on_done: Callable[[Exception | None], None],
     minimum_size: int,
-    nonces: list[bytes],
+    nonces: NonceGenerator,
 ) -> None:
     """Encrypt a backup."""
     error: Exception | None = None
@@ -390,7 +413,7 @@ def encrypt_backup(
                 fileobj=output_stream, mode="w|", bufsize=BUF_SIZE
             ) as output_tar,
         ):
-            _encrypt_backup(input_tar, output_tar, password, nonces)
+            _encrypt_backup(backup, input_tar, output_tar, password, nonces)
     except (EncryptError, SecureTarError, tarfile.TarError) as err:
         LOGGER.warning("Error encrypting backup: %s", err)
         error = err
@@ -408,17 +431,20 @@ def encrypt_backup(


 def _encrypt_backup(
+    backup: AgentBackup,
     input_tar: tarfile.TarFile,
     output_tar: tarfile.TarFile,
     password: str | None,
-    nonces: list[bytes],
+    nonces: NonceGenerator,
 ) -> None:
     """Encrypt a backup."""
     inner_tar_idx = 0
+    expected_archives = _get_expected_archives(backup)
     for obj in input_tar:
         # We compare with PurePath to avoid issues with different path separators,
         # for example when backup.json is added as "./backup.json"
-        if PurePath(obj.name) == PurePath("backup.json"):
+        object_path = PurePath(obj.name)
+        if object_path == PurePath("backup.json"):
             # Rewrite the backup.json file to indicate that the backup is encrypted
             if not (reader := input_tar.extractfile(obj)):
                 raise EncryptError
@@ -429,16 +455,21 @@ def _encrypt_backup(
             metadata_obj.size = len(updated_metadata_b)
             output_tar.addfile(metadata_obj, BytesIO(updated_metadata_b))
             continue
-        if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
+        prefix, _, suffix = object_path.name.partition(".")
+        if suffix not in ("tar", "tgz", "tar.gz"):
             LOGGER.debug("Unknown file %s will not be encrypted", obj.name)
             output_tar.addfile(obj, input_tar.extractfile(obj))
             continue
+        if prefix not in expected_archives:
+            LOGGER.debug("Unknown inner tar file %s will not be encrypted", obj.name)
+            continue
         istf = SecureTarFile(
             None,  # Not used
             gzip=False,
             key=password_to_key(password) if password is not None else None,
             mode="r",
             fileobj=input_tar.extractfile(obj),
-            nonce=nonces[inner_tar_idx],
+            nonce=nonces.get(inner_tar_idx),
         )
         inner_tar_idx += 1
         with istf.encrypt(obj) as encrypted:
@@ -456,17 +487,33 @@ class _CipherWorkerStatus:
     writer: AsyncIteratorWriter


+class NonceGenerator:
+    """Generate nonces for encryption."""
+
+    def __init__(self) -> None:
+        """Initialize the generator."""
+        self._nonces: dict[int, bytes] = {}
+
+    def get(self, index: int) -> bytes:
+        """Get a nonce for the given index."""
+        if index not in self._nonces:
+            # Generate a new nonce for the given index
+            self._nonces[index] = os.urandom(16)
+        return self._nonces[index]
+
+
 class _CipherBackupStreamer:
     """Encrypt or decrypt a backup."""

     _cipher_func: Callable[
         [
+            AgentBackup,
             IO[bytes],
             IO[bytes],
             str | None,
             Callable[[Exception | None], None],
             int,
-            list[bytes],
+            NonceGenerator,
         ],
         None,
     ]
@@ -484,7 +531,7 @@ class _CipherBackupStreamer:
         self._hass = hass
         self._open_stream = open_stream
         self._password = password
-        self._nonces: list[bytes] = []
+        self._nonces = NonceGenerator()

     def size(self) -> int:
         """Return the maximum size of the decrypted or encrypted backup."""
@@ -508,7 +555,15 @@
         writer = AsyncIteratorWriter(self._hass)
         worker = threading.Thread(
             target=self._cipher_func,
-            args=[reader, writer, self._password, on_done, self.size(), self._nonces],
+            args=[
+                self._backup,
+                reader,
+                writer,
+                self._password,
+                on_done,
+                self.size(),
+                self._nonces,
+            ],
         )
         worker_status = _CipherWorkerStatus(
             done=asyncio.Event(), reader=reader, thread=worker, writer=writer
@@ -538,17 +593,6 @@ class DecryptedBackupStreamer(_CipherBackupStreamer):
 class EncryptedBackupStreamer(_CipherBackupStreamer):
     """Encrypt a backup."""

-    def __init__(
-        self,
-        hass: HomeAssistant,
-        backup: AgentBackup,
-        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
-        password: str | None,
-    ) -> None:
-        """Initialize."""
-        super().__init__(hass, backup, open_stream, password)
-        self._nonces = [os.urandom(16) for _ in range(self._num_tar_files())]
-
     _cipher_func = staticmethod(encrypt_backup)

     def backup(self) -> AgentBackup:

@@ -77,6 +77,5 @@ async def async_unload_entry(hass: HomeAssistant, entry: ComelitConfigEntry) ->
     coordinator = entry.runtime_data
     if unload_ok := await hass.config_entries.async_unload_platforms(entry, platforms):
-        await coordinator.api.logout()
         await coordinator.api.close()

     return unload_ok

@@ -134,11 +134,9 @@ class ComelitClimateEntity(ComelitBridgeBaseEntity, ClimateEntity):
         self._attr_current_temperature = values[0] / 10

         self._attr_hvac_action = None
         if _mode == ClimaComelitMode.OFF:
             self._attr_hvac_action = HVACAction.OFF
-        if not _active:
-            self._attr_hvac_action = HVACAction.IDLE
-        if _mode in API_STATUS:
+        elif _mode in API_STATUS:
             self._attr_hvac_action = API_STATUS[_mode]["hvac_action"]

         self._attr_hvac_mode = None

@@ -73,7 +73,6 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
         ) from err
     finally:
-        await api.logout()
         await api.close()

     return {"title": data[CONF_HOST]}


@@ -8,5 +8,5 @@
   "iot_class": "local_polling",
   "loggers": ["aiocomelit"],
   "quality_scale": "bronze",
-  "requirements": ["aiocomelit==0.12.0"]
+  "requirements": ["aiocomelit==0.12.3"]
 }

@@ -76,7 +76,7 @@
     "cannot_authenticate": {
       "message": "Error authenticating"
     },
-    "updated_failed": {
+    "update_failed": {
       "message": "Failed to update data: {error}"
     }
   }

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.4.30"]
+  "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.5.7"]
 }

@@ -15,7 +15,7 @@
   "quality_scale": "internal",
   "requirements": [
     "aiodhcpwatcher==1.1.1",
-    "aiodiscover==2.6.1",
+    "aiodiscover==2.7.0",
     "cached-ipaddress==0.10.0"
   ]
 }

@@ -68,7 +68,7 @@ async def async_validate_hostname(
     result = False
     with contextlib.suppress(DNSError):
         result = bool(
-            await aiodns.DNSResolver(
+            await aiodns.DNSResolver(  # type: ignore[call-overload]
                 nameservers=[resolver], udp_port=port, tcp_port=port
             ).query(hostname, qtype)
         )

@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/dnsip",
   "iot_class": "cloud_polling",
-  "requirements": ["aiodns==3.3.0"]
+  "requirements": ["aiodns==3.4.0"]
 }

@@ -106,7 +106,7 @@ class WanIpSensor(SensorEntity):
     async def async_update(self) -> None:
        """Get the current DNS IP address for hostname."""
         try:
-            response = await self.resolver.query(self.hostname, self.querytype)
+            response = await self.resolver.query(self.hostname, self.querytype)  # type: ignore[call-overload]
         except DNSError as err:
             _LOGGER.warning("Exception while resolving host: %s", err)
             response = None

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ecovacs",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.10", "deebot-client==13.1.0"]
+  "requirements": ["py-sucks==0.9.10", "deebot-client==13.2.1"]
 }

@@ -6,7 +6,8 @@ from collections.abc import Callable
 from dataclasses import dataclass
 from typing import Any, Generic

-from deebot_client.capabilities import CapabilityEvent, CapabilityLifeSpan
+from deebot_client.capabilities import CapabilityEvent, CapabilityLifeSpan, DeviceType
 from deebot_client.device import Device
 from deebot_client.events import (
     BatteryEvent,
     ErrorEvent,
@@ -34,7 +35,7 @@ from homeassistant.const import (
     UnitOfArea,
     UnitOfTime,
 )
-from homeassistant.core import HomeAssistant
+from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.typing import StateType

@@ -59,6 +60,15 @@ class EcovacsSensorEntityDescription(
     """Ecovacs sensor entity description."""

     value_fn: Callable[[EventT], StateType]
+    native_unit_of_measurement_fn: Callable[[DeviceType], str | None] | None = None
+
+
+@callback
+def get_area_native_unit_of_measurement(device_type: DeviceType) -> str | None:
+    """Get the area native unit of measurement based on device type."""
+    if device_type is DeviceType.MOWER:
+        return UnitOfArea.SQUARE_CENTIMETERS
+    return UnitOfArea.SQUARE_METERS


 ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = (
@@ -68,7 +78,9 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = (
         capability_fn=lambda caps: caps.stats.clean,
         value_fn=lambda e: e.area,
         translation_key="stats_area",
-        native_unit_of_measurement=UnitOfArea.SQUARE_METERS,
+        device_class=SensorDeviceClass.AREA,
+        native_unit_of_measurement_fn=get_area_native_unit_of_measurement,
+        suggested_unit_of_measurement=UnitOfArea.SQUARE_METERS,
     ),
     EcovacsSensorEntityDescription[StatsEvent](
         key="stats_time",
@@ -85,6 +97,7 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = (
         value_fn=lambda e: e.area,
         key="total_stats_area",
         translation_key="total_stats_area",
+        device_class=SensorDeviceClass.AREA,
         native_unit_of_measurement=UnitOfArea.SQUARE_METERS,
         state_class=SensorStateClass.TOTAL_INCREASING,
     ),
@@ -249,6 +262,27 @@ class EcovacsSensor(

     entity_description: EcovacsSensorEntityDescription

+    def __init__(
+        self,
+        device: Device,
+        capability: CapabilityEvent,
+        entity_description: EcovacsSensorEntityDescription,
+        **kwargs: Any,
+    ) -> None:
+        """Initialize entity."""
+        super().__init__(device, capability, entity_description, **kwargs)
+        if (
+            entity_description.native_unit_of_measurement_fn
+            and (
+                native_unit_of_measurement
+                := entity_description.native_unit_of_measurement_fn(
+                    device.capabilities.device_type
+                )
+            )
+            is not None
+        ):
+            self._attr_native_unit_of_measurement = native_unit_of_measurement
+
     async def async_added_to_hass(self) -> None:
         """Set up the event listeners now that hass is ready."""
         await super().async_added_to_hass()

@@ -6,5 +6,5 @@
   "iot_class": "local_push",
   "loggers": ["sense_energy"],
   "quality_scale": "internal",
-  "requirements": ["sense-energy==0.13.7"]
+  "requirements": ["sense-energy==0.13.8"]
 }

@@ -64,7 +64,7 @@ async def _get_fixture_collection(envoy: Envoy, serial: str) -> dict[str, Any]:
         "/ivp/ensemble/generator",
         "/ivp/meters",
         "/ivp/meters/readings",
-        "/home,",
+        "/home",
     ]

     for end_point in end_points:

@@ -7,7 +7,7 @@
   "iot_class": "local_polling",
   "loggers": ["pyenphase"],
   "quality_scale": "platinum",
-  "requirements": ["pyenphase==1.26.0"],
+  "requirements": ["pyenphase==1.26.1"],
   "zeroconf": [
     {
       "type": "_enphase-envoy._tcp.local."

@@ -17,7 +17,7 @@ DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS = False

 DEFAULT_PORT: Final = 6053

-STABLE_BLE_VERSION_STR = "2025.2.2"
+STABLE_BLE_VERSION_STR = "2025.5.0"
 STABLE_BLE_VERSION = AwesomeVersion(STABLE_BLE_VERSION_STR)
 PROJECT_URLS = {
     "esphome.bluetooth-proxy": "https://esphome.github.io/bluetooth-proxies/",

@@ -223,7 +223,6 @@ class EsphomeEntity(EsphomeBaseEntity, Generic[_InfoT, _StateT]):
         self._states = cast(dict[int, _StateT], entry_data.state[state_type])
-        assert entry_data.device_info is not None
-        device_info = entry_data.device_info
-        self._device_info = device_info
+        self._on_entry_data_changed()
         self._key = entity_info.key
         self._state_type = state_type
@@ -311,6 +310,11 @@ class EsphomeEntity(EsphomeBaseEntity, Generic[_InfoT, _StateT]):
     @callback
     def _on_entry_data_changed(self) -> None:
         entry_data = self._entry_data
+        # Update the device info since it can change
+        # when the device is reconnected
+        if TYPE_CHECKING:
+            assert entry_data.device_info is not None
+        self._device_info = entry_data.device_info
         self._api_version = entry_data.api_version
         self._client = entry_data.client
         if self._device_info.has_deep_sleep:

@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "local_push",
   "loggers": ["pyfibaro"],
-  "requirements": ["pyfibaro==0.8.2"]
+  "requirements": ["pyfibaro==0.8.3"]
 }

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/forecast_solar",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["forecast-solar==4.1.0"]
+  "requirements": ["forecast-solar==4.2.0"]
 }

@@ -92,7 +92,7 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat

     available_main_ains = [
         ain
-        for ain, dev in data.devices.items()
+        for ain, dev in data.devices.items() | data.templates.items()
         if dev.device_and_unit_id[1] is None
     ]
     device_reg = dr.async_get(self.hass)

@@ -45,7 +45,15 @@ type FroniusConfigEntry = ConfigEntry[FroniusSolarNet]
 async def async_setup_entry(hass: HomeAssistant, entry: FroniusConfigEntry) -> bool:
     """Set up fronius from a config entry."""
     host = entry.data[CONF_HOST]
-    fronius = Fronius(async_get_clientsession(hass), host)
+    fronius = Fronius(
+        async_get_clientsession(
+            hass,
+            # Fronius Gen24 firmware 1.35.4-1 redirects to HTTPS with self-signed
+            # certificate. See https://github.com/home-assistant/core/issues/138881
+            verify_ssl=False,
+        ),
+        host,
+    )
     solar_net = FroniusSolarNet(hass, entry, fronius)
     await solar_net.init_devices()


@@ -35,7 +35,7 @@ async def validate_host(
     hass: HomeAssistant, host: str
 ) -> tuple[str, FroniusConfigEntryData]:
     """Validate the user input allows us to connect."""
-    fronius = Fronius(async_get_clientsession(hass), host)
+    fronius = Fronius(async_get_clientsession(hass, verify_ssl=False), host)

     try:
         datalogger_info: dict[str, Any]

@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20250507.0"]
+  "requirements": ["home-assistant-frontend==20250516.0"]
 }

@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/google",
   "iot_class": "cloud_polling",
   "loggers": ["googleapiclient"],
-  "requirements": ["gcal-sync==7.0.0", "oauth2client==4.1.3", "ical==9.2.0"]
+  "requirements": ["gcal-sync==7.0.1", "oauth2client==4.1.3", "ical==9.2.4"]
 }

@@ -254,11 +254,11 @@ async def google_generative_ai_config_option_schema(
         )
         for api_model in sorted(api_models, key=lambda x: x.display_name or "")
         if (
-            api_model.name != "models/gemini-1.0-pro"  # duplicate of gemini-pro
-            and api_model.display_name
+            api_model.display_name
+            and api_model.name
+            and api_model.supported_actions
             and "tts" not in api_model.name
             and "vision" not in api_model.name
-            and api_model.supported_actions
             and "generateContent" in api_model.supported_actions
         )
     ]

@@ -319,11 +319,10 @@ class GoogleGenerativeAIConversationEntity(
             tools.append(Tool(google_search=GoogleSearch()))

         model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
-        # Gemini 1.0 doesn't support system_instruction while 1.5 does.
-        # Assume future versions will support it (if not, the request fails with a
-        # clear message at which point we can fix).
+        # Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
         supports_system_instruction = (
-            "gemini-1.0" not in model_name and "gemini-pro" not in model_name
+            "gemma" not in model_name
+            and "gemini-2.0-flash-preview-image-generation" not in model_name
         )

         prompt_content = cast(

@@ -41,12 +41,12 @@
         },
         "data_description": {
           "prompt": "Instruct how the LLM should respond. This can be a template.",
-          "enable_google_search_tool": "Only works with \"No control\" in the \"Control Home Assistant\" setting. See docs for a workaround using it with \"Assist\"."
+          "enable_google_search_tool": "Only works if there is nothing selected in the \"Control Home Assistant\" setting. See docs for a workaround using it with \"Assist\"."
         }
       }
     },
     "error": {
-      "invalid_google_search_option": "Google Search cannot be enabled alongside any Assist capability, this can only be used when Assist is set to \"No control\"."
+      "invalid_google_search_option": "Google Search can only be enabled if nothing is selected in the \"Control Home Assistant\" setting."
     }
   },
   "services": {

@@ -60,6 +60,9 @@ class HistoryStats:
         self._start = start
         self._end = end

+        self._pending_events: list[Event[EventStateChangedData]] = []
+        self._query_count = 0
+
     async def async_update(
         self, event: Event[EventStateChangedData] | None
     ) -> HistoryStatsState:
@@ -85,6 +88,14 @@ class HistoryStats:
         utc_now = dt_util.utcnow()
         now_timestamp = floored_timestamp(utc_now)

+        # If we end up querying data from the recorder when we get triggered by a new state
+        # change event, it is possible this function could be reentered a second time before
+        # the first recorder query returns. In that case a second recorder query will be done
+        # and we need to hold the new event so that we can append it after the second query.
+        # Otherwise the event will be dropped.
+        if event:
+            self._pending_events.append(event)
+
         if current_period_start_timestamp > now_timestamp:
             # History cannot tell the future
             self._history_current_period = []
@@ -113,15 +124,14 @@ class HistoryStats:
             start_changed = (
                 current_period_start_timestamp != previous_period_start_timestamp
             )
+            end_changed = current_period_end_timestamp != previous_period_end_timestamp
             if start_changed:
                 self._prune_history_cache(current_period_start_timestamp)

             new_data = False
             if event and (new_state := event.data["new_state"]) is not None:
-                if (
-                    current_period_start_timestamp
-                    <= floored_timestamp(new_state.last_changed)
-                    <= current_period_end_timestamp
-                ):
+                if current_period_start_timestamp <= floored_timestamp(
+                    new_state.last_changed
+                ):
                     self._history_current_period.append(
                         HistoryState(new_state.state, new_state.last_changed_timestamp)
@@ -131,26 +141,31 @@ class HistoryStats:
                 not new_data
                 and current_period_end_timestamp < now_timestamp
                 and not start_changed
+                and not end_changed
             ):
                 # If period has not changed and current time after the period end...
                 # Don't compute anything as the value cannot have changed
                 return self._state
         else:
             await self._async_history_from_db(
-                current_period_start_timestamp, current_period_end_timestamp
+                current_period_start_timestamp, now_timestamp
             )
-            if event and (new_state := event.data["new_state"]) is not None:
-                if (
-                    current_period_start_timestamp
-                    <= floored_timestamp(new_state.last_changed)
-                    <= current_period_end_timestamp
-                ):
-                    self._history_current_period.append(
-                        HistoryState(new_state.state, new_state.last_changed_timestamp)
-                    )
+            for pending_event in self._pending_events:
+                if (new_state := pending_event.data["new_state"]) is not None:
+                    if current_period_start_timestamp <= floored_timestamp(
+                        new_state.last_changed
+                    ):
+                        self._history_current_period.append(
+                            HistoryState(
+                                new_state.state, new_state.last_changed_timestamp
+                            )
+                        )

             self._has_recorder_data = True

+        if self._query_count == 0:
+            self._pending_events.clear()
+
         seconds_matched, match_count = self._async_compute_seconds_and_changes(
             now_timestamp,
             current_period_start_timestamp,
@@ -165,12 +180,16 @@ class HistoryStats:
         current_period_end_timestamp: float,
     ) -> None:
         """Update history data for the current period from the database."""
-        instance = get_instance(self.hass)
-        states = await instance.async_add_executor_job(
-            self._state_changes_during_period,
-            current_period_start_timestamp,
-            current_period_end_timestamp,
-        )
+        self._query_count += 1
+        try:
+            instance = get_instance(self.hass)
+            states = await instance.async_add_executor_job(
+                self._state_changes_during_period,
+                current_period_start_timestamp,
+                current_period_end_timestamp,
+            )
+        finally:
+            self._query_count -= 1
         self._history_current_period = [
             HistoryState(state.state, state.last_changed.timestamp())
             for state in states
@@ -208,6 +227,9 @@ class HistoryStats:
             current_state_matches = history_state.state in self._entity_states
             state_change_timestamp = history_state.last_changed

+            if math.floor(state_change_timestamp) > end_timestamp:
+                break
+
             if math.floor(state_change_timestamp) > now_timestamp:
                 # Shouldn't count states that are in the future
                 _LOGGER.debug(
@@ -215,7 +237,7 @@ class HistoryStats:
                     state_change_timestamp,
                     now_timestamp,
                 )
-                continue
+                break

         if previous_state_matches:
             elapsed += state_change_timestamp - last_state_change_timestamp

@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/holiday",
   "iot_class": "local_polling",
-  "requirements": ["holidays==0.70", "babel==2.15.0"]
+  "requirements": ["holidays==0.73", "babel==2.15.0"]
 }

@@ -234,7 +234,7 @@
       "consumer_products_coffee_maker_program_coffee_world_black_eye": "Black eye",
       "consumer_products_coffee_maker_program_coffee_world_dead_eye": "Dead eye",
       "consumer_products_coffee_maker_program_beverage_hot_water": "Hot water",
-      "dishcare_dishwasher_program_pre_rinse": "Pre_rinse",
+      "dishcare_dishwasher_program_pre_rinse": "Pre-rinse",
       "dishcare_dishwasher_program_auto_1": "Auto 1",
       "dishcare_dishwasher_program_auto_2": "Auto 2",
       "dishcare_dishwasher_program_auto_3": "Auto 3",
@@ -252,7 +252,7 @@
       "dishcare_dishwasher_program_intensiv_power": "Intensive power",
       "dishcare_dishwasher_program_magic_daily": "Magic daily",
       "dishcare_dishwasher_program_super_60": "Super 60ºC",
-      "dishcare_dishwasher_program_kurz_60": "Kurz 60ºC",
+      "dishcare_dishwasher_program_kurz_60": "Speed 60ºC",
       "dishcare_dishwasher_program_express_sparkle_65": "Express sparkle 65ºC",
       "dishcare_dishwasher_program_machine_care": "Machine care",
       "dishcare_dishwasher_program_steam_fresh": "Steam fresh",

@@ -90,16 +90,17 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
             minor_version=2,
         )

-    if config_entry.minor_version == 2:
-        # Add a `firmware_version` key
+    if config_entry.minor_version <= 3:
+        # Add a `firmware_version` key if it doesn't exist to handle entries created
+        # with minor version 1.3 where the firmware version was not set.
         hass.config_entries.async_update_entry(
             config_entry,
             data={
                 **config_entry.data,
-                FIRMWARE_VERSION: None,
+                FIRMWARE_VERSION: config_entry.data.get(FIRMWARE_VERSION),
             },
             version=1,
-            minor_version=3,
+            minor_version=4,
         )

     _LOGGER.debug(

@@ -62,7 +62,7 @@ class HomeAssistantYellowConfigFlow(BaseFirmwareConfigFlow, domain=DOMAIN):
     """Handle a config flow for Home Assistant Yellow."""

     VERSION = 1
-    MINOR_VERSION = 3
+    MINOR_VERSION = 4

     def __init__(self, *args: Any, **kwargs: Any) -> None:
         """Instantiate config flow."""
@@ -116,6 +116,11 @@ class HomeAssistantYellowConfigFlow(BaseFirmwareConfigFlow, domain=DOMAIN):
                     if self._probed_firmware_info is not None
                     else ApplicationType.EZSP
                 ).value,
+                FIRMWARE_VERSION: (
+                    self._probed_firmware_info.firmware_version
+                    if self._probed_firmware_info is not None
+                    else None
+                ),
             },
         )

@@ -8,7 +8,13 @@ from pyhap.const import CATEGORY_AIR_PURIFIER
 from pyhap.service import Service
 from pyhap.util import callback as pyhap_callback

-from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN
+from homeassistant.const import (
+    ATTR_UNIT_OF_MEASUREMENT,
+    STATE_ON,
+    STATE_UNAVAILABLE,
+    STATE_UNKNOWN,
+    UnitOfTemperature,
+)
 from homeassistant.core import (
     Event,
     EventStateChangedData,
@@ -43,7 +49,12 @@ from .const import (
     THRESHOLD_FILTER_CHANGE_NEEDED,
 )
 from .type_fans import ATTR_PRESET_MODE, CHAR_ROTATION_SPEED, Fan
-from .util import cleanup_name_for_homekit, convert_to_float, density_to_air_quality
+from .util import (
+    cleanup_name_for_homekit,
+    convert_to_float,
+    density_to_air_quality,
+    temperature_to_homekit,
+)

 _LOGGER = logging.getLogger(__name__)

@@ -345,8 +356,13 @@ class AirPurifier(Fan):
         ):
             return

+        unit = new_state.attributes.get(
+            ATTR_UNIT_OF_MEASUREMENT, UnitOfTemperature.CELSIUS
+        )
+        current_temperature = temperature_to_homekit(current_temperature, unit)
+
         _LOGGER.debug(
-            "%s: Linked temperature sensor %s changed to %d",
+            "%s: Linked temperature sensor %s changed to %d °C",
             self.entity_id,
             self.linked_temperature_sensor,
             current_temperature,

@@ -1,8 +1,11 @@
 """Support for HomematicIP Cloud events."""

+from collections.abc import Callable
+from dataclasses import dataclass
 from typing import TYPE_CHECKING

 from homematicip.base.channel_event import ChannelEvent
+from homematicip.base.functionalChannels import FunctionalChannel
 from homematicip.device import Device

 from homeassistant.components.event import (
@@ -23,6 +26,9 @@ from .hap import HomematicipHAP
 class HmipEventEntityDescription(EventEntityDescription):
     """Description of a HomematicIP Cloud event."""

+    channel_event_types: list[str] | None = None
+    channel_selector_fn: Callable[[FunctionalChannel], bool] | None = None
+

 EVENT_DESCRIPTIONS = {
     "doorbell": HmipEventEntityDescription(
@@ -30,6 +36,8 @@ EVENT_DESCRIPTIONS = {
         translation_key="doorbell",
         device_class=EventDeviceClass.DOORBELL,
         event_types=["ring"],
+        channel_event_types=["DOOR_BELL_SENSOR_EVENT"],
+        channel_selector_fn=lambda channel: channel.channelRole == "DOOR_BELL_INPUT",
     ),
 }

@@ -41,24 +49,29 @@ async def async_setup_entry(
 ) -> None:
     """Set up the HomematicIP cover from a config entry."""
     hap = hass.data[DOMAIN][config_entry.unique_id]
+    entities: list[HomematicipGenericEntity] = []

-    async_add_entities(
+    entities.extend(
         HomematicipDoorBellEvent(
             hap,
             device,
             channel.index,
-            EVENT_DESCRIPTIONS["doorbell"],
+            description,
         )
+        for description in EVENT_DESCRIPTIONS.values()
         for device in hap.home.devices
         for channel in device.functionalChannels
-        if channel.channelRole == "DOOR_BELL_INPUT"
+        if description.channel_selector_fn and description.channel_selector_fn(channel)
     )

+    async_add_entities(entities)
+

 class HomematicipDoorBellEvent(HomematicipGenericEntity, EventEntity):
     """Event class for HomematicIP doorbell events."""

-    _attr_device_class = EventDeviceClass.DOORBELL
+    entity_description: HmipEventEntityDescription

     def __init__(
         self,
@@ -86,9 +99,27 @@ class HomematicipDoorBellEvent(HomematicipGenericEntity, EventEntity):
     @callback
     def _async_handle_event(self, *args, **kwargs) -> None:
         """Handle the event fired by the functional channel."""
+        raised_channel_event = self._get_channel_event_from_args(*args)
+
+        if not self._should_raise(raised_channel_event):
+            return
+
         event_types = self.entity_description.event_types
         if TYPE_CHECKING:
             assert event_types is not None

         self._trigger_event(event_type=event_types[0])
         self.async_write_ha_state()
+
+    def _should_raise(self, event_type: str) -> bool:
+        """Check if the event should be raised."""
+        if self.entity_description.channel_event_types is None:
+            return False
+        return event_type in self.entity_description.channel_event_types
+
+    def _get_channel_event_from_args(self, *args) -> str:
+        """Get the channel event."""
+        if isinstance(args[0], ChannelEvent):
+            return args[0].channelEventType
+
+        return ""

@@ -110,14 +110,14 @@ class AutomowerLawnMowerEntity(AutomowerAvailableEntity, LawnMowerEntity):
         mower_attributes = self.mower_attributes
         if mower_attributes.mower.state in PAUSED_STATES:
             return LawnMowerActivity.PAUSED
-        if mower_attributes.mower.state in MowerStates.IN_OPERATION:
-            if mower_attributes.mower.activity == MowerActivities.GOING_HOME:
-                return LawnMowerActivity.RETURNING
-            return LawnMowerActivity.MOWING
         if (mower_attributes.mower.state == "RESTRICTED") or (
             mower_attributes.mower.activity in DOCKED_ACTIVITIES
         ):
             return LawnMowerActivity.DOCKED
+        if mower_attributes.mower.state in MowerStates.IN_OPERATION:
+            if mower_attributes.mower.activity == MowerActivities.GOING_HOME:
+                return LawnMowerActivity.RETURNING
+            return LawnMowerActivity.MOWING
         return LawnMowerActivity.ERROR

     @property

@@ -58,6 +58,7 @@ class INKBIRDActiveBluetoothProcessorCoordinator(
             update_method=self._async_on_update,
             needs_poll_method=self._async_needs_poll,
             poll_method=self._async_poll_data,
+            connectable=False,  # Polling only happens if active scanning is disabled
         )

     async def async_init(self) -> None:

@@ -5,7 +5,7 @@ from dataclasses import dataclass
 from typing import cast

 from pylamarzocco import LaMarzoccoMachine
-from pylamarzocco.const import BackFlushStatus, MachineState, WidgetType
+from pylamarzocco.const import BackFlushStatus, MachineState, ModelName, WidgetType
 from pylamarzocco.models import BackFlush, MachineStatus

 from homeassistant.components.binary_sensor import (
@@ -66,6 +66,9 @@ ENTITIES: tuple[LaMarzoccoBinarySensorEntityDescription, ...] = (
             is BackFlushStatus.REQUESTED
         ),
         entity_category=EntityCategory.DIAGNOSTIC,
+        supported_fn=lambda coordinator: (
+            coordinator.device.dashboard.model_name != ModelName.GS3_MP
+        ),
     ),
     LaMarzoccoBinarySensorEntityDescription(
         key="websocket_connected",

@@ -37,5 +37,5 @@
   "iot_class": "cloud_push",
   "loggers": ["pylamarzocco"],
   "quality_scale": "platinum",
-  "requirements": ["pylamarzocco==2.0.0"]
+  "requirements": ["pylamarzocco==2.0.4"]
 }

@@ -132,17 +132,18 @@ async def async_setup_entry(
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up sensor entities."""
-    coordinator = entry.runtime_data.config_coordinator
+    config_coordinator = entry.runtime_data.config_coordinator
+    statistic_coordinators = entry.runtime_data.statistics_coordinator

     entities = [
-        LaMarzoccoSensorEntity(coordinator, description)
+        LaMarzoccoSensorEntity(config_coordinator, description)
         for description in ENTITIES
-        if description.supported_fn(coordinator)
+        if description.supported_fn(config_coordinator)
     ]
     entities.extend(
-        LaMarzoccoStatisticSensorEntity(coordinator, description)
+        LaMarzoccoStatisticSensorEntity(statistic_coordinators, description)
         for description in STATISTIC_ENTITIES
-        if description.supported_fn(coordinator)
+        if description.supported_fn(statistic_coordinators)
     )
     async_add_entities(entities)


@@ -4,7 +4,7 @@ import asyncio
 from dataclasses import dataclass
 from typing import Any

-from pylamarzocco.const import FirmwareType, UpdateCommandStatus
+from pylamarzocco.const import FirmwareType, UpdateStatus
 from pylamarzocco.exceptions import RequestNotSuccessful

 from homeassistant.components.update import (
@@ -125,7 +125,7 @@ class LaMarzoccoUpdateEntity(LaMarzoccoEntity, UpdateEntity):
             await self.coordinator.device.update_firmware()
             while (
                 update_progress := await self.coordinator.device.get_firmware()
-            ).command_status is UpdateCommandStatus.IN_PROGRESS:
+            ).command_status is UpdateStatus.IN_PROGRESS:
                 if counter >= MAX_UPDATE_WAIT:
                     _raise_timeout_error()
                 self._attr_update_percentage = update_progress.progress_percentage

@@ -7,6 +7,6 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["linkplay"],
-  "requirements": ["python-linkplay==0.2.4"],
+  "requirements": ["python-linkplay==0.2.5"],
   "zeroconf": ["_linkplay._tcp.local."]
 }

@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/local_calendar",
   "iot_class": "local_polling",
   "loggers": ["ical"],
-  "requirements": ["ical==9.2.0"]
+  "requirements": ["ical==9.2.4"]
 }

@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/local_todo",
   "iot_class": "local_polling",
-  "requirements": ["ical==9.2.0"]
+  "requirements": ["ical==9.2.4"]
 }

@@ -9,7 +9,11 @@
     "condition_type": {
       "is_locked": "{entity_name} is locked",
       "is_unlocked": "{entity_name} is unlocked",
-      "is_open": "{entity_name} is open"
+      "is_open": "{entity_name} is open",
+      "is_jammed": "{entity_name} is jammed",
+      "is_locking": "{entity_name} is locking",
+      "is_unlocking": "{entity_name} is unlocking",
+      "is_opening": "{entity_name} is opening"
     },
     "trigger_type": {
       "locked": "{entity_name} locked",

@@ -475,7 +475,7 @@ class MatrixBot:
         file_stat = await aiofiles.os.stat(image_path)

         _LOGGER.debug("Uploading file from path, %s", image_path)
-        async with aiofiles.open(image_path, "r+b") as image_file:
+        async with aiofiles.open(image_path, "rb") as image_file:
             response, _ = await self._client.upload(
                 image_file,
                 content_type=mime_type,

@@ -8,6 +8,6 @@
   "iot_class": "calculated",
   "loggers": ["yt_dlp"],
   "quality_scale": "internal",
-  "requirements": ["yt-dlp[default]==2025.03.31"],
+  "requirements": ["yt-dlp[default]==2025.05.22"],
   "single_config_entry": true
 }

@@ -26,7 +26,7 @@ from .const import DOMAIN

 _LOGGER = logging.getLogger(__name__)

-TWO_YEARS = 2 * 365 * 24
+TWO_YEARS_DAYS = 2 * 365


 class MillDataUpdateCoordinator(DataUpdateCoordinator):
@@ -91,7 +91,7 @@ class MillHistoricDataUpdateCoordinator(DataUpdateCoordinator):
         if not last_stats or not last_stats.get(statistic_id):
             hourly_data = (
                 await self.mill_data_connection.fetch_historic_energy_usage(
-                    dev_id, n_days=TWO_YEARS
+                    dev_id, n_days=TWO_YEARS_DAYS
                 )
             )
             hourly_data = dict(sorted(hourly_data.items(), key=lambda x: x[0]))

@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/mill",
   "iot_class": "local_polling",
   "loggers": ["mill", "mill_local"],
-  "requirements": ["millheater==0.12.3", "mill-local==0.3.0"]
+  "requirements": ["millheater==0.12.5", "mill-local==0.3.0"]
 }

@@ -2063,7 +2063,7 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow):
         entities = [
             SelectOptionDict(
                 value=key,
-                label=f"{device_name} {component_data.get(CONF_NAME, '-')}"
+                label=f"{device_name} {component_data.get(CONF_NAME, '-') or '-'}"
                 f" ({component_data[CONF_PLATFORM]})",
             )
             for key, component_data in self._subentry_data["components"].items()
@@ -2295,7 +2295,8 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow):
         self._component_id = None
         mqtt_device = self._subentry_data[CONF_DEVICE][CONF_NAME]
         mqtt_items = ", ".join(
-            f"{mqtt_device} {component_data.get(CONF_NAME, '-')} ({component_data[CONF_PLATFORM]})"
+            f"{mqtt_device} {component_data.get(CONF_NAME, '-') or '-'} "
+            f"({component_data[CONF_PLATFORM]})"
             for component_data in self._subentry_data["components"].values()
         )
         menu_options = [

@@ -248,19 +248,22 @@ class NetatmoThermostat(NetatmoRoomEntity, ClimateEntity):
         if self.home.entity_id != data["home_id"]:
             return

-        if data["event_type"] == EVENT_TYPE_SCHEDULE and "schedule_id" in data:
-            self._selected_schedule = getattr(
-                self.hass.data[DOMAIN][DATA_SCHEDULES][self.home.entity_id].get(
-                    data["schedule_id"]
-                ),
-                "name",
-                None,
-            )
-            self._attr_extra_state_attributes[ATTR_SELECTED_SCHEDULE] = (
-                self._selected_schedule
-            )
-            self.async_write_ha_state()
-            self.data_handler.async_force_update(self._signal_name)
+        if data["event_type"] == EVENT_TYPE_SCHEDULE:
+            # handle schedule change
+            if "schedule_id" in data:
+                self._selected_schedule = getattr(
+                    self.hass.data[DOMAIN][DATA_SCHEDULES][self.home.entity_id].get(
+                        data["schedule_id"]
+                    ),
+                    "name",
+                    None,
+                )
+                self._attr_extra_state_attributes[ATTR_SELECTED_SCHEDULE] = (
+                    self._selected_schedule
+                )
+                self.async_write_ha_state()
+                self.data_handler.async_force_update(self._signal_name)
+            # ignore other schedule events
+            return

         home = data["home"]

@@ -150,7 +150,11 @@ class NetgearRouter:
             if device_entry.via_device_id is None:
                 continue  # do not add the router itself

-            device_mac = dict(device_entry.connections)[dr.CONNECTION_NETWORK_MAC]
+            device_mac = dict(device_entry.connections).get(
+                dr.CONNECTION_NETWORK_MAC
+            )
+            if device_mac is None:
+                continue
             self.devices[device_mac] = {
                 "mac": device_mac,
                 "name": device_entry.name,

@@ -7,5 +7,5 @@
   "iot_class": "cloud_push",
   "loggers": ["aionfty"],
   "quality_scale": "bronze",
-  "requirements": ["aiontfy==0.5.1"]
+  "requirements": ["aiontfy==0.5.2"]
 }

@@ -181,11 +181,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     session = aiohttp.ClientSession(connector=connector)

     @callback
-    def _async_close_websession(event: Event) -> None:
+    def _async_close_websession(event: Event | None = None) -> None:
         """Close websession."""
         session.detach()

-    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_close_websession)
+    entry.async_on_unload(_async_close_websession)
+    entry.async_on_unload(
+        hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, _async_close_websession)
+    )

     client = OctoprintClient(
         host=entry.data[CONF_HOST],

@@ -1,6 +1,7 @@
 {
   "domain": "onedrive",
   "name": "OneDrive",
+  "after_dependencies": ["cloud"],
   "codeowners": ["@zweckj"],
   "config_flow": true,
   "dependencies": ["application_credentials"],

@@ -140,7 +140,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
             content.append(
                 ResponseInputImageParam(
                     type="input_image",
-                    file_id=filename,
                     image_url=f"data:{mime_type};base64,{base64_file}",
                     detail="auto",
                 )

@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/opower",
   "iot_class": "cloud_polling",
   "loggers": ["opower"],
-  "requirements": ["opower==0.12.0"]
+  "requirements": ["opower==0.12.1"]
 }

@@ -19,9 +19,7 @@ from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon
    MultiprotocolAddonManager,
    get_multiprotocol_addon_manager,
    is_multiprotocol_url,
    multi_pan_addon_using_device,
)
from homeassistant.components.homeassistant_yellow import RADIO_DEVICE as YELLOW_RADIO
from homeassistant.config_entries import SOURCE_USER
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError

@@ -34,10 +32,6 @@ if TYPE_CHECKING:

_LOGGER = logging.getLogger(__name__)

INFO_URL_SKY_CONNECT = (
    "https://skyconnect.home-assistant.io/multiprotocol-channel-missmatch"
)
INFO_URL_YELLOW = "https://yellow.home-assistant.io/multiprotocol-channel-missmatch"

INSECURE_NETWORK_KEYS = (
    # Thread web UI default

@@ -208,16 +202,12 @@ async def _warn_on_channel_collision(
        delete_issue()
        return

    yellow = await multi_pan_addon_using_device(hass, YELLOW_RADIO)
    learn_more_url = INFO_URL_YELLOW if yellow else INFO_URL_SKY_CONNECT

    ir.async_create_issue(
        hass,
        DOMAIN,
        f"otbr_zha_channel_collision_{otbrdata.entry_id}",
        is_fixable=False,
        is_persistent=False,
        learn_more_url=learn_more_url,
        severity=ir.IssueSeverity.WARNING,
        translation_key="otbr_zha_channel_collision",
        translation_placeholders={

|
||||
PLACEHOLDER_WEBHOOK_URL,
|
||||
)
|
||||
|
||||
AUTH_TOKEN_URL = "https://intercom.help/plaato/en/articles/5004720-auth_token"
|
||||
|
||||
|
||||
class PlaatoConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handles a Plaato config flow."""
|
||||
@@ -153,7 +155,10 @@ class PlaatoConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
step_id="api_method",
|
||||
data_schema=data_schema,
|
||||
errors=errors,
|
||||
description_placeholders={PLACEHOLDER_DEVICE_TYPE: device_type.name},
|
||||
description_placeholders={
|
||||
PLACEHOLDER_DEVICE_TYPE: device_type.name,
|
||||
"auth_token_url": AUTH_TOKEN_URL,
|
||||
},
|
||||
)
|
||||
|
||||
async def _get_webhook_id(self):
|
||||
|
||||
@@ -11,7 +11,7 @@
    },
    "api_method": {
      "title": "Select API method",
      "description": "To be able to query the API an `auth_token` is required which can be obtained by following [these](https://plaato.zendesk.com/hc/en-us/articles/360003234717-Auth-token) instructions\n\n Selected device: **{device_type}** \n\nIf you rather use the built in webhook method (Airlock only) please check the box below and leave Auth Token blank",
      "description": "To be able to query the API an 'auth token' is required which can be obtained by following [these instructions]({auth_token_url})\n\nSelected device: **{device_type}** \n\nIf you prefer to use the built-in webhook method (Airlock only) please check the box below and leave 'Auth token' blank",
      "data": {
        "use_webhook": "Use webhook",
        "token": "Paste Auth Token here"

@@ -6,7 +6,6 @@ import logging
from typing import Any

from pypoint import PointSession
from tempora.utc import fromtimestamp

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

@@ -62,7 +61,9 @@ class PointDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]
            or device.device_id not in self.device_updates
            or self.device_updates[device.device_id] < last_updated
        ):
            self.device_updates[device.device_id] = last_updated or fromtimestamp(0)
            self.device_updates[device.device_id] = (
                last_updated or datetime.fromtimestamp(0)
            )
            self.data[device.device_id] = {
                k: await device.sensor(k)
                for k in ("temperature", "humidity", "sound_pressure")

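The Point change drops the `tempora` helper and seeds unseen devices with the stdlib epoch instead. A tiny sketch of the fallback pattern; the comparison assumption is mine, not stated in the diff:

```python
from datetime import datetime

# Seed an unseen device with the epoch so any real last_updated timestamp
# compares as newer. Note datetime.fromtimestamp(0) is naive local time;
# this sketch assumes the values it is compared against come from the
# same kind of constructor.
last_updated = None  # e.g. the device has never reported
seeded = last_updated or datetime.fromtimestamp(0)
print(seeded)  # 1970-01-01 00:00:00, shifted by your local timezone offset
```
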
@@ -6,6 +6,7 @@ from contextlib import contextmanager, nullcontext
from datetime import timedelta
import logging
from typing import Any
import warnings

from qnapstats import QNAPStats
import urllib3

@@ -37,7 +38,8 @@ def suppress_insecure_request_warning():
    Was added in here to solve the following issue, not being solved upstream.
    https://github.com/colinodell/python-qnapstats/issues/96
    """
    with urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning):
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", urllib3.exceptions.InsecureRequestWarning)
        yield

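The QNAP fix matters because `urllib3.disable_warnings()` mutates the global warning filters and returns `None`, so it cannot be used as a `with` context; `warnings.catch_warnings()` is a real context manager that snapshots the filters and restores them on exit. A minimal sketch of the corrected helper:

```python
import contextlib
import warnings

import urllib3


@contextlib.contextmanager
def suppress_insecure_request_warning():
    """Suppress InsecureRequestWarning only inside this block."""
    with warnings.catch_warnings():  # snapshots filters, restores them on exit
        warnings.simplefilter("ignore", urllib3.exceptions.InsecureRequestWarning)
        yield


with suppress_insecure_request_warning():
    # Silenced here; the same warning outside the block would be shown.
    warnings.warn("self-signed cert", urllib3.exceptions.InsecureRequestWarning)
```
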
@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["ical"],
  "quality_scale": "silver",
  "requirements": ["ical==9.2.0"]
  "requirements": ["ical==9.2.4"]
}

@@ -23,7 +23,7 @@ from homeassistant.helpers import (
    device_registry as dr,
    entity_registry as er,
)
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

@@ -372,14 +372,55 @@ def migrate_entity_ids(
        else:
            new_device_id = f"{host.unique_id}_{device_uid[1]}"
        _LOGGER.debug(
            "Updating Reolink device UID from %s to %s", device_uid, new_device_id
            "Updating Reolink device UID from %s to %s",
            device_uid,
            new_device_id,
        )
        new_identifiers = {(DOMAIN, new_device_id)}
        device_reg.async_update_device(device.id, new_identifiers=new_identifiers)

    # Check for wrongfully combined entities in one device
    # Can be removed in HA 2025.12
    new_identifiers = device.identifiers.copy()
    remove_ids = False
    if (DOMAIN, host.unique_id) in device.identifiers:
        remove_ids = True  # NVR/Hub in identifiers, keep that one, remove others
    for old_id in device.identifiers:
        (old_device_uid, old_ch, old_is_chime) = get_device_uid_and_ch(old_id, host)
        if (
            not old_device_uid
            or old_device_uid[0] != host.unique_id
            or old_id[1] == host.unique_id
        ):
            continue
        if remove_ids:
            new_identifiers.remove(old_id)
        remove_ids = True  # after the first identifier, remove the others
    if new_identifiers != device.identifiers:
        _LOGGER.debug(
            "Updating Reolink device identifiers from %s to %s",
            device.identifiers,
            new_identifiers,
        )
        device_reg.async_update_device(device.id, new_identifiers=new_identifiers)
        break

    if ch is None or is_chime:
        continue  # Do not consider the NVR itself or chimes

    # Check for wrongfully added MAC of the NVR/Hub to the camera
    # Can be removed in HA 2025.12
    host_connnection = (CONNECTION_NETWORK_MAC, host.api.mac_address)
    if host_connnection in device.connections:
        new_connections = device.connections.copy()
        new_connections.remove(host_connnection)
        _LOGGER.debug(
            "Updating Reolink device connections from %s to %s",
            device.connections,
            new_connections,
        )
        device_reg.async_update_device(device.id, new_connections=new_connections)

    ch_device_ids[device.id] = ch
    if host.api.supported(ch, "UID") and device_uid[1] != host.api.camera_uid(ch):
        if host.api.supported(None, "UID"):
@@ -387,7 +428,9 @@ def migrate_entity_ids(
        else:
            new_device_id = f"{device_uid[0]}_{host.api.camera_uid(ch)}"
        _LOGGER.debug(
            "Updating Reolink device UID from %s to %s", device_uid, new_device_id
            "Updating Reolink device UID from %s to %s",
            device_uid,
            new_device_id,
        )
        new_identifiers = {(DOMAIN, new_device_id)}
        existing_device = device_reg.async_get_device(identifiers=new_identifiers)

@@ -198,7 +198,14 @@ class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity):
    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return super().available and self._host.api.camera_online(self._channel)
        if self.entity_description.always_available:
            return True

        return (
            super().available
            and self._host.api.camera_online(self._channel)
            and not self._host.api.baichuan.privacy_mode(self._channel)
        )

    def register_callback(self, callback_id: str, cmd_id: int) -> None:
        """Register callback for TCP push events."""

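The new `available` property short-circuits for entities flagged `always_available` and otherwise folds the channel's privacy mode into the online check. A reduced sketch of the same layering, using plain classes instead of the coordinator entity (the "privacy switch" rationale is my reading, not stated in the diff):

```python
class Entity:
    @property
    def available(self) -> bool:
        return True  # stand-in for the coordinator's connectivity state


class ChannelEntity(Entity):
    def __init__(self, always_available: bool, online: bool, privacy: bool) -> None:
        self.always_available = always_available
        self._online = online
        self._privacy = privacy

    @property
    def available(self) -> bool:
        if self.always_available:
            return True  # e.g. a privacy-mode switch must stay controllable
        return super().available and self._online and not self._privacy


print(ChannelEntity(False, True, True).available)  # False: hidden by privacy mode
print(ChannelEntity(True, True, True).available)   # True: exempt from the check
```
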
@@ -465,10 +465,11 @@ class ReolinkHost:
            wake = True
            self.last_wake = time()

        for channel in self._api.channels:
            if self._api.baichuan.privacy_mode(channel):
                await self._api.baichuan.get_privacy_mode(channel)
        if self._api.baichuan.privacy_mode():
            await self._api.baichuan.get_privacy_mode()
            if self._api.baichuan.privacy_mode():
                return  # API is shutdown, no need to check states
                return  # API is shutdown, no need to check states

        await self._api.get_states(cmd_list=self.update_cmd, wake=wake)

@@ -580,7 +581,12 @@ class ReolinkHost:
            )
            return

        await self._api.subscribe(self._webhook_url)
        try:
            await self._api.subscribe(self._webhook_url)
        except NotSupportedError as err:
            self._onvif_push_supported = False
            _LOGGER.debug(err)
            return

        _LOGGER.debug(
            "Host %s: subscribed successfully to webhook %s",

@@ -601,7 +607,11 @@ class ReolinkHost:
            return  # API is shutdown, no need to subscribe

        try:
            if self._onvif_push_supported and not self._api.baichuan.events_active:
            if (
                self._onvif_push_supported
                and not self._api.baichuan.events_active
                and self._cancel_tcp_push_check is None
            ):
                await self._renew(SubType.push)

            if self._onvif_long_poll_supported and self._long_poll_task is not None:

@@ -19,5 +19,5 @@
  "iot_class": "local_push",
  "loggers": ["reolink_aio"],
  "quality_scale": "platinum",
  "requirements": ["reolink-aio==0.13.2"]
  "requirements": ["reolink-aio==0.13.3"]
}

@@ -76,13 +76,18 @@ def get_store(hass: HomeAssistant, config_entry_id: str) -> Store[str]:


def get_device_uid_and_ch(
    device: dr.DeviceEntry, host: ReolinkHost
    device: dr.DeviceEntry | tuple[str, str], host: ReolinkHost
) -> tuple[list[str], int | None, bool]:
    """Get the channel and the split device_uid from a reolink DeviceEntry."""
    device_uid = []
    is_chime = False

    for dev_id in device.identifiers:
    if isinstance(device, dr.DeviceEntry):
        dev_ids = device.identifiers
    else:
        dev_ids = {device}

    for dev_id in dev_ids:
        if dev_id[0] == DOMAIN:
            device_uid = dev_id[1].split("_")
            if device_uid[0] == host.unique_id:

@@ -28,7 +28,7 @@ from roborock.version_a01_apis import RoborockClientA01
from roborock.web_api import RoborockApiClient
from vacuum_map_parser_base.config.color import ColorsPalette
from vacuum_map_parser_base.config.image_config import ImageConfig
from vacuum_map_parser_base.config.size import Sizes
from vacuum_map_parser_base.config.size import Size, Sizes
from vacuum_map_parser_base.map_data import MapData
from vacuum_map_parser_roborock.map_data_parser import RoborockMapDataParser

@@ -148,7 +148,13 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
        ]
        self.map_parser = RoborockMapDataParser(
            ColorsPalette(),
            Sizes({k: v * MAP_SCALE for k, v in Sizes.SIZES.items()}),
            Sizes(
                {
                    k: v * MAP_SCALE
                    for k, v in Sizes.SIZES.items()
                    if k != Size.MOP_PATH_WIDTH
                }
            ),
            drawables,
            ImageConfig(scale=MAP_SCALE),
            [],

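The Roborock parser change scales every drawing size except the mop path width, which is excluded with a comprehension filter. The shape of that filter, stripped of the library types (the keys and values here are invented):

```python
MAP_SCALE = 4
MOP_PATH_WIDTH = "mop_path_width"  # stand-in for Size.MOP_PATH_WIDTH

SIZES = {"path_width": 2, "obstacle_radius": 3, MOP_PATH_WIDTH: 1}

# Scale everything except the mop path width, which keeps its default.
scaled = {k: v * MAP_SCALE for k, v in SIZES.items() if k != MOP_PATH_WIDTH}
print(scaled)  # {'path_width': 8, 'obstacle_radius': 12}
```
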
@@ -20,5 +20,5 @@
  "documentation": "https://www.home-assistant.io/integrations/sense",
  "iot_class": "cloud_polling",
  "loggers": ["sense_energy"],
  "requirements": ["sense-energy==0.13.7"]
  "requirements": ["sense-energy==0.13.8"]
}

@@ -252,7 +252,7 @@ class SensiboClimate(SensiboDeviceBaseEntity, ClimateEntity):
        return features

    @property
    def current_humidity(self) -> int | None:
    def current_humidity(self) -> float | None:
        """Return the current humidity."""
        return self.device_data.humidity

@@ -15,5 +15,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["pysensibo"],
  "quality_scale": "platinum",
  "requirements": ["pysensibo==1.1.0"]
  "requirements": ["pysensibo==1.2.1"]
}

@@ -101,14 +101,25 @@ MOTION_SENSOR_TYPES: tuple[SensiboMotionSensorEntityDescription, ...] = (
        value_fn=lambda data: data.temperature,
    ),
)


def _pure_aqi(pm25_pure: PureAQI | None) -> str | None:
    """Return the Pure aqi name or None if unknown."""
    if pm25_pure:
        aqi_name = pm25_pure.name.lower()
        if aqi_name != "unknown":
            return aqi_name
    return None


PURE_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = (
    SensiboDeviceSensorEntityDescription(
        key="pm25",
        translation_key="pm25_pure",
        device_class=SensorDeviceClass.ENUM,
        value_fn=lambda data: data.pm25_pure.name.lower() if data.pm25_pure else None,
        value_fn=lambda data: _pure_aqi(data.pm25_pure),
        extra_fn=None,
        options=[aqi.name.lower() for aqi in PureAQI],
        options=[aqi.name.lower() for aqi in PureAQI if aqi.name != "UNKNOWN"],
    ),
    SensiboDeviceSensorEntityDescription(
        key="pure_sensitivity",

@@ -119,6 +130,7 @@ PURE_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = (
    FILTER_LAST_RESET_DESCRIPTION,
)


DEVICE_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = (
    SensiboDeviceSensorEntityDescription(
        key="timer_time",

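`_pure_aqi` keeps the ENUM sensor's reported value inside its declared options by mapping the library's UNKNOWN member to None, and the options list drops UNKNOWN the same way. A self-contained sketch with a stand-in enum (the member values are invented; only the filtering logic mirrors the diff):

```python
from enum import Enum


class PureAQI(Enum):  # stand-in for pysensibo's enum
    UNKNOWN = 0
    GOOD = 1
    MODERATE = 2


def _pure_aqi(pm25_pure: PureAQI | None) -> str | None:
    """Return the AQI name, or None when the value is missing or unknown."""
    if pm25_pure:  # enum members are truthy regardless of their value
        aqi_name = pm25_pure.name.lower()
        if aqi_name != "unknown":
            return aqi_name
    return None


options = [aqi.name.lower() for aqi in PureAQI if aqi.name != "UNKNOWN"]
print(options)                     # ['good', 'moderate']
print(_pure_aqi(PureAQI.UNKNOWN))  # None: never reports a value outside options
```
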
@@ -13,6 +13,7 @@ from aiohttp import ClientResponseError
from pysmartthings import (
    Attribute,
    Capability,
    Category,
    ComponentStatus,
    Device,
    DeviceEvent,

@@ -32,6 +33,7 @@ from homeassistant.const import (
    ATTR_HW_VERSION,
    ATTR_MANUFACTURER,
    ATTR_MODEL,
    ATTR_SUGGESTED_AREA,
    ATTR_SW_VERSION,
    ATTR_VIA_DEVICE,
    CONF_ACCESS_TOKEN,

@@ -193,6 +195,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry)
    }
    devices = await client.get_devices()
    for device in devices:
        if (
            (main_component := device.components.get(MAIN)) is not None
            and main_component.manufacturer_category is Category.BLUETOOTH_TRACKER
        ):
            device_status[device.device_id] = FullDevice(
                device=device,
                status={},
                online=True,
            )
            continue
        status = process_status(await client.get_device_status(device.device_id))
        online = await client.get_device_health(device.device_id)
        device_status[device.device_id] = FullDevice(

@@ -453,14 +465,24 @@ def create_devices(
                ATTR_SW_VERSION: viper.software_version,
            }
        )
    if (
        device_registry.async_get_device({(DOMAIN, device.device.device_id)})
        is None
    ):
        kwargs.update(
            {
                ATTR_SUGGESTED_AREA: (
                    rooms.get(device.device.room_id)
                    if device.device.room_id
                    else None
                )
            }
        )
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        identifiers={(DOMAIN, device.device.device_id)},
        configuration_url="https://account.smartthings.com",
        name=device.device.label,
        suggested_area=(
            rooms.get(device.device.room_id) if device.device.room_id else None
        ),
        **kwargs,
    )

@@ -58,7 +58,7 @@ OPERATING_STATE_TO_ACTION = {
}

AC_MODE_TO_STATE = {
    "auto": HVACMode.HEAT_COOL,
    "auto": HVACMode.AUTO,
    "cool": HVACMode.COOL,
    "dry": HVACMode.DRY,
    "coolClean": HVACMode.COOL,

@@ -66,10 +66,11 @@ AC_MODE_TO_STATE = {
    "heat": HVACMode.HEAT,
    "heatClean": HVACMode.HEAT,
    "fanOnly": HVACMode.FAN_ONLY,
    "fan": HVACMode.FAN_ONLY,
    "wind": HVACMode.FAN_ONLY,
}
STATE_TO_AC_MODE = {
    HVACMode.HEAT_COOL: "auto",
    HVACMode.AUTO: "auto",
    HVACMode.COOL: "cool",
    HVACMode.DRY: "dry",
    HVACMode.HEAT: "heat",

@@ -88,6 +89,7 @@ FAN_OSCILLATION_TO_SWING = {
}

WIND = "wind"
FAN = "fan"
WINDFREE = "windFree"

UNIT_MAP = {"C": UnitOfTemperature.CELSIUS, "F": UnitOfTemperature.FAHRENHEIT}

@@ -388,14 +390,15 @@ class SmartThingsAirConditioner(SmartThingsEntity, ClimateEntity):
            tasks.append(self.async_turn_on())

        mode = STATE_TO_AC_MODE[hvac_mode]
        # If new hvac_mode is HVAC_MODE_FAN_ONLY and AirConditioner support "wind" mode the AirConditioner new mode has to be "wind"
        # The conversion make the mode change working
        # The conversion is made only for device that wrongly has capability "wind" instead "fan_only"
        # If new hvac_mode is HVAC_MODE_FAN_ONLY and AirConditioner support "wind" or "fan" mode the AirConditioner
        # new mode has to be "wind" or "fan"
        if hvac_mode == HVACMode.FAN_ONLY:
            if WIND in self.get_attribute_value(
                Capability.AIR_CONDITIONER_MODE, Attribute.SUPPORTED_AC_MODES
            ):
                mode = WIND
            for fan_mode in (WIND, FAN):
                if fan_mode in self.get_attribute_value(
                    Capability.AIR_CONDITIONER_MODE, Attribute.SUPPORTED_AC_MODES
                ):
                    mode = fan_mode
                    break

        tasks.append(
            self.execute_device_command(

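The fan-only fix generalizes a single "wind" check into an ordered candidate loop, so devices exposing either "wind" or "fan" get a usable mode. A sketch of just the selection logic; the supported-mode lists here are invented:

```python
WIND, FAN = "wind", "fan"


def pick_fan_only_mode(supported_ac_modes: list[str], default: str = "fanOnly") -> str:
    """Prefer 'wind', then 'fan', for devices without a real fanOnly mode."""
    mode = default
    for fan_mode in (WIND, FAN):  # order matters: the first supported mode wins
        if fan_mode in supported_ac_modes:
            mode = fan_mode
            break
    return mode


print(pick_fan_only_mode(["cool", "wind"]))  # wind
print(pick_fan_only_mode(["cool", "fan"]))   # fan
print(pick_fan_only_mode(["cool", "heat"]))  # fanOnly (unchanged default)
```
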
@@ -30,5 +30,5 @@
  "iot_class": "cloud_push",
  "loggers": ["pysmartthings"],
  "quality_scale": "bronze",
  "requirements": ["pysmartthings==3.2.1"]
  "requirements": ["pysmartthings==3.2.3"]
}

@@ -584,7 +584,7 @@ CAPABILITY_TO_SENSORS: dict[
            device_class=SensorDeviceClass.TEMPERATURE,
            use_temperature_unit=True,
            # Set the value to None if it is 0 F (-17 C)
            value_fn=lambda value: None if value in {0, -17} else value,
            value_fn=lambda value: None if value in {-17, 0, 1} else value,
        )
    ]
},

@@ -53,7 +53,6 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
        try:
            if not await self._async_check_auth_required(user_input):
                info = await self.client.get_info()
                self._host = str(info.device_ip)
                self._device_name = str(info.hostname)

                if info.model not in Devices:

@@ -79,7 +78,6 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
        try:
            if not await self._async_check_auth_required(user_input):
                info = await self.client.get_info()
                self._host = str(info.device_ip)
                self._device_name = str(info.hostname)

                if info.model not in Devices:

@@ -7,5 +7,5 @@
  "iot_class": "cloud_push",
  "loggers": ["snoo"],
  "quality_scale": "bronze",
  "requirements": ["python-snoo==0.6.5"]
  "requirements": ["python-snoo==0.6.6"]
}

@@ -56,7 +56,8 @@
        "power": "Power button pressed",
        "status_requested": "Status requested",
        "sticky_white_noise_updated": "Sleepytime sounds updated",
        "config_change": "Config changed"
        "config_change": "Config changed",
        "restart": "Restart"
      }
    }
  }

@@ -151,6 +151,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry) -
                player_coordinator = SqueezeBoxPlayerUpdateCoordinator(
                    hass, entry, player, lms.uuid
                )
                await player_coordinator.async_refresh()
                known_players.append(player.player_id)
                async_dispatcher_send(
                    hass, SIGNAL_PLAYER_DISCOVERED, player_coordinator

@@ -12,5 +12,5 @@
  "documentation": "https://www.home-assistant.io/integrations/squeezebox",
  "iot_class": "local_polling",
  "loggers": ["pysqueezebox"],
  "requirements": ["pysqueezebox==0.12.0"]
  "requirements": ["pysqueezebox==0.12.1"]
}

@@ -6,7 +6,7 @@ from collections.abc import Callable
from datetime import datetime
import json
import logging
from typing import TYPE_CHECKING, Any
from typing import TYPE_CHECKING, Any, cast

from pysqueezebox import Server, async_discover
import voluptuous as vol

@@ -329,22 +329,22 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
    @property
    def media_title(self) -> str | None:
        """Title of current playing media."""
        return str(self._player.title)
        return cast(str | None, self._player.title)

    @property
    def media_channel(self) -> str | None:
        """Channel (e.g. webradio name) of current playing media."""
        return str(self._player.remote_title)
        return cast(str | None, self._player.remote_title)

    @property
    def media_artist(self) -> str | None:
        """Artist of current playing media."""
        return str(self._player.artist)
        return cast(str | None, self._player.artist)

    @property
    def media_album_name(self) -> str | None:
        """Album of current playing media."""
        return str(self._player.album)
        return cast(str | None, self._player.album)

    @property
    def repeat(self) -> RepeatMode:

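Wrapping the library values in `str()` turned a missing title into the literal string "None"; `typing.cast` keeps the declared `str | None` return type for the type checker without changing the runtime value. A tiny demonstration of the pitfall:

```python
from typing import cast

title = None  # e.g. nothing is playing

# The old pattern: str(None) yields the literal text "None".
print(str(title))               # 'None'  <- would show up in the UI

# The fix: cast is a no-op at runtime; it only informs the type checker.
print(cast(str | None, title))  # None
```
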
@@ -236,7 +236,7 @@ class SynologyDSMBackupAgent(BackupAgent):
            raise BackupAgentError("Failed to read meta data") from err

        try:
            files = await self._file_station.get_files(path=self.path)
            files = await self._file_station.get_files(path=self.path, limit=1000)
        except SynologyDSMAPIErrorException as err:
            raise BackupAgentError("Failed to list backups") from err

@@ -7,7 +7,7 @@
  "documentation": "https://www.home-assistant.io/integrations/synology_dsm",
  "iot_class": "local_polling",
  "loggers": ["synology_dsm"],
  "requirements": ["py-synologydsm-api==2.7.1"],
  "requirements": ["py-synologydsm-api==2.7.2"],
  "ssdp": [
    {
      "manufacturer": "Synology",

@@ -9,6 +9,7 @@ from tesla_fleet_api.teslemetry import EnergySite, Vehicle
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN

@@ -229,7 +230,7 @@ class TeslemetryWallConnectorEntity(TeslemetryEntity):
        super().__init__(data.live_coordinator, key)

    @property
    def _value(self) -> int:
    def _value(self) -> StateType:
        """Return a specific wall connector value from coordinator data."""
        return (
            self.coordinator.data.get("wall_connectors", {})

@@ -1763,8 +1763,7 @@ class TeslemetryWallConnectorSensorEntity(TeslemetryWallConnectorEntity, SensorE
    def _async_update_attrs(self) -> None:
        """Update the attributes of the sensor."""
        if self.exists:
            self._attr_native_value = self.entity_description.value_fn(self._value)
        self._attr_native_value = self.entity_description.value_fn(self._value)


class TeslemetryEnergyInfoSensorEntity(TeslemetryEnergyInfoEntity, SensorEntity):

@@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/tibber",
  "iot_class": "cloud_polling",
  "loggers": ["tibber"],
  "requirements": ["pyTibber==0.30.8"]
  "requirements": ["pyTibber==0.31.2"]
}

@@ -14,7 +14,7 @@
    "velbus-protocol"
  ],
  "quality_scale": "bronze",
  "requirements": ["velbus-aio==2025.4.2"],
  "requirements": ["velbus-aio==2025.5.0"],
  "usb": [
    {
      "vid": "10CF",

@@ -97,6 +97,7 @@ SKU_TO_BASE_DEVICE = {
    "LAP-V102S-AASR": "Vital100S",  # Alt ID Model Vital100S
    "LAP-V102S-WEU": "Vital100S",  # Alt ID Model Vital100S
    "LAP-V102S-WUK": "Vital100S",  # Alt ID Model Vital100S
    "LAP-V102S-AUSR": "Vital100S",  # Alt ID Model Vital100S
    "EverestAir": "EverestAir",
    "LAP-EL551S-AUS": "EverestAir",  # Alt ID Model EverestAir
    "LAP-EL551S-AEUR": "EverestAir",  # Alt ID Model EverestAir

@@ -2,6 +2,7 @@

from __future__ import annotations

import asyncio
from datetime import timedelta
from typing import Any

@@ -17,7 +18,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import WebControlProConfigEntry
from .entity import WebControlProGenericEntity

SCAN_INTERVAL = timedelta(seconds=5)
ACTION_DELAY = 0.5
SCAN_INTERVAL = timedelta(seconds=10)
PARALLEL_UPDATES = 1


@@ -56,6 +58,7 @@ class WebControlProCover(WebControlProGenericEntity, CoverEntity):
        """Move the cover to a specific position."""
        action = self._dest.action(self._drive_action_desc)
        await action(percentage=100 - kwargs[ATTR_POSITION])
        await asyncio.sleep(ACTION_DELAY)

    @property
    def is_closed(self) -> bool | None:

@@ -66,11 +69,13 @@ class WebControlProCover(WebControlProGenericEntity, CoverEntity):
        """Open the cover."""
        action = self._dest.action(self._drive_action_desc)
        await action(percentage=0)
        await asyncio.sleep(ACTION_DELAY)

    async def async_close_cover(self, **kwargs: Any) -> None:
        """Close the cover."""
        action = self._dest.action(self._drive_action_desc)
        await action(percentage=100)
        await asyncio.sleep(ACTION_DELAY)

    async def async_stop_cover(self, **kwargs: Any) -> None:
        """Stop the device if in motion."""

@@ -79,6 +84,7 @@ class WebControlProCover(WebControlProGenericEntity, CoverEntity):
            WMS_WebControl_pro_API_actionType.Stop,
        )
        await action()
        await asyncio.sleep(ACTION_DELAY)


class WebControlProAwning(WebControlProCover):

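Each command now pauses briefly before control returns, so the hub has time to reflect the new state before the next poll; together with the slower `SCAN_INTERVAL` and `PARALLEL_UPDATES = 1` this keeps requests serialized. A stripped-down sketch of the delay pattern (the command function is a stand-in, not the integration's API):

```python
import asyncio

ACTION_DELAY = 0.5  # give the hub time to apply the command before polling


async def send_command(name: str) -> None:
    print(f"sent {name}")
    # Without this pause, an immediately following state poll can still
    # observe the old position and briefly flip the entity state back.
    await asyncio.sleep(ACTION_DELAY)


async def main() -> None:
    await send_command("open")
    await send_command("close")


asyncio.run(main())
```
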
@@ -2,6 +2,7 @@

from __future__ import annotations

import asyncio
from datetime import timedelta
from typing import Any

@@ -16,7 +17,8 @@ from . import WebControlProConfigEntry
from .const import BRIGHTNESS_SCALE
from .entity import WebControlProGenericEntity

SCAN_INTERVAL = timedelta(seconds=5)
ACTION_DELAY = 0.5
SCAN_INTERVAL = timedelta(seconds=15)
PARALLEL_UPDATES = 1


@@ -54,11 +56,13 @@ class WebControlProLight(WebControlProGenericEntity, LightEntity):
        """Turn the light on."""
        action = self._dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch)
        await action(onOffState=True)
        await asyncio.sleep(ACTION_DELAY)

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the light off."""
        action = self._dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch)
        await action(onOffState=False)
        await asyncio.sleep(ACTION_DELAY)


class WebControlProDimmer(WebControlProLight):

@@ -87,3 +91,4 @@ class WebControlProDimmer(WebControlProLight):
        await action(
            percentage=brightness_to_value(BRIGHTNESS_SCALE, kwargs[ATTR_BRIGHTNESS])
        )
        await asyncio.sleep(ACTION_DELAY)

@@ -7,5 +7,5 @@
  "iot_class": "local_polling",
  "loggers": ["holidays"],
  "quality_scale": "internal",
  "requirements": ["holidays==0.70"]
  "requirements": ["holidays==0.73"]
}

@@ -419,13 +419,26 @@ class ZHADeviceProxy(EventBase):
    @callback
    def handle_zha_event(self, zha_event: ZHAEvent) -> None:
        """Handle a ZHA event."""
        if ATTR_UNIQUE_ID in zha_event.data:
            unique_id = zha_event.data[ATTR_UNIQUE_ID]

            # Client cluster handler unique IDs in the ZHA lib were disambiguated by
            # adding a suffix of `_CLIENT`. Unfortunately, this breaks existing
            # automations that match the `unique_id` key. This can be removed in a
            # future release with proper notice of a breaking change.
            unique_id = unique_id.removesuffix("_CLIENT")
        else:
            unique_id = zha_event.unique_id

        self.gateway_proxy.hass.bus.async_fire(
            ZHA_EVENT,
            {
                ATTR_DEVICE_IEEE: str(zha_event.device_ieee),
                ATTR_UNIQUE_ID: zha_event.unique_id,
                ATTR_DEVICE_ID: self.device_id,
                **zha_event.data,
                # The order of these keys is intentional, `zha_event.data` can contain
                # a `unique_id` key, which we explicitly replace
                ATTR_UNIQUE_ID: unique_id,
            },
        )

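Two stdlib details carry this fix: `str.removesuffix` strips the new `_CLIENT` disambiguator only when it is present, and in a dict literal a later duplicate key wins, which is why `ATTR_UNIQUE_ID` is re-set after the `**zha_event.data` expansion. Both in isolation, with made-up event data:

```python
ATTR_UNIQUE_ID = "unique_id"

# removesuffix is a no-op when the suffix is absent.
print("ep1_on_off_CLIENT".removesuffix("_CLIENT"))  # 'ep1_on_off'
print("ep1_on_off".removesuffix("_CLIENT"))         # 'ep1_on_off'

event_data = {"command": "toggle", ATTR_UNIQUE_ID: "ep1_on_off_CLIENT"}
unique_id = event_data[ATTR_UNIQUE_ID].removesuffix("_CLIENT")

# Later keys override earlier ones, so placing ATTR_UNIQUE_ID after the
# **event_data expansion replaces the suffixed value from the payload.
payload = {**event_data, ATTR_UNIQUE_ID: unique_id}
print(payload)  # {'command': 'toggle', 'unique_id': 'ep1_on_off'}
```
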
@@ -105,6 +105,7 @@ from .const import (
    CONF_USE_ADDON,
    DATA_CLIENT,
    DOMAIN,
    DRIVER_READY_TIMEOUT,
    EVENT_DEVICE_ADDED_TO_REGISTRY,
    EVENT_VALUE_UPDATED,
    LIB_LOGGER,

@@ -135,7 +136,6 @@ from .services import ZWaveServices

CONNECT_TIMEOUT = 10
DATA_DRIVER_EVENTS = "driver_events"
DRIVER_READY_TIMEOUT = 60

CONFIG_SCHEMA = vol.Schema(
    {

@@ -278,6 +278,39 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    # and we'll handle the clean up below.
    await driver_events.setup(driver)

    if (old_unique_id := entry.unique_id) is not None and old_unique_id != (
        new_unique_id := str(driver.controller.home_id)
    ):
        device_registry = dr.async_get(hass)
        controller_model = "Unknown model"
        if (
            (own_node := driver.controller.own_node)
            and (
                controller_device_entry := device_registry.async_get_device(
                    identifiers={get_device_id(driver, own_node)}
                )
            )
            and (model := controller_device_entry.model)
        ):
            controller_model = model
        async_create_issue(
            hass,
            DOMAIN,
            f"migrate_unique_id.{entry.entry_id}",
            data={
                "config_entry_id": entry.entry_id,
                "config_entry_title": entry.title,
                "controller_model": controller_model,
                "new_unique_id": new_unique_id,
                "old_unique_id": old_unique_id,
            },
            is_fixable=True,
            severity=IssueSeverity.ERROR,
            translation_key="migrate_unique_id",
        )
    else:
        async_delete_issue(hass, DOMAIN, f"migrate_unique_id.{entry.entry_id}")

    # If the listen task is already failed, we need to raise ConfigEntryNotReady
    if listen_task.done():
        listen_error, error_message = _get_listen_task_error(listen_task)

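The new guard compares the entry's stored unique ID against the controller's current home ID using assignment expressions, raising a repair issue on mismatch and clearing it otherwise. The core of that branch reduced to plain values (function name and return strings are illustrative only):

```python
def check_unique_id(stored: str | None, home_id: int) -> str:
    """Return which action the setup path would take for these IDs."""
    if (old_unique_id := stored) is not None and old_unique_id != (
        new_unique_id := str(home_id)
    ):
        return f"create issue: migrate {old_unique_id} -> {new_unique_id}"
    return "delete issue (IDs match, or the entry has no unique ID yet)"


print(check_unique_id("123456", 654321))  # create issue: migrate 123456 -> 654321
print(check_unique_id("654321", 654321))  # delete issue (IDs match, ...)
print(check_unique_id(None, 654321))      # delete issue (IDs match, ...)
```
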
Some files were not shown because too many files have changed in this diff.