forked from home-assistant/core
Compare commits: 2025.5.0b6...2025.5.2 (101 commits)
| SHA1 |
|---|
| f66feabaaf |
| 0ef098a9f3 |
| 02b028add3 |
| 34455f9743 |
| 8c4eec231f |
| 621a14d7cc |
| 4906e78a5c |
| 146e440d59 |
| e2ede3ed19 |
| b76ac68fb1 |
| 0691ad9362 |
| 715f116954 |
| 9f0db98745 |
| 0ba55c31e8 |
| 19b7cfbd4a |
| a9520888cf |
| f086f4a955 |
| a657964c25 |
| 543104b36c |
| bf1d2069e4 |
| e5e1c9fb05 |
| 4c4be88323 |
| 5a83627dc5 |
| 3123a7b168 |
| 8161ce6ea8 |
| d9cbd1b65f |
| b7c07209b8 |
| 6c3a4f17f0 |
| d82feb807f |
| c373fa9296 |
| 139b48440f |
| 9de1d3b143 |
| b69ebdaecb |
| f25e50b017 |
| a4a7601f9f |
| 41a503f76f |
| f1a3d62db2 |
| e465276464 |
| 47b45444eb |
| cf0911cc56 |
| da79d5b2e3 |
| 358b0c1c17 |
| 543348fe58 |
| 0635856761 |
| 081afe6034 |
| ca14322227 |
| a54816a6e5 |
| 27db4e90b5 |
| e9cc624d93 |
| 5a95f43992 |
| 36a35132c0 |
| 2fbc75f89b |
| 48aa6be889 |
| bde04bc47b |
| 7d163aa659 |
| 010b044379 |
| 00627b82e0 |
| 13aba6201e |
| f392e0c1c7 |
| 181eca6c82 |
| 196d923ac6 |
| 4ad387c967 |
| cb475bf153 |
| 47acceea08 |
| fd6fb7e3bc |
| 30f7e9b441 |
| a8beec2691 |
| 23244fb79f |
| e5c56629e2 |
| a793503c8a |
| 054c7a0adc |
| 6eb2d1aa7c |
| 619fdea5df |
| e8bdc7286e |
| 18f2b120ef |
| 43d8345821 |
| 999e930fc8 |
| d4e99efc46 |
| fb01a0a9f1 |
| 9556285c59 |
| 2d40b1ec75 |
| 7eb690b125 |
| a23644debc |
| c98ba7f6ba |
| aa2b61f133 |
| f85d4afe45 |
| b4ab9177b8 |
| e7c310ca58 |
| 85a83f2553 |
| d2e7baeb38 |
| 07b2ce28b1 |
| 35c90d9bde |
| a9632bd0ff |
| 983e134ae9 |
| e217532f9e |
| 1eeab28eec |
| 2a3bd45901 |
| d16453a465 |
| de63dddc96 |
| ccffe19611 |
| 806bcf47d9 |
@@ -39,11 +39,20 @@ async def async_setup_entry(
session = async_create_clientsession(
hass, timeout=ClientTimeout(connect=10, total=12 * 60 * 60)
)
container_client = ContainerClient(
account_url=f"https://{entry.data[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
container_name=entry.data[CONF_CONTAINER_NAME],
credential=entry.data[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=session),

def create_container_client() -> ContainerClient:
"""Create a ContainerClient."""

return ContainerClient(
account_url=f"https://{entry.data[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
container_name=entry.data[CONF_CONTAINER_NAME],
credential=entry.data[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=session),
)

# has a blocking call to open in cpython
container_client: ContainerClient = await hass.async_add_executor_job(
create_container_client
)

try:
@@ -27,9 +27,25 @@ _LOGGER = logging.getLogger(__name__)
class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for azure storage."""

def get_account_url(self, account_name: str) -> str:
"""Get the account URL."""
return f"https://{account_name}.blob.core.windows.net/"
async def get_container_client(
self, account_name: str, container_name: str, storage_account_key: str
) -> ContainerClient:
"""Get the container client.

ContainerClient has a blocking call to open in cpython
"""

session = async_get_clientsession(self.hass)

def create_container_client() -> ContainerClient:
return ContainerClient(
account_url=f"https://{account_name}.blob.core.windows.net/",
container_name=container_name,
credential=storage_account_key,
transport=AioHttpTransport(session=session),
)

return await self.hass.async_add_executor_job(create_container_client)

async def validate_config(
self, container_client: ContainerClient
@@ -58,11 +74,10 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
self._async_abort_entries_match(
{CONF_ACCOUNT_NAME: user_input[CONF_ACCOUNT_NAME]}
)
container_client = ContainerClient(
account_url=self.get_account_url(user_input[CONF_ACCOUNT_NAME]),
container_client = await self.get_container_client(
account_name=user_input[CONF_ACCOUNT_NAME],
container_name=user_input[CONF_CONTAINER_NAME],
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
storage_account_key=user_input[CONF_STORAGE_ACCOUNT_KEY],
)
errors = await self.validate_config(container_client)

@@ -99,12 +114,12 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
reauth_entry = self._get_reauth_entry()

if user_input is not None:
container_client = ContainerClient(
account_url=self.get_account_url(reauth_entry.data[CONF_ACCOUNT_NAME]),
container_client = await self.get_container_client(
account_name=reauth_entry.data[CONF_ACCOUNT_NAME],
container_name=reauth_entry.data[CONF_CONTAINER_NAME],
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
storage_account_key=user_input[CONF_STORAGE_ACCOUNT_KEY],
)

errors = await self.validate_config(container_client)
if not errors:
return self.async_update_reload_and_abort(
@@ -129,13 +144,10 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
reconfigure_entry = self._get_reconfigure_entry()

if user_input is not None:
container_client = ContainerClient(
account_url=self.get_account_url(
reconfigure_entry.data[CONF_ACCOUNT_NAME]
),
container_client = await self.get_container_client(
account_name=reconfigure_entry.data[CONF_ACCOUNT_NAME],
container_name=user_input[CONF_CONTAINER_NAME],
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
storage_account_key=user_input[CONF_STORAGE_ACCOUNT_KEY],
)
errors = await self.validate_config(container_client)
if not errors:
@@ -22,7 +22,7 @@ from . import util
from .agent import BackupAgent
from .const import DATA_MANAGER
from .manager import BackupManager
from .models import BackupNotFound
from .models import AgentBackup, BackupNotFound

@callback
@@ -85,7 +85,15 @@ class DownloadBackupView(HomeAssistantView):
request, headers, backup_id, agent_id, agent, manager
)
return await self._send_backup_with_password(
hass, request, headers, backup_id, agent_id, password, agent, manager
hass,
backup,
request,
headers,
backup_id,
agent_id,
password,
agent,
manager,
)
except BackupNotFound:
return Response(status=HTTPStatus.NOT_FOUND)
@@ -116,6 +124,7 @@ class DownloadBackupView(HomeAssistantView):
async def _send_backup_with_password(
self,
hass: HomeAssistant,
backup: AgentBackup,
request: Request,
headers: dict[istr, str],
backup_id: str,
@@ -144,7 +153,8 @@ class DownloadBackupView(HomeAssistantView):

stream = util.AsyncIteratorWriter(hass)
worker = threading.Thread(
target=util.decrypt_backup, args=[reader, stream, password, on_done, 0, []]
target=util.decrypt_backup,
args=[backup, reader, stream, password, on_done, 0, []],
)
try:
worker.start()
@@ -295,13 +295,26 @@ def validate_password_stream(
raise BackupEmpty

def _get_expected_archives(backup: AgentBackup) -> set[str]:
"""Get the expected archives in the backup."""
expected_archives = set()
if backup.homeassistant_included:
expected_archives.add("homeassistant")
for addon in backup.addons:
expected_archives.add(addon.slug)
for folder in backup.folders:
expected_archives.add(folder.value)
return expected_archives

def decrypt_backup(
backup: AgentBackup,
input_stream: IO[bytes],
output_stream: IO[bytes],
password: str | None,
on_done: Callable[[Exception | None], None],
minimum_size: int,
nonces: list[bytes],
nonces: NonceGenerator,
) -> None:
"""Decrypt a backup."""
error: Exception | None = None
@@ -315,7 +328,7 @@ def decrypt_backup(
fileobj=output_stream, mode="w|", bufsize=BUF_SIZE
) as output_tar,
):
_decrypt_backup(input_tar, output_tar, password)
_decrypt_backup(backup, input_tar, output_tar, password)
except (DecryptError, SecureTarError, tarfile.TarError) as err:
LOGGER.warning("Error decrypting backup: %s", err)
error = err
@@ -333,15 +346,18 @@ def decrypt_backup(

def _decrypt_backup(
backup: AgentBackup,
input_tar: tarfile.TarFile,
output_tar: tarfile.TarFile,
password: str | None,
) -> None:
"""Decrypt a backup."""
expected_archives = _get_expected_archives(backup)
for obj in input_tar:
# We compare with PurePath to avoid issues with different path separators,
# for example when backup.json is added as "./backup.json"
if PurePath(obj.name) == PurePath("backup.json"):
object_path = PurePath(obj.name)
if object_path == PurePath("backup.json"):
# Rewrite the backup.json file to indicate that the backup is decrypted
if not (reader := input_tar.extractfile(obj)):
raise DecryptError
@@ -352,7 +368,13 @@ def _decrypt_backup(
metadata_obj.size = len(updated_metadata_b)
output_tar.addfile(metadata_obj, BytesIO(updated_metadata_b))
continue
if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
prefix, _, suffix = object_path.name.partition(".")
if suffix not in ("tar", "tgz", "tar.gz"):
LOGGER.debug("Unknown file %s will not be decrypted", obj.name)
output_tar.addfile(obj, input_tar.extractfile(obj))
continue
if prefix not in expected_archives:
LOGGER.debug("Unknown inner tar file %s will not be decrypted", obj.name)
output_tar.addfile(obj, input_tar.extractfile(obj))
continue
istf = SecureTarFile(
@@ -371,12 +393,13 @@ def _decrypt_backup(

def encrypt_backup(
backup: AgentBackup,
input_stream: IO[bytes],
output_stream: IO[bytes],
password: str | None,
on_done: Callable[[Exception | None], None],
minimum_size: int,
nonces: list[bytes],
nonces: NonceGenerator,
) -> None:
"""Encrypt a backup."""
error: Exception | None = None
@@ -390,7 +413,7 @@ def encrypt_backup(
fileobj=output_stream, mode="w|", bufsize=BUF_SIZE
) as output_tar,
):
_encrypt_backup(input_tar, output_tar, password, nonces)
_encrypt_backup(backup, input_tar, output_tar, password, nonces)
except (EncryptError, SecureTarError, tarfile.TarError) as err:
LOGGER.warning("Error encrypting backup: %s", err)
error = err
@@ -408,17 +431,20 @@ def encrypt_backup(

def _encrypt_backup(
backup: AgentBackup,
input_tar: tarfile.TarFile,
output_tar: tarfile.TarFile,
password: str | None,
nonces: list[bytes],
nonces: NonceGenerator,
) -> None:
"""Encrypt a backup."""
inner_tar_idx = 0
expected_archives = _get_expected_archives(backup)
for obj in input_tar:
# We compare with PurePath to avoid issues with different path separators,
# for example when backup.json is added as "./backup.json"
if PurePath(obj.name) == PurePath("backup.json"):
object_path = PurePath(obj.name)
if object_path == PurePath("backup.json"):
# Rewrite the backup.json file to indicate that the backup is encrypted
if not (reader := input_tar.extractfile(obj)):
raise EncryptError
@@ -429,16 +455,21 @@ def _encrypt_backup(
metadata_obj.size = len(updated_metadata_b)
output_tar.addfile(metadata_obj, BytesIO(updated_metadata_b))
continue
if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
prefix, _, suffix = object_path.name.partition(".")
if suffix not in ("tar", "tgz", "tar.gz"):
LOGGER.debug("Unknown file %s will not be encrypted", obj.name)
output_tar.addfile(obj, input_tar.extractfile(obj))
continue
if prefix not in expected_archives:
LOGGER.debug("Unknown inner tar file %s will not be encrypted", obj.name)
continue
istf = SecureTarFile(
None, # Not used
gzip=False,
key=password_to_key(password) if password is not None else None,
mode="r",
fileobj=input_tar.extractfile(obj),
nonce=nonces[inner_tar_idx],
nonce=nonces.get(inner_tar_idx),
)
inner_tar_idx += 1
with istf.encrypt(obj) as encrypted:
@@ -456,17 +487,33 @@ class _CipherWorkerStatus:
writer: AsyncIteratorWriter

class NonceGenerator:
"""Generate nonces for encryption."""

def __init__(self) -> None:
"""Initialize the generator."""
self._nonces: dict[int, bytes] = {}

def get(self, index: int) -> bytes:
"""Get a nonce for the given index."""
if index not in self._nonces:
# Generate a new nonce for the given index
self._nonces[index] = os.urandom(16)
return self._nonces[index]

class _CipherBackupStreamer:
"""Encrypt or decrypt a backup."""

_cipher_func: Callable[
[
AgentBackup,
IO[bytes],
IO[bytes],
str | None,
Callable[[Exception | None], None],
int,
list[bytes],
NonceGenerator,
],
None,
]
@@ -484,7 +531,7 @@ class _CipherBackupStreamer:
self._hass = hass
self._open_stream = open_stream
self._password = password
self._nonces: list[bytes] = []
self._nonces = NonceGenerator()

def size(self) -> int:
"""Return the maximum size of the decrypted or encrypted backup."""
@@ -508,7 +555,15 @@ class _CipherBackupStreamer:
writer = AsyncIteratorWriter(self._hass)
worker = threading.Thread(
target=self._cipher_func,
args=[reader, writer, self._password, on_done, self.size(), self._nonces],
args=[
self._backup,
reader,
writer,
self._password,
on_done,
self.size(),
self._nonces,
],
)
worker_status = _CipherWorkerStatus(
done=asyncio.Event(), reader=reader, thread=worker, writer=writer
@@ -538,17 +593,6 @@ class DecryptedBackupStreamer(_CipherBackupStreamer):
class EncryptedBackupStreamer(_CipherBackupStreamer):
"""Encrypt a backup."""

def __init__(
self,
hass: HomeAssistant,
backup: AgentBackup,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
password: str | None,
) -> None:
"""Initialize."""
super().__init__(hass, backup, open_stream, password)
self._nonces = [os.urandom(16) for _ in range(self._num_tar_files())]

_cipher_func = staticmethod(encrypt_backup)

def backup(self) -> AgentBackup:
@@ -12,5 +12,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/bluemaestro",
"iot_class": "local_push",
"requirements": ["bluemaestro-ble==0.4.0"]
"requirements": ["bluemaestro-ble==0.4.1"]
}

@@ -77,6 +77,5 @@ async def async_unload_entry(hass: HomeAssistant, entry: ComelitConfigEntry) ->
coordinator = entry.runtime_data
if unload_ok := await hass.config_entries.async_unload_platforms(entry, platforms):
await coordinator.api.logout()
await coordinator.api.close()

return unload_ok

@@ -134,11 +134,9 @@ class ComelitClimateEntity(ComelitBridgeBaseEntity, ClimateEntity):
self._attr_current_temperature = values[0] / 10

self._attr_hvac_action = None
if _mode == ClimaComelitMode.OFF:
self._attr_hvac_action = HVACAction.OFF
if not _active:
self._attr_hvac_action = HVACAction.IDLE
if _mode in API_STATUS:
elif _mode in API_STATUS:
self._attr_hvac_action = API_STATUS[_mode]["hvac_action"]

self._attr_hvac_mode = None

@@ -73,7 +73,6 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
) from err
finally:
await api.logout()
await api.close()

return {"title": data[CONF_HOST]}

@@ -8,5 +8,5 @@
"iot_class": "local_polling",
"loggers": ["aiocomelit"],
"quality_scale": "bronze",
"requirements": ["aiocomelit==0.12.0"]
"requirements": ["aiocomelit==0.12.1"]
}

@@ -76,7 +76,7 @@
"cannot_authenticate": {
"message": "Error authenticating"
},
"updated_failed": {
"update_failed": {
"message": "Failed to update data: {error}"
}
}

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.4.30"]
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.5.7"]
}

@@ -8,6 +8,6 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["devolo_home_control_api"],
"requirements": ["devolo-home-control-api==0.18.3"],
"requirements": ["devolo-home-control-api==0.19.0"],
"zeroconf": ["_dvl-deviceapi._tcp.local."]
}

@@ -15,7 +15,7 @@
"quality_scale": "internal",
"requirements": [
"aiodhcpwatcher==1.1.1",
"aiodiscover==2.6.1",
"aiodiscover==2.7.0",
"cached-ipaddress==0.10.0"
]
}

@@ -68,7 +68,7 @@ async def async_validate_hostname(
result = False
with contextlib.suppress(DNSError):
result = bool(
await aiodns.DNSResolver(
await aiodns.DNSResolver( # type: ignore[call-overload]
nameservers=[resolver], udp_port=port, tcp_port=port
).query(hostname, qtype)
)

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/dnsip",
"iot_class": "cloud_polling",
"requirements": ["aiodns==3.3.0"]
"requirements": ["aiodns==3.4.0"]
}

@@ -106,7 +106,7 @@ class WanIpSensor(SensorEntity):
async def async_update(self) -> None:
"""Get the current DNS IP address for hostname."""
try:
response = await self.resolver.query(self.hostname, self.querytype)
response = await self.resolver.query(self.hostname, self.querytype) # type: ignore[call-overload]
except DNSError as err:
_LOGGER.warning("Exception while resolving host: %s", err)
response = None

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.10", "deebot-client==13.0.1"]
"requirements": ["py-sucks==0.9.10", "deebot-client==13.2.0"]
}
@@ -6,7 +6,8 @@ from collections.abc import Callable
from dataclasses import dataclass
from typing import Any, Generic

from deebot_client.capabilities import CapabilityEvent, CapabilityLifeSpan
from deebot_client.capabilities import CapabilityEvent, CapabilityLifeSpan, DeviceType
from deebot_client.device import Device
from deebot_client.events import (
BatteryEvent,
ErrorEvent,
@@ -34,7 +35,7 @@ from homeassistant.const import (
UnitOfArea,
UnitOfTime,
)
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType

@@ -59,6 +60,15 @@ class EcovacsSensorEntityDescription(
"""Ecovacs sensor entity description."""

value_fn: Callable[[EventT], StateType]
native_unit_of_measurement_fn: Callable[[DeviceType], str | None] | None = None

@callback
def get_area_native_unit_of_measurement(device_type: DeviceType) -> str | None:
"""Get the area native unit of measurement based on device type."""
if device_type is DeviceType.MOWER:
return UnitOfArea.SQUARE_CENTIMETERS
return UnitOfArea.SQUARE_METERS

ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = (
@@ -68,7 +78,7 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = (
capability_fn=lambda caps: caps.stats.clean,
value_fn=lambda e: e.area,
translation_key="stats_area",
native_unit_of_measurement=UnitOfArea.SQUARE_METERS,
native_unit_of_measurement_fn=get_area_native_unit_of_measurement,
),
EcovacsSensorEntityDescription[StatsEvent](
key="stats_time",
@@ -85,7 +95,7 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = (
value_fn=lambda e: e.area,
key="total_stats_area",
translation_key="total_stats_area",
native_unit_of_measurement=UnitOfArea.SQUARE_METERS,
native_unit_of_measurement_fn=get_area_native_unit_of_measurement,
state_class=SensorStateClass.TOTAL_INCREASING,
),
EcovacsSensorEntityDescription[TotalStatsEvent](
@@ -249,6 +259,27 @@ class EcovacsSensor(

entity_description: EcovacsSensorEntityDescription

def __init__(
self,
device: Device,
capability: CapabilityEvent,
entity_description: EcovacsSensorEntityDescription,
**kwargs: Any,
) -> None:
"""Initialize entity."""
super().__init__(device, capability, entity_description, **kwargs)
if (
entity_description.native_unit_of_measurement_fn
and (
native_unit_of_measurement
:= entity_description.native_unit_of_measurement_fn(
device.capabilities.device_type
)
)
is not None
):
self._attr_native_unit_of_measurement = native_unit_of_measurement

async def async_added_to_hass(self) -> None:
"""Set up the event listeners now that hass is ready."""
await super().async_added_to_hass()

@@ -64,7 +64,7 @@ async def _get_fixture_collection(envoy: Envoy, serial: str) -> dict[str, Any]:
"/ivp/ensemble/generator",
"/ivp/meters",
"/ivp/meters/readings",
"/home,",
"/home",
]

for end_point in end_points:

@@ -7,7 +7,7 @@
"iot_class": "local_polling",
"loggers": ["pyenphase"],
"quality_scale": "platinum",
"requirements": ["pyenphase==1.26.0"],
"requirements": ["pyenphase==1.26.1"],
"zeroconf": [
{
"type": "_enphase-envoy._tcp.local."

@@ -223,7 +223,6 @@ class EsphomeEntity(EsphomeBaseEntity, Generic[_InfoT, _StateT]):
self._states = cast(dict[int, _StateT], entry_data.state[state_type])
assert entry_data.device_info is not None
device_info = entry_data.device_info
self._device_info = device_info
self._on_entry_data_changed()
self._key = entity_info.key
self._state_type = state_type
@@ -311,6 +310,11 @@ class EsphomeEntity(EsphomeBaseEntity, Generic[_InfoT, _StateT]):
@callback
def _on_entry_data_changed(self) -> None:
entry_data = self._entry_data
# Update the device info since it can change
# when the device is reconnected
if TYPE_CHECKING:
assert entry_data.device_info is not None
self._device_info = entry_data.device_info
self._api_version = entry_data.api_version
self._client = entry_data.client
if self._device_info.has_deep_sleep:

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/forecast_solar",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["forecast-solar==4.1.0"]
"requirements": ["forecast-solar==4.2.0"]
}

@@ -92,7 +92,7 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat

available_main_ains = [
ain
for ain, dev in data.devices.items()
for ain, dev in data.devices.items() | data.templates.items()
if dev.device_and_unit_id[1] is None
]
device_reg = dr.async_get(self.hass)

@@ -45,7 +45,15 @@ type FroniusConfigEntry = ConfigEntry[FroniusSolarNet]
async def async_setup_entry(hass: HomeAssistant, entry: FroniusConfigEntry) -> bool:
"""Set up fronius from a config entry."""
host = entry.data[CONF_HOST]
fronius = Fronius(async_get_clientsession(hass), host)
fronius = Fronius(
async_get_clientsession(
hass,
# Fronius Gen24 firmware 1.35.4-1 redirects to HTTPS with self-signed
# certificate. See https://github.com/home-assistant/core/issues/138881
verify_ssl=False,
),
host,
)
solar_net = FroniusSolarNet(hass, entry, fronius)
await solar_net.init_devices()

@@ -35,7 +35,7 @@ async def validate_host(
hass: HomeAssistant, host: str
) -> tuple[str, FroniusConfigEntryData]:
"""Validate the user input allows us to connect."""
fronius = Fronius(async_get_clientsession(hass), host)
fronius = Fronius(async_get_clientsession(hass, verify_ssl=False), host)

try:
datalogger_info: dict[str, Any]

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250502.1"]
"requirements": ["home-assistant-frontend==20250516.0"]
}

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/google",
"iot_class": "cloud_polling",
"loggers": ["googleapiclient"],
"requirements": ["gcal-sync==7.0.0", "oauth2client==4.1.3", "ical==9.2.0"]
"requirements": ["gcal-sync==7.0.1", "oauth2client==4.1.3", "ical==9.2.4"]
}

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
"requirements": ["holidays==0.70", "babel==2.15.0"]
"requirements": ["holidays==0.72", "babel==2.15.0"]
}
@@ -234,7 +234,7 @@
"consumer_products_coffee_maker_program_coffee_world_black_eye": "Black eye",
"consumer_products_coffee_maker_program_coffee_world_dead_eye": "Dead eye",
"consumer_products_coffee_maker_program_beverage_hot_water": "Hot water",
"dishcare_dishwasher_program_pre_rinse": "Pre_rinse",
"dishcare_dishwasher_program_pre_rinse": "Pre-rinse",
"dishcare_dishwasher_program_auto_1": "Auto 1",
"dishcare_dishwasher_program_auto_2": "Auto 2",
"dishcare_dishwasher_program_auto_3": "Auto 3",
@@ -252,7 +252,7 @@
"dishcare_dishwasher_program_intensiv_power": "Intensive power",
"dishcare_dishwasher_program_magic_daily": "Magic daily",
"dishcare_dishwasher_program_super_60": "Super 60ºC",
"dishcare_dishwasher_program_kurz_60": "Kurz 60ºC",
"dishcare_dishwasher_program_kurz_60": "Speed 60ºC",
"dishcare_dishwasher_program_express_sparkle_65": "Express sparkle 65ºC",
"dishcare_dishwasher_program_machine_care": "Machine care",
"dishcare_dishwasher_program_steam_fresh": "Steam fresh",

@@ -90,16 +90,17 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
minor_version=2,
)

if config_entry.minor_version == 2:
# Add a `firmware_version` key
if config_entry.minor_version <= 3:
# Add a `firmware_version` key if it doesn't exist to handle entries created
# with minor version 1.3 where the firmware version was not set.
hass.config_entries.async_update_entry(
config_entry,
data={
**config_entry.data,
FIRMWARE_VERSION: None,
FIRMWARE_VERSION: config_entry.data.get(FIRMWARE_VERSION),
},
version=1,
minor_version=3,
minor_version=4,
)

_LOGGER.debug(

@@ -62,7 +62,7 @@ class HomeAssistantYellowConfigFlow(BaseFirmwareConfigFlow, domain=DOMAIN):
"""Handle a config flow for Home Assistant Yellow."""

VERSION = 1
MINOR_VERSION = 3
MINOR_VERSION = 4

def __init__(self, *args: Any, **kwargs: Any) -> None:
"""Instantiate config flow."""
@@ -116,6 +116,11 @@ class HomeAssistantYellowConfigFlow(BaseFirmwareConfigFlow, domain=DOMAIN):
if self._probed_firmware_info is not None
else ApplicationType.EZSP
).value,
FIRMWARE_VERSION: (
self._probed_firmware_info.firmware_version
if self._probed_firmware_info is not None
else None
),
},
)

@@ -8,7 +8,13 @@ from pyhap.const import CATEGORY_AIR_PURIFIER
from pyhap.service import Service
from pyhap.util import callback as pyhap_callback

from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
STATE_ON,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
UnitOfTemperature,
)
from homeassistant.core import (
Event,
EventStateChangedData,
@@ -43,7 +49,12 @@ from .const import (
THRESHOLD_FILTER_CHANGE_NEEDED,
)
from .type_fans import ATTR_PRESET_MODE, CHAR_ROTATION_SPEED, Fan
from .util import cleanup_name_for_homekit, convert_to_float, density_to_air_quality
from .util import (
cleanup_name_for_homekit,
convert_to_float,
density_to_air_quality,
temperature_to_homekit,
)

_LOGGER = logging.getLogger(__name__)

@@ -345,8 +356,13 @@ class AirPurifier(Fan):
):
return

unit = new_state.attributes.get(
ATTR_UNIT_OF_MEASUREMENT, UnitOfTemperature.CELSIUS
)
current_temperature = temperature_to_homekit(current_temperature, unit)

_LOGGER.debug(
"%s: Linked temperature sensor %s changed to %d",
"%s: Linked temperature sensor %s changed to %d °C",
self.entity_id,
self.linked_temperature_sensor,
current_temperature,

@@ -1,8 +1,11 @@
"""Support for HomematicIP Cloud events."""

from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING

from homematicip.base.channel_event import ChannelEvent
from homematicip.base.functionalChannels import FunctionalChannel
from homematicip.device import Device

from homeassistant.components.event import (
@@ -23,6 +26,9 @@ from .hap import HomematicipHAP
class HmipEventEntityDescription(EventEntityDescription):
"""Description of a HomematicIP Cloud event."""

channel_event_types: list[str] | None = None
channel_selector_fn: Callable[[FunctionalChannel], bool] | None = None

EVENT_DESCRIPTIONS = {
"doorbell": HmipEventEntityDescription(
@@ -30,6 +36,8 @@ EVENT_DESCRIPTIONS = {
translation_key="doorbell",
device_class=EventDeviceClass.DOORBELL,
event_types=["ring"],
channel_event_types=["DOOR_BELL_SENSOR_EVENT"],
channel_selector_fn=lambda channel: channel.channelRole == "DOOR_BELL_INPUT",
),
}

@@ -41,24 +49,29 @@ async def async_setup_entry(
) -> None:
"""Set up the HomematicIP cover from a config entry."""
hap = hass.data[DOMAIN][config_entry.unique_id]
entities: list[HomematicipGenericEntity] = []

async_add_entities(
entities.extend(
HomematicipDoorBellEvent(
hap,
device,
channel.index,
EVENT_DESCRIPTIONS["doorbell"],
description,
)
for description in EVENT_DESCRIPTIONS.values()
for device in hap.home.devices
for channel in device.functionalChannels
if channel.channelRole == "DOOR_BELL_INPUT"
if description.channel_selector_fn and description.channel_selector_fn(channel)
)

async_add_entities(entities)

class HomematicipDoorBellEvent(HomematicipGenericEntity, EventEntity):
"""Event class for HomematicIP doorbell events."""

_attr_device_class = EventDeviceClass.DOORBELL
entity_description: HmipEventEntityDescription

def __init__(
self,
@@ -86,9 +99,27 @@ class HomematicipDoorBellEvent(HomematicipGenericEntity, EventEntity):
@callback
def _async_handle_event(self, *args, **kwargs) -> None:
"""Handle the event fired by the functional channel."""
raised_channel_event = self._get_channel_event_from_args(*args)

if not self._should_raise(raised_channel_event):
return

event_types = self.entity_description.event_types
if TYPE_CHECKING:
assert event_types is not None

self._trigger_event(event_type=event_types[0])
self.async_write_ha_state()

def _should_raise(self, event_type: str) -> bool:
"""Check if the event should be raised."""
if self.entity_description.channel_event_types is None:
return False
return event_type in self.entity_description.channel_event_types

def _get_channel_event_from_args(self, *args) -> str:
"""Get the channel event."""
if isinstance(args[0], ChannelEvent):
return args[0].channelEventType

return ""
@@ -110,14 +110,14 @@ class AutomowerLawnMowerEntity(AutomowerAvailableEntity, LawnMowerEntity):
mower_attributes = self.mower_attributes
if mower_attributes.mower.state in PAUSED_STATES:
return LawnMowerActivity.PAUSED
if mower_attributes.mower.activity in MOWING_ACTIVITIES:
return LawnMowerActivity.MOWING
if mower_attributes.mower.activity == MowerActivities.GOING_HOME:
return LawnMowerActivity.RETURNING
if (mower_attributes.mower.state == "RESTRICTED") or (
mower_attributes.mower.activity in DOCKED_ACTIVITIES
):
return LawnMowerActivity.DOCKED
if mower_attributes.mower.state in MowerStates.IN_OPERATION:
if mower_attributes.mower.activity == MowerActivities.GOING_HOME:
return LawnMowerActivity.RETURNING
return LawnMowerActivity.MOWING
return LawnMowerActivity.ERROR

@property

@@ -58,6 +58,7 @@ class INKBIRDActiveBluetoothProcessorCoordinator(
update_method=self._async_on_update,
needs_poll_method=self._async_needs_poll,
poll_method=self._async_poll_data,
connectable=False, # Polling only happens if active scanning is disabled
)

async def async_init(self) -> None:

@@ -37,5 +37,5 @@
"iot_class": "cloud_push",
"loggers": ["pylamarzocco"],
"quality_scale": "platinum",
"requirements": ["pylamarzocco==2.0.0"]
"requirements": ["pylamarzocco==2.0.3"]
}

@@ -132,17 +132,18 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up sensor entities."""
coordinator = entry.runtime_data.config_coordinator
config_coordinator = entry.runtime_data.config_coordinator
statistic_coordinators = entry.runtime_data.statistics_coordinator

entities = [
LaMarzoccoSensorEntity(coordinator, description)
LaMarzoccoSensorEntity(config_coordinator, description)
for description in ENTITIES
if description.supported_fn(coordinator)
if description.supported_fn(config_coordinator)
]
entities.extend(
LaMarzoccoStatisticSensorEntity(coordinator, description)
LaMarzoccoStatisticSensorEntity(statistic_coordinators, description)
for description in STATISTIC_ENTITIES
if description.supported_fn(coordinator)
if description.supported_fn(statistic_coordinators)
)
async_add_entities(entities)

@@ -7,6 +7,6 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["linkplay"],
"requirements": ["python-linkplay==0.2.4"],
"requirements": ["python-linkplay==0.2.5"],
"zeroconf": ["_linkplay._tcp.local."]
}

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/local_calendar",
"iot_class": "local_polling",
"loggers": ["ical"],
"requirements": ["ical==9.2.0"]
"requirements": ["ical==9.2.4"]
}

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/local_todo",
"iot_class": "local_polling",
"requirements": ["ical==9.2.0"]
"requirements": ["ical==9.2.4"]
}

@@ -93,7 +93,6 @@ async def async_setup_intents(hass: HomeAssistant) -> None:
DOMAIN,
SERVICE_VOLUME_SET,
required_domains={DOMAIN},
required_states={MediaPlayerState.PLAYING},
required_features=MediaPlayerEntityFeature.VOLUME_SET,
required_slots={
ATTR_MEDIA_VOLUME_LEVEL: intent.IntentSlotInfo(
@@ -159,7 +158,6 @@ class MediaUnpauseHandler(intent.ServiceIntentHandler):
DOMAIN,
SERVICE_MEDIA_PLAY,
required_domains={DOMAIN},
required_states={MediaPlayerState.PAUSED},
description="Resumes a media player",
platforms={DOMAIN},
device_classes={MediaPlayerDeviceClass},

@@ -57,8 +57,8 @@ ATA_HVAC_MODE_REVERSE_LOOKUP = {v: k for k, v in ATA_HVAC_MODE_LOOKUP.items()}

ATW_ZONE_HVAC_MODE_LOOKUP = {
atw.ZONE_OPERATION_MODE_HEAT: HVACMode.HEAT,
atw.ZONE_OPERATION_MODE_COOL: HVACMode.COOL,
atw.ZONE_STATUS_HEAT: HVACMode.HEAT,
atw.ZONE_STATUS_COOL: HVACMode.COOL,
}
ATW_ZONE_HVAC_MODE_REVERSE_LOOKUP = {v: k for k, v in ATW_ZONE_HVAC_MODE_LOOKUP.items()}

@@ -26,7 +26,7 @@ from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

TWO_YEARS = 2 * 365 * 24
TWO_YEARS_DAYS = 2 * 365

class MillDataUpdateCoordinator(DataUpdateCoordinator):
@@ -91,7 +91,7 @@ class MillHistoricDataUpdateCoordinator(DataUpdateCoordinator):
if not last_stats or not last_stats.get(statistic_id):
hourly_data = (
await self.mill_data_connection.fetch_historic_energy_usage(
dev_id, n_days=TWO_YEARS
dev_id, n_days=TWO_YEARS_DAYS
)
)
hourly_data = dict(sorted(hourly_data.items(), key=lambda x: x[0]))

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/mill",
"iot_class": "local_polling",
"loggers": ["mill", "mill_local"],
"requirements": ["millheater==0.12.3", "mill-local==0.3.0"]
"requirements": ["millheater==0.12.5", "mill-local==0.3.0"]
}

@@ -498,8 +498,7 @@ def validate_light_platform_config(user_data: dict[str, Any]) -> dict[str, str]:
if user_data.get(CONF_MIN_KELVIN, DEFAULT_MIN_KELVIN) >= user_data.get(
CONF_MAX_KELVIN, DEFAULT_MAX_KELVIN
):
errors[CONF_MAX_KELVIN] = "max_below_min_kelvin"
errors[CONF_MIN_KELVIN] = "max_below_min_kelvin"
errors["advanced_settings"] = "max_below_min_kelvin"
return errors

@@ -1276,7 +1275,10 @@ def validate_user_input(
try:
validator(value)
except (ValueError, vol.Error, vol.Invalid):
errors[field] = data_schema_fields[field].error or "invalid_input"
data_schema_field = data_schema_fields[field]
errors[data_schema_field.section or field] = (
data_schema_field.error or "invalid_input"
)

if config_validator is not None:
if TYPE_CHECKING:
@@ -1385,8 +1387,11 @@ def subentry_schema_default_data_from_fields(
return {
key: field.default
for key, field in data_schema_fields.items()
if field.is_schema_default
or (field.default is not vol.UNDEFINED and key not in component_data)
if _check_conditions(field, component_data)
and (
field.is_schema_default
or (field.default is not vol.UNDEFINED and key not in component_data)
)
}

@@ -2058,7 +2063,7 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow):
entities = [
SelectOptionDict(
value=key,
label=f"{device_name} {component_data.get(CONF_NAME, '-')}"
label=f"{device_name} {component_data.get(CONF_NAME, '-') or '-'}"
f" ({component_data[CONF_PLATFORM]})",
)
for key, component_data in self._subentry_data["components"].items()
@@ -2212,7 +2217,10 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow):
for component_data in self._subentry_data["components"].values():
platform = component_data[CONF_PLATFORM]
subentry_default_data = subentry_schema_default_data_from_fields(
PLATFORM_ENTITY_FIELDS[platform] | COMMON_ENTITY_FIELDS, component_data
COMMON_ENTITY_FIELDS
| PLATFORM_ENTITY_FIELDS[platform]
| PLATFORM_MQTT_FIELDS[platform],
component_data,
)
component_data.update(subentry_default_data)

@@ -2287,7 +2295,8 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow):
self._component_id = None
mqtt_device = self._subentry_data[CONF_DEVICE][CONF_NAME]
mqtt_items = ", ".join(
f"{mqtt_device} {component_data.get(CONF_NAME, '-')} ({component_data[CONF_PLATFORM]})"
f"{mqtt_device} {component_data.get(CONF_NAME, '-') or '-'} "
f"({component_data[CONF_PLATFORM]})"
for component_data in self._subentry_data["components"].values()
)
menu_options = [
@@ -150,7 +150,11 @@ class NetgearRouter:
if device_entry.via_device_id is None:
continue # do not add the router itself

device_mac = dict(device_entry.connections)[dr.CONNECTION_NETWORK_MAC]
device_mac = dict(device_entry.connections).get(
dr.CONNECTION_NETWORK_MAC
)
if device_mac is None:
continue
self.devices[device_mac] = {
"mac": device_mac,
"name": device_entry.name,

@@ -181,11 +181,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
session = aiohttp.ClientSession(connector=connector)

@callback
def _async_close_websession(event: Event) -> None:
def _async_close_websession(event: Event | None = None) -> None:
"""Close websession."""
session.detach()

hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_close_websession)
entry.async_on_unload(_async_close_websession)
entry.async_on_unload(
hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, _async_close_websession)
)

client = OctoprintClient(
host=entry.data[CONF_HOST],

@@ -140,7 +140,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
content.append(
ResponseInputImageParam(
type="input_image",
file_id=filename,
image_url=f"data:{mime_type};base64,{base64_file}",
detail="auto",
)

@@ -32,6 +32,8 @@ from .const import (
PLACEHOLDER_WEBHOOK_URL,
)

AUTH_TOKEN_URL = "https://intercom.help/plaato/en/articles/5004720-auth_token"

class PlaatoConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handles a Plaato config flow."""
@@ -153,7 +155,10 @@ class PlaatoConfigFlow(ConfigFlow, domain=DOMAIN):
step_id="api_method",
data_schema=data_schema,
errors=errors,
description_placeholders={PLACEHOLDER_DEVICE_TYPE: device_type.name},
description_placeholders={
PLACEHOLDER_DEVICE_TYPE: device_type.name,
"auth_token_url": AUTH_TOKEN_URL,
},
)

async def _get_webhook_id(self):

@@ -11,7 +11,7 @@
},
"api_method": {
"title": "Select API method",
"description": "To be able to query the API an `auth_token` is required which can be obtained by following [these](https://plaato.zendesk.com/hc/en-us/articles/360003234717-Auth-token) instructions\n\n Selected device: **{device_type}** \n\nIf you rather use the built in webhook method (Airlock only) please check the box below and leave Auth Token blank",
"description": "To be able to query the API an 'auth token' is required which can be obtained by following [these instructions]({auth_token_url})\n\nSelected device: **{device_type}** \n\nIf you prefer to use the built-in webhook method (Airlock only) please check the box below and leave 'Auth token' blank",
"data": {
"use_webhook": "Use webhook",
"token": "Paste Auth Token here"

@@ -6,7 +6,6 @@ import logging
from typing import Any

from pypoint import PointSession
from tempora.utc import fromtimestamp

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -62,7 +61,9 @@ class PointDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]
or device.device_id not in self.device_updates
or self.device_updates[device.device_id] < last_updated
):
self.device_updates[device.device_id] = last_updated or fromtimestamp(0)
self.device_updates[device.device_id] = (
last_updated or datetime.fromtimestamp(0)
)
self.data[device.device_id] = {
k: await device.sensor(k)
for k in ("temperature", "humidity", "sound_pressure")

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["ical"],
"quality_scale": "silver",
"requirements": ["ical==9.2.0"]
"requirements": ["ical==9.2.4"]
}

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["renault_api"],
"quality_scale": "silver",
"requirements": ["renault-api==0.3.0"]
"requirements": ["renault-api==0.3.1"]
}
@@ -23,7 +23,7 @@ from homeassistant.helpers import (
device_registry as dr,
entity_registry as er,
)
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -364,45 +364,90 @@ def migrate_entity_ids(
devices = dr.async_entries_for_config_entry(device_reg, config_entry_id)
ch_device_ids = {}
for device in devices:
(device_uid, ch, is_chime) = get_device_uid_and_ch(device, host)
for dev_id in device.identifiers:
(device_uid, ch, is_chime) = get_device_uid_and_ch(dev_id, host)
if not device_uid:
continue

if host.api.supported(None, "UID") and device_uid[0] != host.unique_id:
if ch is None:
new_device_id = f"{host.unique_id}"
else:
new_device_id = f"{host.unique_id}_{device_uid[1]}"
_LOGGER.debug(
"Updating Reolink device UID from %s to %s", device_uid, new_device_id
)
new_identifiers = {(DOMAIN, new_device_id)}
device_reg.async_update_device(device.id, new_identifiers=new_identifiers)

if ch is None or is_chime:
continue # Do not consider the NVR itself or chimes

ch_device_ids[device.id] = ch
if host.api.supported(ch, "UID") and device_uid[1] != host.api.camera_uid(ch):
if host.api.supported(None, "UID"):
new_device_id = f"{host.unique_id}_{host.api.camera_uid(ch)}"
else:
new_device_id = f"{device_uid[0]}_{host.api.camera_uid(ch)}"
_LOGGER.debug(
"Updating Reolink device UID from %s to %s", device_uid, new_device_id
)
new_identifiers = {(DOMAIN, new_device_id)}
existing_device = device_reg.async_get_device(identifiers=new_identifiers)
if existing_device is None:
if host.api.supported(None, "UID") and device_uid[0] != host.unique_id:
if ch is None:
new_device_id = f"{host.unique_id}"
else:
new_device_id = f"{host.unique_id}_{device_uid[1]}"
_LOGGER.debug(
"Updating Reolink device UID from %s to %s",
device_uid,
new_device_id,
)
new_identifiers = {(DOMAIN, new_device_id)}
device_reg.async_update_device(
device.id, new_identifiers=new_identifiers
)
else:
_LOGGER.warning(
"Reolink device with uid %s already exists, "
"removing device with uid %s",
new_device_id,
device_uid,

if ch is None or is_chime:
continue # Do not consider the NVR itself or chimes

# Check for wrongfully combined host with NVR entities in one device
# Can be removed in HA 2025.12
if (DOMAIN, host.unique_id) in device.identifiers:
new_identifiers = device.identifiers.copy()
for old_id in device.identifiers:
if old_id[0] == DOMAIN and old_id[1] != host.unique_id:
new_identifiers.remove(old_id)
_LOGGER.debug(
"Updating Reolink device identifiers from %s to %s",
device.identifiers,
new_identifiers,
)
device_reg.async_remove_device(device.id)
device_reg.async_update_device(
device.id, new_identifiers=new_identifiers
)
break

# Check for wrongfully added MAC of the NVR/Hub to the camera
# Can be removed in HA 2025.12
host_connnection = (CONNECTION_NETWORK_MAC, host.api.mac_address)
if host_connnection in device.connections:
new_connections = device.connections.copy()
new_connections.remove(host_connnection)
_LOGGER.debug(
"Updating Reolink device connections from %s to %s",
device.connections,
new_connections,
)
device_reg.async_update_device(
device.id, new_connections=new_connections
)

ch_device_ids[device.id] = ch
if host.api.supported(ch, "UID") and device_uid[1] != host.api.camera_uid(
ch
):
if host.api.supported(None, "UID"):
new_device_id = f"{host.unique_id}_{host.api.camera_uid(ch)}"
else:
new_device_id = f"{device_uid[0]}_{host.api.camera_uid(ch)}"
_LOGGER.debug(
"Updating Reolink device UID from %s to %s",
device_uid,
new_device_id,
)
new_identifiers = {(DOMAIN, new_device_id)}
existing_device = device_reg.async_get_device(
identifiers=new_identifiers
)
if existing_device is None:
device_reg.async_update_device(
device.id, new_identifiers=new_identifiers
)
else:
_LOGGER.warning(
"Reolink device with uid %s already exists, "
"removing device with uid %s",
new_device_id,
device_uid,
)
device_reg.async_remove_device(device.id)

entity_reg = er.async_get(hass)
entities = er.async_entries_for_config_entry(entity_reg, config_entry_id)

@@ -198,7 +198,14 @@ class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity):
@property
def available(self) -> bool:
"""Return True if entity is available."""
return super().available and self._host.api.camera_online(self._channel)
if self.entity_description.always_available:
return True

return (
super().available
and self._host.api.camera_online(self._channel)
and not self._host.api.baichuan.privacy_mode(self._channel)
)

def register_callback(self, callback_id: str, cmd_id: int) -> None:
"""Register callback for TCP push events."""

@@ -465,10 +465,11 @@ class ReolinkHost:
wake = True
self.last_wake = time()

for channel in self._api.channels:
if self._api.baichuan.privacy_mode(channel):
await self._api.baichuan.get_privacy_mode(channel)
if self._api.baichuan.privacy_mode():
await self._api.baichuan.get_privacy_mode()
if self._api.baichuan.privacy_mode():
return # API is shutdown, no need to check states
return # API is shutdown, no need to check states

await self._api.get_states(cmd_list=self.update_cmd, wake=wake)

@@ -580,7 +581,12 @@ class ReolinkHost:
)
return

await self._api.subscribe(self._webhook_url)
try:
await self._api.subscribe(self._webhook_url)
except NotSupportedError as err:
self._onvif_push_supported = False
_LOGGER.debug(err)
return

_LOGGER.debug(
"Host %s: subscribed successfully to webhook %s",
@@ -601,7 +607,11 @@ class ReolinkHost:
return # API is shutdown, no need to subscribe

try:
if self._onvif_push_supported and not self._api.baichuan.events_active:
if (
self._onvif_push_supported
and not self._api.baichuan.events_active
and self._cancel_tcp_push_check is None
):
await self._renew(SubType.push)

if self._onvif_long_poll_supported and self._long_poll_task is not None:

@@ -19,5 +19,5 @@
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"quality_scale": "platinum",
"requirements": ["reolink-aio==0.13.2"]
"requirements": ["reolink-aio==0.13.3"]
}
@@ -76,13 +76,18 @@ def get_store(hass: HomeAssistant, config_entry_id: str) -> Store[str]:
|
||||
|
||||
|
||||
def get_device_uid_and_ch(
|
||||
device: dr.DeviceEntry, host: ReolinkHost
|
||||
device: dr.DeviceEntry | tuple[str, str], host: ReolinkHost
|
||||
) -> tuple[list[str], int | None, bool]:
|
||||
"""Get the channel and the split device_uid from a reolink DeviceEntry."""
|
||||
device_uid = []
|
||||
is_chime = False
|
||||
|
||||
for dev_id in device.identifiers:
|
||||
if isinstance(device, dr.DeviceEntry):
|
||||
dev_ids = device.identifiers
|
||||
else:
|
||||
dev_ids = {device}
|
||||
|
||||
for dev_id in dev_ids:
|
||||
if dev_id[0] == DOMAIN:
|
||||
device_uid = dev_id[1].split("_")
|
||||
if device_uid[0] == host.unique_id:
|
||||
|
||||
@@ -28,7 +28,7 @@ from roborock.version_a01_apis import RoborockClientA01
|
||||
from roborock.web_api import RoborockApiClient
|
||||
from vacuum_map_parser_base.config.color import ColorsPalette
|
||||
from vacuum_map_parser_base.config.image_config import ImageConfig
|
||||
from vacuum_map_parser_base.config.size import Sizes
|
||||
from vacuum_map_parser_base.config.size import Size, Sizes
|
||||
from vacuum_map_parser_base.map_data import MapData
|
||||
from vacuum_map_parser_roborock.map_data_parser import RoborockMapDataParser
|
||||
|
||||
@@ -148,7 +148,13 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
|
||||
]
|
||||
self.map_parser = RoborockMapDataParser(
|
||||
ColorsPalette(),
|
||||
Sizes({k: v * MAP_SCALE for k, v in Sizes.SIZES.items()}),
|
||||
Sizes(
|
||||
{
|
||||
k: v * MAP_SCALE
|
||||
for k, v in Sizes.SIZES.items()
|
||||
if k != Size.MOP_PATH_WIDTH
|
||||
}
|
||||
),
|
||||
drawables,
|
||||
ImageConfig(scale=MAP_SCALE),
|
||||
[],
|
||||
|
||||
@@ -252,7 +252,7 @@ class SensiboClimate(SensiboDeviceBaseEntity, ClimateEntity):
|
||||
return features
|
||||
|
||||
@property
|
||||
def current_humidity(self) -> int | None:
|
||||
def current_humidity(self) -> float | None:
|
||||
"""Return the current humidity."""
|
||||
return self.device_data.humidity
|
||||
|
||||
|
||||
@@ -15,5 +15,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pysensibo"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pysensibo==1.1.0"]
|
||||
"requirements": ["pysensibo==1.2.1"]
|
||||
}
|
||||
|
||||
@@ -101,14 +101,25 @@ MOTION_SENSOR_TYPES: tuple[SensiboMotionSensorEntityDescription, ...] = (
value_fn=lambda data: data.temperature,
),
)


def _pure_aqi(pm25_pure: PureAQI | None) -> str | None:
"""Return the Pure aqi name or None if unknown."""
if pm25_pure:
aqi_name = pm25_pure.name.lower()
if aqi_name != "unknown":
return aqi_name
return None


PURE_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = (
SensiboDeviceSensorEntityDescription(
key="pm25",
translation_key="pm25_pure",
device_class=SensorDeviceClass.ENUM,
value_fn=lambda data: data.pm25_pure.name.lower() if data.pm25_pure else None,
value_fn=lambda data: _pure_aqi(data.pm25_pure),
extra_fn=None,
options=[aqi.name.lower() for aqi in PureAQI],
options=[aqi.name.lower() for aqi in PureAQI if aqi.name != "UNKNOWN"],
),
SensiboDeviceSensorEntityDescription(
key="pure_sensitivity",
@@ -119,6 +130,7 @@ PURE_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = (
FILTER_LAST_RESET_DESCRIPTION,
)


DEVICE_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = (
SensiboDeviceSensorEntityDescription(
key="timer_time",

@@ -13,6 +13,7 @@ from aiohttp import ClientResponseError
from pysmartthings import (
Attribute,
Capability,
Category,
ComponentStatus,
Device,
DeviceEvent,
@@ -32,6 +33,7 @@ from homeassistant.const import (
ATTR_HW_VERSION,
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_SUGGESTED_AREA,
ATTR_SW_VERSION,
ATTR_VIA_DEVICE,
CONF_ACCESS_TOKEN,
@@ -193,6 +195,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry)
}
devices = await client.get_devices()
for device in devices:
if (
(main_component := device.components.get(MAIN)) is not None
and main_component.manufacturer_category is Category.BLUETOOTH_TRACKER
):
device_status[device.device_id] = FullDevice(
device=device,
status={},
online=True,
)
continue
status = process_status(await client.get_device_status(device.device_id))
online = await client.get_device_health(device.device_id)
device_status[device.device_id] = FullDevice(
@@ -453,14 +465,24 @@ def create_devices(
ATTR_SW_VERSION: viper.software_version,
}
)
if (
device_registry.async_get_device({(DOMAIN, device.device.device_id)})
is None
):
kwargs.update(
{
ATTR_SUGGESTED_AREA: (
rooms.get(device.device.room_id)
if device.device.room_id
else None
)
}
)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, device.device.device_id)},
configuration_url="https://account.smartthings.com",
name=device.device.label,
suggested_area=(
rooms.get(device.device.room_id) if device.device.room_id else None
),
**kwargs,
)


@@ -31,7 +31,7 @@ from .entity import SmartThingsEntity

ATTR_OPERATION_STATE = "operation_state"
MODE_TO_STATE = {
"auto": HVACMode.HEAT_COOL,
"auto": HVACMode.AUTO,
"cool": HVACMode.COOL,
"eco": HVACMode.AUTO,
"rush hour": HVACMode.AUTO,
@@ -40,7 +40,7 @@ MODE_TO_STATE = {
"off": HVACMode.OFF,
}
STATE_TO_MODE = {
HVACMode.HEAT_COOL: "auto",
HVACMode.AUTO: "auto",
HVACMode.COOL: "cool",
HVACMode.HEAT: "heat",
HVACMode.OFF: "off",
@@ -58,7 +58,7 @@ OPERATING_STATE_TO_ACTION = {
}

AC_MODE_TO_STATE = {
"auto": HVACMode.HEAT_COOL,
"auto": HVACMode.AUTO,
"cool": HVACMode.COOL,
"dry": HVACMode.DRY,
"coolClean": HVACMode.COOL,
@@ -66,10 +66,11 @@ AC_MODE_TO_STATE = {
"heat": HVACMode.HEAT,
"heatClean": HVACMode.HEAT,
"fanOnly": HVACMode.FAN_ONLY,
"fan": HVACMode.FAN_ONLY,
"wind": HVACMode.FAN_ONLY,
}
STATE_TO_AC_MODE = {
HVACMode.HEAT_COOL: "auto",
HVACMode.AUTO: "auto",
HVACMode.COOL: "cool",
HVACMode.DRY: "dry",
HVACMode.HEAT: "heat",
@@ -88,6 +89,7 @@ FAN_OSCILLATION_TO_SWING = {
}

WIND = "wind"
FAN = "fan"
WINDFREE = "windFree"

UNIT_MAP = {"C": UnitOfTemperature.CELSIUS, "F": UnitOfTemperature.FAHRENHEIT}
@@ -388,14 +390,15 @@ class SmartThingsAirConditioner(SmartThingsEntity, ClimateEntity):
tasks.append(self.async_turn_on())

mode = STATE_TO_AC_MODE[hvac_mode]
# If new hvac_mode is HVAC_MODE_FAN_ONLY and AirConditioner support "wind" mode the AirConditioner new mode has to be "wind"
# The conversion make the mode change working
# The conversion is made only for device that wrongly has capability "wind" instead "fan_only"
# If new hvac_mode is HVAC_MODE_FAN_ONLY and AirConditioner support "wind" or "fan" mode the AirConditioner
# new mode has to be "wind" or "fan"
if hvac_mode == HVACMode.FAN_ONLY:
if WIND in self.get_attribute_value(
Capability.AIR_CONDITIONER_MODE, Attribute.SUPPORTED_AC_MODES
):
mode = WIND
for fan_mode in (WIND, FAN):
if fan_mode in self.get_attribute_value(
Capability.AIR_CONDITIONER_MODE, Attribute.SUPPORTED_AC_MODES
):
mode = fan_mode
break

tasks.append(
self.execute_device_command(
@@ -30,5 +30,5 @@
"iot_class": "cloud_push",
"loggers": ["pysmartthings"],
"quality_scale": "bronze",
"requirements": ["pysmartthings==3.2.0"]
"requirements": ["pysmartthings==3.2.2"]
}

@@ -26,6 +26,7 @@ class SmartThingsSelectDescription(SelectEntityDescription):
options_attribute: Attribute
status_attribute: Attribute
command: Command
default_options: list[str] | None = None


CAPABILITIES_TO_SELECT: dict[Capability | str, SmartThingsSelectDescription] = {
@@ -46,6 +47,7 @@ CAPABILITIES_TO_SELECT: dict[Capability | str, SmartThingsSelectDescription] = {
options_attribute=Attribute.SUPPORTED_MACHINE_STATES,
status_attribute=Attribute.MACHINE_STATE,
command=Command.SET_MACHINE_STATE,
default_options=["run", "pause", "stop"],
),
Capability.WASHER_OPERATING_STATE: SmartThingsSelectDescription(
key=Capability.WASHER_OPERATING_STATE,
@@ -55,6 +57,7 @@ CAPABILITIES_TO_SELECT: dict[Capability | str, SmartThingsSelectDescription] = {
options_attribute=Attribute.SUPPORTED_MACHINE_STATES,
status_attribute=Attribute.MACHINE_STATE,
command=Command.SET_MACHINE_STATE,
default_options=["run", "pause", "stop"],
),
Capability.SAMSUNG_CE_AUTO_DISPENSE_DETERGENT: SmartThingsSelectDescription(
key=Capability.SAMSUNG_CE_AUTO_DISPENSE_DETERGENT,
@@ -114,8 +117,12 @@ class SmartThingsSelectEntity(SmartThingsEntity, SelectEntity):
@property
def options(self) -> list[str]:
"""Return the list of options."""
return self.get_attribute_value(
self.entity_description.key, self.entity_description.options_attribute
return (
self.get_attribute_value(
self.entity_description.key, self.entity_description.options_attribute
)
or self.entity_description.default_options
or []
)

@property

@@ -584,7 +584,7 @@ CAPABILITY_TO_SENSORS: dict[
device_class=SensorDeviceClass.TEMPERATURE,
use_temperature_unit=True,
# Set the value to None if it is 0 F (-17 C)
value_fn=lambda value: None if value in {0, -17} else value,
value_fn=lambda value: None if value in {-17, 0, 1} else value,
)
]
},
@@ -631,7 +631,7 @@ CAPABILITY_TO_SENSORS: dict[
SmartThingsSensorEntityDescription(
key="powerEnergy_meter",
translation_key="power_energy",
state_class=SensorStateClass.TOTAL_INCREASING,
state_class=SensorStateClass.TOTAL,
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
value_fn=lambda value: value["powerEnergy"] / 1000,

@@ -53,7 +53,6 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
try:
if not await self._async_check_auth_required(user_input):
info = await self.client.get_info()
self._host = str(info.device_ip)
self._device_name = str(info.hostname)

if info.model not in Devices:
@@ -79,7 +78,6 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
try:
if not await self._async_check_auth_required(user_input):
info = await self.client.get_info()
self._host = str(info.device_ip)
self._device_name = str(info.hostname)

if info.model not in Devices:

@@ -7,5 +7,5 @@
"iot_class": "cloud_push",
"loggers": ["snoo"],
"quality_scale": "bronze",
"requirements": ["python-snoo==0.6.5"]
"requirements": ["python-snoo==0.6.6"]
}

@@ -56,7 +56,8 @@
"power": "Power button pressed",
"status_requested": "Status requested",
"sticky_white_noise_updated": "Sleepytime sounds updated",
"config_change": "Config changed"
"config_change": "Config changed",
"restart": "Restart"
}
}
}

@@ -290,8 +290,10 @@
"options": {
"apparent_power": "[%key:component::sensor::entity_component::apparent_power::name%]",
"aqi": "[%key:component::sensor::entity_component::aqi::name%]",
"area": "[%key:component::sensor::entity_component::area::name%]",
"atmospheric_pressure": "[%key:component::sensor::entity_component::atmospheric_pressure::name%]",
"battery": "[%key:component::sensor::entity_component::battery::name%]",
"blood_glucose_concentration": "[%key:component::sensor::entity_component::blood_glucose_concentration::name%]",
"carbon_dioxide": "[%key:component::sensor::entity_component::carbon_dioxide::name%]",
"carbon_monoxide": "[%key:component::sensor::entity_component::carbon_monoxide::name%]",
"conductivity": "[%key:component::sensor::entity_component::conductivity::name%]",
@@ -302,6 +304,7 @@
"distance": "[%key:component::sensor::entity_component::distance::name%]",
"duration": "[%key:component::sensor::entity_component::duration::name%]",
"energy": "[%key:component::sensor::entity_component::energy::name%]",
"energy_distance": "[%key:component::sensor::entity_component::energy_distance::name%]",
"energy_storage": "[%key:component::sensor::entity_component::energy_storage::name%]",
"frequency": "[%key:component::sensor::entity_component::frequency::name%]",
"gas": "[%key:component::sensor::entity_component::gas::name%]",
@@ -338,6 +341,7 @@
"volume_storage": "[%key:component::sensor::entity_component::volume_storage::name%]",
"water": "[%key:component::sensor::entity_component::water::name%]",
"weight": "[%key:component::sensor::entity_component::weight::name%]",
"wind_direction": "[%key:component::sensor::entity_component::wind_direction::name%]",
"wind_speed": "[%key:component::sensor::entity_component::wind_speed::name%]"
}
},

@@ -9,6 +9,7 @@ from tesla_fleet_api.teslemetry import EnergySite, Vehicle
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
@@ -229,7 +230,7 @@ class TeslemetryWallConnectorEntity(TeslemetryEntity):
super().__init__(data.live_coordinator, key)

@property
def _value(self) -> int:
def _value(self) -> StateType:
"""Return a specific wall connector value from coordinator data."""
return (
self.coordinator.data.get("wall_connectors", {})

@@ -1763,8 +1763,7 @@ class TeslemetryWallConnectorSensorEntity(TeslemetryWallConnectorEntity, SensorE

def _async_update_attrs(self) -> None:
"""Update the attributes of the sensor."""
if self.exists:
self._attr_native_value = self.entity_description.value_fn(self._value)
self._attr_native_value = self.entity_description.value_fn(self._value)


class TeslemetryEnergyInfoSensorEntity(TeslemetryEnergyInfoEntity, SensorEntity):

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/tibber",
"iot_class": "cloud_polling",
"loggers": ["tibber"],
"requirements": ["pyTibber==0.30.8"]
"requirements": ["pyTibber==0.31.2"]
}
@@ -40,7 +40,7 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["uiprotect", "unifi_discovery"],
"requirements": ["uiprotect==7.5.5", "unifi-discovery==1.2.0"],
"requirements": ["uiprotect==7.6.0", "unifi-discovery==1.2.0"],
"ssdp": [
{
"manufacturer": "Ubiquiti Networks",

@@ -97,6 +97,7 @@ SKU_TO_BASE_DEVICE = {
"LAP-V102S-AASR": "Vital100S", # Alt ID Model Vital100S
"LAP-V102S-WEU": "Vital100S", # Alt ID Model Vital100S
"LAP-V102S-WUK": "Vital100S", # Alt ID Model Vital100S
"LAP-V102S-AUSR": "Vital100S", # Alt ID Model Vital100S
"EverestAir": "EverestAir",
"LAP-EL551S-AUS": "EverestAir", # Alt ID Model EverestAir
"LAP-EL551S-AEUR": "EverestAir", # Alt ID Model EverestAir

@@ -6,5 +6,5 @@
"dependencies": ["application_credentials"],
"documentation": "https://www.home-assistant.io/integrations/weheat",
"iot_class": "cloud_polling",
"requirements": ["weheat==2025.3.7"]
"requirements": ["weheat==2025.4.29"]
}

@@ -7,5 +7,5 @@
"iot_class": "local_polling",
"loggers": ["holidays"],
"quality_scale": "internal",
"requirements": ["holidays==0.70"]
"requirements": ["holidays==0.72"]
}

@@ -419,13 +419,26 @@ class ZHADeviceProxy(EventBase):
@callback
def handle_zha_event(self, zha_event: ZHAEvent) -> None:
"""Handle a ZHA event."""
if ATTR_UNIQUE_ID in zha_event.data:
unique_id = zha_event.data[ATTR_UNIQUE_ID]

# Client cluster handler unique IDs in the ZHA lib were disambiguated by
# adding a suffix of `_CLIENT`. Unfortunately, this breaks existing
# automations that match the `unique_id` key. This can be removed in a
# future release with proper notice of a breaking change.
unique_id = unique_id.removesuffix("_CLIENT")
else:
unique_id = zha_event.unique_id

self.gateway_proxy.hass.bus.async_fire(
ZHA_EVENT,
{
ATTR_DEVICE_IEEE: str(zha_event.device_ieee),
ATTR_UNIQUE_ID: zha_event.unique_id,
ATTR_DEVICE_ID: self.device_id,
**zha_event.data,
# The order of these keys is intentional, `zha_event.data` can contain
# a `unique_id` key, which we explicitly replace
ATTR_UNIQUE_ID: unique_id,
},
)


@@ -278,6 +278,39 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
# and we'll handle the clean up below.
await driver_events.setup(driver)

if (old_unique_id := entry.unique_id) is not None and old_unique_id != (
new_unique_id := str(driver.controller.home_id)
):
device_registry = dr.async_get(hass)
controller_model = "Unknown model"
if (
(own_node := driver.controller.own_node)
and (
controller_device_entry := device_registry.async_get_device(
identifiers={get_device_id(driver, own_node)}
)
)
and (model := controller_device_entry.model)
):
controller_model = model
async_create_issue(
hass,
DOMAIN,
f"migrate_unique_id.{entry.entry_id}",
data={
"config_entry_id": entry.entry_id,
"config_entry_title": entry.title,
"controller_model": controller_model,
"new_unique_id": new_unique_id,
"old_unique_id": old_unique_id,
},
is_fixable=True,
severity=IssueSeverity.ERROR,
translation_key="migrate_unique_id",
)
else:
async_delete_issue(hass, DOMAIN, f"migrate_unique_id.{entry.entry_id}")

# If the listen task is already failed, we need to raise ConfigEntryNotReady
if listen_task.done():
listen_error, error_message = _get_listen_task_error(listen_task)

@@ -2,7 +2,9 @@

from __future__ import annotations

import asyncio
from collections.abc import Callable, Coroutine
from contextlib import suppress
import dataclasses
from functools import partial, wraps
from typing import Any, Concatenate, Literal, cast
@@ -69,6 +71,7 @@ from homeassistant.components.websocket_api import (
ActiveConnection,
)
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.const import CONF_URL
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
@@ -86,12 +89,16 @@ from .const import (
DATA_CLIENT,
DOMAIN,
EVENT_DEVICE_ADDED_TO_REGISTRY,
LOGGER,
RESTORE_NVM_DRIVER_READY_TIMEOUT,
USER_AGENT,
)
from .helpers import (
CannotConnect,
async_enable_statistics,
async_get_node_from_device_id,
async_get_provisioning_entry_from_device_id,
async_get_version_info,
get_device_id,
)

@@ -182,6 +189,8 @@ STRATEGY = "strategy"
# https://github.com/zwave-js/node-zwave-js/blob/master/packages/core/src/security/QR.ts#L41
MINIMUM_QR_STRING_LENGTH = 52

HARD_RESET_CONTROLLER_DRIVER_READY_TIMEOUT = 60


# Helper schemas
PLANNED_PROVISIONING_ENTRY_SCHEMA = vol.All(
@@ -2816,6 +2825,7 @@ async def websocket_hard_reset_controller(
driver: Driver,
) -> None:
"""Hard reset controller."""
unsubs: list[Callable[[], None]]

@callback
def async_cleanup() -> None:
@@ -2831,13 +2841,47 @@ async def websocket_hard_reset_controller(
connection.send_result(msg[ID], device.id)
async_cleanup()

@callback
def set_driver_ready(event: dict) -> None:
"Set the driver ready event."
wait_driver_ready.set()

wait_driver_ready = asyncio.Event()

msg[DATA_UNSUBSCRIBE] = unsubs = [
async_dispatcher_connect(
hass, EVENT_DEVICE_ADDED_TO_REGISTRY, _handle_device_added
)
),
driver.once("driver ready", set_driver_ready),
]

await driver.async_hard_reset()

with suppress(TimeoutError):
async with asyncio.timeout(HARD_RESET_CONTROLLER_DRIVER_READY_TIMEOUT):
await wait_driver_ready.wait()

# When resetting the controller, the controller home id is also changed.
# The controller state in the client is stale after resetting the controller,
# so get the new home id with a new client using the helper function.
# The client state will be refreshed by reloading the config entry,
# after the unique id of the config entry has been updated.
try:
version_info = await async_get_version_info(hass, entry.data[CONF_URL])
except CannotConnect:
# Just log this error, as there's nothing to do about it here.
# The stale unique id needs to be handled by a repair flow,
# after the config entry has been reloaded.
LOGGER.error(
"Failed to get server version, cannot update config entry"
"unique id with new home id, after controller reset"
)
else:
hass.config_entries.async_update_entry(
entry, unique_id=str(version_info.home_id)
)
await hass.config_entries.async_reload(entry.entry_id)


@websocket_api.websocket_command(
{
@@ -3043,14 +3087,28 @@ async def websocket_restore_nvm(
)
)

@callback
def set_driver_ready(event: dict) -> None:
"Set the driver ready event."
wait_driver_ready.set()

wait_driver_ready = asyncio.Event()

# Set up subscription for progress events
connection.subscriptions[msg["id"]] = async_cleanup
msg[DATA_UNSUBSCRIBE] = unsubs = [
controller.on("nvm convert progress", forward_progress),
controller.on("nvm restore progress", forward_progress),
driver.once("driver ready", set_driver_ready),
]

await controller.async_restore_nvm_base64(msg["data"])

with suppress(TimeoutError):
async with asyncio.timeout(RESTORE_NVM_DRIVER_READY_TIMEOUT):
await wait_driver_ready.wait()
await hass.config_entries.async_reload(entry.entry_id)

connection.send_message(
websocket_api.event_message(
msg[ID],
@@ -9,14 +9,13 @@ import logging
from pathlib import Path
from typing import Any

import aiohttp
from awesomeversion import AwesomeVersion
from serial.tools import list_ports
import voluptuous as vol
from zwave_js_server.client import Client
from zwave_js_server.exceptions import FailedCommand
from zwave_js_server.model.driver import Driver
from zwave_js_server.version import VersionInfo, get_server_version
from zwave_js_server.version import VersionInfo

from homeassistant.components import usb
from homeassistant.components.hassio import (
@@ -36,7 +35,6 @@ from homeassistant.const import CONF_NAME, CONF_URL
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import AbortFlow
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.service_info.hassio import HassioServiceInfo
from homeassistant.helpers.service_info.usb import UsbServiceInfo
@@ -67,7 +65,9 @@ from .const import (
CONF_USE_ADDON,
DATA_CLIENT,
DOMAIN,
RESTORE_NVM_DRIVER_READY_TIMEOUT,
)
from .helpers import CannotConnect, async_get_version_info

_LOGGER = logging.getLogger(__name__)

@@ -78,8 +78,6 @@ ADDON_SETUP_TIMEOUT = 5
ADDON_SETUP_TIMEOUT_ROUNDS = 40
CONF_EMULATE_HARDWARE = "emulate_hardware"
CONF_LOG_LEVEL = "log_level"
RESTORE_NVM_DRIVER_READY_TIMEOUT = 60
SERVER_VERSION_TIMEOUT = 10

ADDON_LOG_LEVELS = {
"error": "Error",
@@ -130,22 +128,6 @@ async def validate_input(hass: HomeAssistant, user_input: dict) -> VersionInfo:
raise InvalidInput("cannot_connect") from err


async def async_get_version_info(hass: HomeAssistant, ws_address: str) -> VersionInfo:
"""Return Z-Wave JS version info."""
try:
async with asyncio.timeout(SERVER_VERSION_TIMEOUT):
version_info: VersionInfo = await get_server_version(
ws_address, async_get_clientsession(hass)
)
except (TimeoutError, aiohttp.ClientError) as err:
# We don't want to spam the log if the add-on isn't started
# or takes a long time to start.
_LOGGER.debug("Failed to connect to Z-Wave JS server: %s", err)
raise CannotConnect from err

return version_info


def get_usb_ports() -> dict[str, str]:
"""Return a dict of USB ports and their friendly names."""
ports = list_ports.comports()
@@ -907,10 +889,6 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
"""Reset the current controller, and instruct the user to unplug it."""

if user_input is not None:
config_entry = self._reconfigure_config_entry
assert config_entry is not None
# Unload the config entry before stopping the add-on.
await self.hass.config_entries.async_unload(config_entry.entry_id)
if self.usb_path:
# USB discovery was used, so the device is already known.
await self._async_set_addon_config({CONF_ADDON_DEVICE: self.usb_path})
@@ -925,6 +903,11 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.error("Failed to reset controller: %s", err)
return self.async_abort(reason="reset_failed")

config_entry = self._reconfigure_config_entry
assert config_entry is not None
# Unload the config entry before asking the user to unplug the controller.
await self.hass.config_entries.async_unload(config_entry.entry_id)

return self.async_show_form(
step_id="instruct_unplug",
description_placeholders={
@@ -1356,10 +1339,6 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
return client.driver


class CannotConnect(HomeAssistantError):
"""Indicate connection error."""


class InvalidInput(HomeAssistantError):
"""Error to indicate input data is invalid."""


@@ -201,3 +201,7 @@ COVER_TILT_PROPERTY_KEYS: set[str | int | None] = {
WindowCoveringPropertyKey.VERTICAL_SLATS_ANGLE,
WindowCoveringPropertyKey.VERTICAL_SLATS_ANGLE_NO_POSITION,
}

# Other constants

RESTORE_NVM_DRIVER_READY_TIMEOUT = 60

@@ -1204,7 +1204,7 @@ DISCOVERY_SCHEMAS = [
property={RESET_METER_PROPERTY},
type={ValueType.BOOLEAN},
),
entity_category=EntityCategory.DIAGNOSTIC,
entity_category=EntityCategory.CONFIG,
),
ZWaveDiscoverySchema(
platform=Platform.BINARY_SENSOR,

@@ -2,11 +2,13 @@

from __future__ import annotations

import asyncio
from collections.abc import Callable
from dataclasses import astuple, dataclass
import logging
from typing import Any, cast

import aiohttp
import voluptuous as vol
from zwave_js_server.client import Client as ZwaveClient
from zwave_js_server.const import (
@@ -25,6 +27,7 @@ from zwave_js_server.model.value import (
ValueDataType,
get_value_id_str,
)
from zwave_js_server.version import VersionInfo, get_server_version

from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
@@ -38,6 +41,7 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.group import expand_entity_ids
from homeassistant.helpers.typing import ConfigType, VolSchemaType
@@ -54,6 +58,8 @@ from .const import (
LOGGER,
)

SERVER_VERSION_TIMEOUT = 10


@dataclass
class ZwaveValueID:
@@ -568,3 +574,23 @@ def get_network_identifier_for_notification(
return f"`{config_entry.title}`, with the home ID `{home_id}`,"
return f"with the home ID `{home_id}`"
return ""


async def async_get_version_info(hass: HomeAssistant, ws_address: str) -> VersionInfo:
"""Return Z-Wave JS version info."""
try:
async with asyncio.timeout(SERVER_VERSION_TIMEOUT):
version_info: VersionInfo = await get_server_version(
ws_address, async_get_clientsession(hass)
)
except (TimeoutError, aiohttp.ClientError) as err:
# We don't want to spam the log if the add-on isn't started
# or takes a long time to start.
LOGGER.debug("Failed to connect to Z-Wave JS server: %s", err)
raise CannotConnect from err

return version_info


class CannotConnect(HomeAssistantError):
"""Indicate connection error."""

@@ -57,6 +57,47 @@ class DeviceConfigFileChangedFlow(RepairsFlow):
)


class MigrateUniqueIDFlow(RepairsFlow):
"""Handler for an issue fixing flow."""

def __init__(self, data: dict[str, str]) -> None:
"""Initialize."""
self.description_placeholders: dict[str, str] = {
"config_entry_title": data["config_entry_title"],
"controller_model": data["controller_model"],
"new_unique_id": data["new_unique_id"],
"old_unique_id": data["old_unique_id"],
}
self._config_entry_id: str = data["config_entry_id"]

async def async_step_init(
self, user_input: dict[str, str] | None = None
) -> data_entry_flow.FlowResult:
"""Handle the first step of a fix flow."""
return await self.async_step_confirm()

async def async_step_confirm(
self, user_input: dict[str, str] | None = None
) -> data_entry_flow.FlowResult:
"""Handle the confirm step of a fix flow."""
if user_input is not None:
config_entry = self.hass.config_entries.async_get_entry(
self._config_entry_id
)
# If config entry was removed, we can ignore the issue.
if config_entry is not None:
self.hass.config_entries.async_update_entry(
config_entry,
unique_id=self.description_placeholders["new_unique_id"],
)
return self.async_create_entry(data={})

return self.async_show_form(
step_id="confirm",
description_placeholders=self.description_placeholders,
)


async def async_create_fix_flow(
hass: HomeAssistant, issue_id: str, data: dict[str, str] | None
) -> RepairsFlow:
@@ -65,4 +106,7 @@ async def async_create_fix_flow(
if issue_id.split(".")[0] == "device_config_file_changed":
assert data
return DeviceConfigFileChangedFlow(data)
if issue_id.split(".")[0] == "migrate_unique_id":
assert data
return MigrateUniqueIDFlow(data)
return ConfirmRepairFlow()

@@ -273,6 +273,17 @@
"invalid_server_version": {
"description": "The version of Z-Wave Server you are currently running is too old for this version of Home Assistant. Please update the Z-Wave Server to the latest version to fix this issue.",
"title": "Newer version of Z-Wave Server needed"
},
"migrate_unique_id": {
"fix_flow": {
"step": {
"confirm": {
"description": "A Z-Wave controller of model {controller_model} with a different ID ({new_unique_id}) than the previously connected controller ({old_unique_id}) was connected to the {config_entry_title} configuration entry.\n\nReasons for a different controller ID could be:\n\n1. The controller was factory reset, with a 3rd party application.\n2. A controller Non Volatile Memory (NVM) backup was restored to the controller, with a 3rd party application.\n3. A different controller was connected to this configuration entry.\n\nIf a different controller was connected, you should instead set up a new configuration entry for the new controller.\n\nIf you are sure that the current controller is the correct controller you can confirm this by pressing Submit, and the configuration entry will remember the new controller ID.",
"title": "An unknown controller was detected"
}
}
},
"title": "An unknown controller was detected"
}
},
"services": {
@@ -25,7 +25,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2025
MINOR_VERSION: Final = 5
PATCH_VERSION: Final = "0b6"
PATCH_VERSION: Final = "2"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)

@@ -575,9 +575,11 @@ class DeviceRegistryItems[_EntryTypeT: (DeviceEntry, DeletedDeviceEntry)](
"""Unindex an entry."""
old_entry = self.data[key]
for connection in old_entry.connections:
del self._connections[connection]
if connection in self._connections:
del self._connections[connection]
for identifier in old_entry.identifiers:
del self._identifiers[identifier]
if identifier in self._identifiers:
del self._identifiers[identifier]

def get_entry(
self,

@@ -1,8 +1,8 @@
# Automatically generated by gen_requirements_all.py, do not edit

aiodhcpwatcher==1.1.1
aiodiscover==2.6.1
aiodns==3.3.0
aiodiscover==2.7.0
aiodns==3.4.0
aiohasupervisor==0.3.1
aiohttp-asyncmdnsresolver==0.1.1
aiohttp-fast-zlib==0.2.3
@@ -38,8 +38,8 @@ habluetooth==3.48.2
hass-nabucasa==0.96.0
hassil==2.2.3
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20250502.1
home-assistant-intents==2025.4.30
home-assistant-frontend==20250516.0
home-assistant-intents==2025.5.7
httpx==0.28.1
ifaddr==0.2.0
Jinja2==3.1.6
@@ -70,7 +70,7 @@ typing-extensions>=4.13.0,<5.0
ulid-transform==1.4.0
urllib3>=1.26.5,<2
uv==0.7.1
voluptuous-openapi==0.0.7
voluptuous-openapi==0.1.0
voluptuous-serialize==2.6.0
voluptuous==0.15.2
webrtc-models==0.3.0
@@ -217,3 +217,8 @@ aiofiles>=24.1.0
# https://github.com/aio-libs/multidict/issues/1134
# https://github.com/aio-libs/multidict/issues/1131
multidict>=6.4.2

# rpds-py > 0.25.0 requires cargo 1.84.0
# Stable Alpine current only ships cargo 1.83.0
# No wheels upstream available for armhf & armv7
rpds-py==0.24.0

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "homeassistant"
version = "2025.5.0b6"
version = "2025.5.2"
license = "Apache-2.0"
license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
description = "Open-source home automation platform running on Python 3."
@@ -23,7 +23,7 @@ classifiers = [
]
requires-python = ">=3.13.2"
dependencies = [
"aiodns==3.3.0",
"aiodns==3.4.0",
# Integrations may depend on hassio integration without listing it to
# change behavior based on presence of supervisor. Deprecated with #127228
# Lib can be removed with 2025.11
@@ -66,7 +66,7 @@ dependencies = [
# onboarding->cloud->assist_pipeline->conversation->home_assistant_intents. Onboarding needs
# to be setup in stage 0, but we don't want to also promote cloud with all its
# dependencies to stage 0.
"home-assistant-intents==2025.4.30",
"home-assistant-intents==2025.5.7",
"ifaddr==0.2.0",
"Jinja2==3.1.6",
"lru-dict==1.3.0",
@@ -120,7 +120,7 @@ dependencies = [
"uv==0.7.1",
"voluptuous==0.15.2",
"voluptuous-serialize==2.6.0",
"voluptuous-openapi==0.0.7",
"voluptuous-openapi==0.1.0",
"yarl==1.20.0",
"webrtc-models==0.3.0",
"zeroconf==0.147.0",

requirements.txt (generated)
@@ -3,7 +3,7 @@
-c homeassistant/package_constraints.txt

# Home Assistant Core
aiodns==3.3.0
aiodns==3.4.0
aiohasupervisor==0.3.1
aiohttp==3.11.18
aiohttp_cors==0.7.0
@@ -27,7 +27,7 @@ hass-nabucasa==0.96.0
hassil==2.2.3
httpx==0.28.1
home-assistant-bluetooth==1.13.1
home-assistant-intents==2025.4.30
home-assistant-intents==2025.5.7
ifaddr==0.2.0
Jinja2==3.1.6
lru-dict==1.3.0
@@ -57,7 +57,7 @@ urllib3>=1.26.5,<2
uv==0.7.1
voluptuous==0.15.2
voluptuous-serialize==2.6.0
voluptuous-openapi==0.0.7
voluptuous-openapi==0.1.0
yarl==1.20.0
webrtc-models==0.3.0
zeroconf==0.147.0
requirements_all.txt (generated)
@@ -214,16 +214,16 @@ aiobafi6==0.9.0
aiobotocore==2.21.1

# homeassistant.components.comelit
aiocomelit==0.12.0
aiocomelit==0.12.1

# homeassistant.components.dhcp
aiodhcpwatcher==1.1.1

# homeassistant.components.dhcp
aiodiscover==2.6.1
aiodiscover==2.7.0

# homeassistant.components.dnsip
aiodns==3.3.0
aiodns==3.4.0

# homeassistant.components.duke_energy
aiodukeenergy==0.3.0
@@ -628,7 +628,7 @@ blockchain==1.4.4
bluecurrent-api==1.2.3

# homeassistant.components.bluemaestro
bluemaestro-ble==0.4.0
bluemaestro-ble==0.4.1

# homeassistant.components.decora
# bluepy==1.3.0
@@ -762,7 +762,7 @@ debugpy==1.8.13
# decora==0.6

# homeassistant.components.ecovacs
deebot-client==13.0.1
deebot-client==13.2.0

# homeassistant.components.ihc
# homeassistant.components.namecheapdns
@@ -782,7 +782,7 @@ denonavr==1.0.1
devialet==1.5.7

# homeassistant.components.devolo_home_control
devolo-home-control-api==0.18.3
devolo-home-control-api==0.19.0

# homeassistant.components.devolo_home_network
devolo-plc-api==1.5.1
@@ -958,7 +958,7 @@ fnv-hash-fast==1.5.0
foobot_async==1.0.0

# homeassistant.components.forecast_solar
forecast-solar==4.1.0
forecast-solar==4.2.0

# homeassistant.components.fortios
fortiosapi==1.0.5
@@ -986,7 +986,7 @@ gardena-bluetooth==1.6.0
gassist-text==0.0.12

# homeassistant.components.google
gcal-sync==7.0.0
gcal-sync==7.0.1

# homeassistant.components.geniushub
geniushub-client==0.7.1
@@ -1158,13 +1158,13 @@ hole==0.8.0

# homeassistant.components.holiday
# homeassistant.components.workday
holidays==0.70
holidays==0.72

# homeassistant.components.frontend
home-assistant-frontend==20250502.1
home-assistant-frontend==20250516.0

# homeassistant.components.conversation
home-assistant-intents==2025.4.30
home-assistant-intents==2025.5.7

# homeassistant.components.homematicip_cloud
homematicip==2.0.1.1
@@ -1200,7 +1200,7 @@ ibmiotf==0.3.4
# homeassistant.components.local_calendar
# homeassistant.components.local_todo
# homeassistant.components.remote_calendar
ical==9.2.0
ical==9.2.4

# homeassistant.components.caldav
icalendar==6.1.0
@@ -1427,7 +1427,7 @@ microBeesPy==0.3.5
mill-local==0.3.0

# homeassistant.components.mill
millheater==0.12.3
millheater==0.12.5

# homeassistant.components.minio
minio==7.1.12
@@ -1804,7 +1804,7 @@ pyRFXtrx==0.31.1
pySDCP==1

# homeassistant.components.tibber
pyTibber==0.30.8
pyTibber==0.31.2

# homeassistant.components.dlink
pyW215==0.7.0
@@ -1955,7 +1955,7 @@ pyeiscp==0.0.7
pyemoncms==0.1.1

# homeassistant.components.enphase_envoy
pyenphase==1.26.0
pyenphase==1.26.1

# homeassistant.components.envisalink
pyenvisalink==4.7
@@ -2093,7 +2093,7 @@ pykwb==0.0.8
pylacrosse==0.4

# homeassistant.components.lamarzocco
pylamarzocco==2.0.0
pylamarzocco==2.0.3

# homeassistant.components.lastfm
pylast==5.1.0
@@ -2293,7 +2293,7 @@ pysaj==0.0.16
pyschlage==2025.4.0

# homeassistant.components.sensibo
pysensibo==1.1.0
pysensibo==1.2.1

# homeassistant.components.serial
pyserial-asyncio-fast==0.16
@@ -2326,7 +2326,7 @@ pysma==0.7.5
pysmappee==0.2.29

# homeassistant.components.smartthings
pysmartthings==3.2.0
pysmartthings==3.2.2

# homeassistant.components.smarty
pysmarty2==0.10.2
@@ -2437,7 +2437,7 @@ python-juicenet==1.1.0
python-kasa[speedups]==0.10.2

# homeassistant.components.linkplay
python-linkplay==0.2.4
python-linkplay==0.2.5

# homeassistant.components.lirc
# python-lirc==1.2.3
@@ -2486,7 +2486,7 @@ python-roborock==2.18.2
python-smarttub==0.0.39

# homeassistant.components.snoo
python-snoo==0.6.5
python-snoo==0.6.6

# homeassistant.components.songpal
python-songpal==0.16.2
@@ -2631,13 +2631,13 @@ refoss-ha==1.2.5
regenmaschine==2024.03.0

# homeassistant.components.renault
renault-api==0.3.0
renault-api==0.3.1

# homeassistant.components.renson
renson-endura-delta==1.7.2

# homeassistant.components.reolink
reolink-aio==0.13.2
reolink-aio==0.13.3

# homeassistant.components.idteck_prox
rfk101py==0.0.1
@@ -2975,7 +2975,7 @@ typedmonarchmoney==0.4.4
uasiren==0.0.1

# homeassistant.components.unifiprotect
uiprotect==7.5.5
uiprotect==7.6.0

# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7
@@ -3074,7 +3074,7 @@ webio-api==0.1.11
webmin-xmlrpc==0.0.2

# homeassistant.components.weheat
weheat==2025.3.7
weheat==2025.4.29

# homeassistant.components.whirlpool
whirlpool-sixth-sense==0.20.0
requirements_test_all.txt (generated)
@@ -202,16 +202,16 @@ aiobafi6==0.9.0
aiobotocore==2.21.1

# homeassistant.components.comelit
aiocomelit==0.12.0
aiocomelit==0.12.1

# homeassistant.components.dhcp
aiodhcpwatcher==1.1.1

# homeassistant.components.dhcp
aiodiscover==2.6.1
aiodiscover==2.7.0

# homeassistant.components.dnsip
aiodns==3.3.0
aiodns==3.4.0

# homeassistant.components.duke_energy
aiodukeenergy==0.3.0
@@ -556,7 +556,7 @@ blinkpy==0.23.0
bluecurrent-api==1.2.3

# homeassistant.components.bluemaestro
bluemaestro-ble==0.4.0
bluemaestro-ble==0.4.1

# homeassistant.components.bluetooth
bluetooth-adapters==0.21.4
@@ -653,7 +653,7 @@ dbus-fast==2.43.0
debugpy==1.8.13

# homeassistant.components.ecovacs
deebot-client==13.0.1
deebot-client==13.2.0

# homeassistant.components.ihc
# homeassistant.components.namecheapdns
@@ -673,7 +673,7 @@ denonavr==1.0.1
devialet==1.5.7

# homeassistant.components.devolo_home_control
devolo-home-control-api==0.18.3
devolo-home-control-api==0.19.0

# homeassistant.components.devolo_home_network
devolo-plc-api==1.5.1
@@ -818,7 +818,7 @@ fnv-hash-fast==1.5.0
foobot_async==1.0.0

# homeassistant.components.forecast_solar
forecast-solar==4.1.0
forecast-solar==4.2.0

# homeassistant.components.freebox
freebox-api==1.2.2
@@ -840,7 +840,7 @@ gardena-bluetooth==1.6.0
gassist-text==0.0.12

# homeassistant.components.google
gcal-sync==7.0.0
gcal-sync==7.0.1

# homeassistant.components.geniushub
geniushub-client==0.7.1
@@ -988,13 +988,13 @@ hole==0.8.0

# homeassistant.components.holiday
# homeassistant.components.workday
holidays==0.70
holidays==0.72

# homeassistant.components.frontend
home-assistant-frontend==20250502.1
home-assistant-frontend==20250516.0

# homeassistant.components.conversation
home-assistant-intents==2025.4.30
home-assistant-intents==2025.5.7

# homeassistant.components.homematicip_cloud
homematicip==2.0.1.1
@@ -1021,7 +1021,7 @@ ibeacon-ble==1.2.0
# homeassistant.components.local_calendar
# homeassistant.components.local_todo
# homeassistant.components.remote_calendar
ical==9.2.0
ical==9.2.4

# homeassistant.components.caldav
icalendar==6.1.0
@@ -1200,7 +1200,7 @@ microBeesPy==0.3.5
mill-local==0.3.0

# homeassistant.components.mill
millheater==0.12.3
millheater==0.12.5

# homeassistant.components.minio
minio==7.1.12
@@ -1491,7 +1491,7 @@ pyHomee==1.2.8
pyRFXtrx==0.31.1

# homeassistant.components.tibber
pyTibber==0.30.8
pyTibber==0.31.2

# homeassistant.components.dlink
pyW215==0.7.0
@@ -1600,7 +1600,7 @@ pyeiscp==0.0.7
pyemoncms==0.1.1

# homeassistant.components.enphase_envoy
pyenphase==1.26.0
pyenphase==1.26.1

# homeassistant.components.everlights
pyeverlights==0.1.0
@@ -1708,7 +1708,7 @@ pykrakenapi==0.1.8
pykulersky==0.5.8

# homeassistant.components.lamarzocco
pylamarzocco==2.0.0
pylamarzocco==2.0.3

# homeassistant.components.lastfm
pylast==5.1.0
@@ -1875,7 +1875,7 @@ pysabnzbd==1.1.1
pyschlage==2025.4.0

# homeassistant.components.sensibo
pysensibo==1.1.0
pysensibo==1.2.1

# homeassistant.components.acer_projector
# homeassistant.components.crownstone
@@ -1899,7 +1899,7 @@ pysma==0.7.5
pysmappee==0.2.29

# homeassistant.components.smartthings
pysmartthings==3.2.0
pysmartthings==3.2.2

# homeassistant.components.smarty
pysmarty2==0.10.2
@@ -1980,7 +1980,7 @@ python-juicenet==1.1.0
python-kasa[speedups]==0.10.2

# homeassistant.components.linkplay
python-linkplay==0.2.4
python-linkplay==0.2.5

# homeassistant.components.matter
python-matter-server==7.0.0
@@ -2023,7 +2023,7 @@ python-roborock==2.18.2
python-smarttub==0.0.39

# homeassistant.components.snoo
python-snoo==0.6.5
python-snoo==0.6.6

# homeassistant.components.songpal
python-songpal==0.16.2
@@ -2138,13 +2138,13 @@ refoss-ha==1.2.5
regenmaschine==2024.03.0

# homeassistant.components.renault
renault-api==0.3.0
renault-api==0.3.1

# homeassistant.components.renson
renson-endura-delta==1.7.2

# homeassistant.components.reolink
reolink-aio==0.13.2
reolink-aio==0.13.3

# homeassistant.components.rflink
rflink==0.0.66
@@ -2404,7 +2404,7 @@ typedmonarchmoney==0.4.4
uasiren==0.0.1

# homeassistant.components.unifiprotect
uiprotect==7.5.5
uiprotect==7.6.0

# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7
@@ -2485,7 +2485,7 @@ webio-api==0.1.11
webmin-xmlrpc==0.0.2

# homeassistant.components.weheat
weheat==2025.3.7
weheat==2025.4.29

# homeassistant.components.whirlpool
whirlpool-sixth-sense==0.20.0

@@ -246,6 +246,11 @@ aiofiles>=24.1.0
# https://github.com/aio-libs/multidict/issues/1134
# https://github.com/aio-libs/multidict/issues/1131
multidict>=6.4.2

# rpds-py > 0.25.0 requires cargo 1.84.0
# Stable Alpine current only ships cargo 1.83.0
# No wheels upstream available for armhf & armv7
rpds-py==0.24.0
"""

GENERATED_MESSAGE = (
script/hassfest/docker/Dockerfile (generated)
@@ -25,7 +25,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.7.1,source=/uv,target=/bin/uv \
-c /usr/src/homeassistant/homeassistant/package_constraints.txt \
-r /usr/src/homeassistant/requirements.txt \
stdlib-list==0.10.0 pipdeptree==2.25.1 tqdm==4.67.1 ruff==0.11.0 \
PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.4.30 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2
PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.5.7 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2

LABEL "name"="hassfest"
LABEL "maintainer"="Home Assistant <hello@home-assistant.io>"
Binary file not shown.
Binary file not shown.
@@ -177,7 +177,7 @@ async def _test_downloading_encrypted_backup(
enc_metadata = json.loads(outer_tar.extractfile("./backup.json").read())
assert enc_metadata["protected"] is True
with (
outer_tar.extractfile("core.tar.gz") as inner_tar_file,
outer_tar.extractfile("homeassistant.tar.gz") as inner_tar_file,
pytest.raises(tarfile.ReadError, match="file could not be opened"),
):
# pylint: disable-next=consider-using-with
@@ -209,7 +209,7 @@ async def _test_downloading_encrypted_backup(
dec_metadata = json.loads(outer_tar.extractfile("./backup.json").read())
assert dec_metadata == enc_metadata | {"protected": False}
with (
outer_tar.extractfile("core.tar.gz") as inner_tar_file,
outer_tar.extractfile("homeassistant.tar.gz") as inner_tar_file,
tarfile.open(fileobj=inner_tar_file, mode="r") as inner_tar,
):
assert inner_tar.getnames() == [

@@ -174,7 +174,10 @@ async def test_decrypted_backup_streamer(hass: HomeAssistant) -> None:
)
encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@@ -218,7 +221,10 @@ async def test_decrypted_backup_streamer_interrupt_stuck_reader(
"""Test the decrypted backup streamer."""
encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@@ -253,7 +259,10 @@ async def test_decrypted_backup_streamer_interrupt_stuck_writer(
"""Test the decrypted backup streamer."""
encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@@ -283,7 +292,10 @@ async def test_decrypted_backup_streamer_wrong_password(hass: HomeAssistant) ->
"""Test the decrypted backup streamer with wrong password."""
encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@@ -320,7 +332,10 @@ async def test_encrypted_backup_streamer(hass: HomeAssistant) -> None:
)
encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@@ -353,15 +368,16 @@ async def test_encrypted_backup_streamer(hass: HomeAssistant) -> None:
bytes.fromhex("00000000000000000000000000000000"),
)
encryptor = EncryptedBackupStreamer(hass, backup, open_backup, "hunter2")
assert encryptor.backup() == dataclasses.replace(
backup, protected=True, size=backup.size + len(expected_padding)
)

encrypted_stream = await encryptor.open_stream()
encrypted_output = b""
async for chunk in encrypted_stream:
encrypted_output += chunk
await encryptor.wait()
assert encryptor.backup() == dataclasses.replace(
backup, protected=True, size=backup.size + len(expected_padding)
)

encrypted_stream = await encryptor.open_stream()
encrypted_output = b""
async for chunk in encrypted_stream:
encrypted_output += chunk
await encryptor.wait()

# Expect the output to match the stored encrypted backup file, with additional
# padding.
@@ -377,7 +393,10 @@ async def test_encrypted_backup_streamer_interrupt_stuck_reader(
"test_backups/c0cb53bd.tar.decrypted", DOMAIN
)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@@ -414,7 +433,10 @@ async def test_encrypted_backup_streamer_interrupt_stuck_writer(
"test_backups/c0cb53bd.tar.decrypted", DOMAIN
)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@@ -447,7 +469,10 @@ async def test_encrypted_backup_streamer_random_nonce(hass: HomeAssistant) -> No
)
encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
@@ -490,7 +515,7 @@ async def test_encrypted_backup_streamer_random_nonce(hass: HomeAssistant) -> No
await encryptor1.wait()
await encryptor2.wait()

# Output from the two streames should differ but have the same length.
# Output from the two streams should differ but have the same length.
assert encrypted_output1 != encrypted_output3
assert len(encrypted_output1) == len(encrypted_output3)

@@ -508,7 +533,10 @@ async def test_encrypted_backup_streamer_error(hass: HomeAssistant) -> None:
"test_backups/c0cb53bd.tar.decrypted", DOMAIN
)
backup = AgentBackup(
addons=["addon_1", "addon_2"],
addons=[
AddonInfo(name="Core 1", slug="core1", version="1.0.0"),
AddonInfo(name="Core 2", slug="core2", version="1.0.0"),
],
backup_id="1234",
date="2024-12-02T07:23:58.261875-05:00",
database_included=False,
Some files were not shown because too many files have changed in this diff.