forked from home-assistant/core
Compare commits
240 Commits
config_sub...2025.1.4
Commits (SHA1):

6145ea2323, 223b437cb9, b9443fa204, acbbb19788, 7590a868b9, 4b13c20e74, 4cf1b1a707, 1f8129f4b8,
2e4a19b058, 0caa1ed825, e7a4f5fd27, 0512fc5e0c, 8440a27152, 7af7219b01, 3e1d13b6ad, d9e6549ad5,
3c534a73f5, 92b786e8cf, 4ed027b1cc, b9b9322c91, 3922b8eb80, 5d1e2d17da, b1445e5926, 8101fee9bb,
670371ff38, f8eb42a094, ca891bfc3e, 6da6de6a35, 1bf1804492, 11205f1c9d, 84b3db1674, a42c2b2986,
480045887a, 4f5235cbd4, 83ab6b8ea2, cc0989b50e, 44046c5f83, 0bd03346e8, c6cde13615, 0e37e04928,
bef545259e, d77ec8ffbe, 75a1a46a49, 2b636423d9, ed4c54a700, 1d22fa9b45, 5356ffa539, 0660eae6f4,
56f54cdccf, 48c23c2e79, 93c5915faa, 8865fc0c33, 9680abf51e, c687a6f669, bceccd85ee, 0027d907a4,
5d201406cb, 30924b561a, 1eddb4a21b, 42cdd25d90, b8b7daff5a, 7f3f550b7b, 3c14e2f0a8, 9601455d9f,
902bd57b4b, ab071d1c1b, 2c02eefa11, 44808c02f9, d59a91a905, 298f059488, 7a5525951d, 9a9514d53b,
5337ab2e72, b815899fdc, 81a669c163, 188def51c6, eb345971b4, 9288dce7ed, 4867d3a187, c40771ba6a,
2fc489d17d, 279785b22e, e5c986171b, 58805f721c, 29989e9034, fbd031a03d, fe1ce39831, 914c6459dc,
43ffdd0eef, 39d16ed5ce, 07f3d939e3, eda60073ee, 09ffa38ddf, b32a791ea4, a4ea25631a, bd8ea646a9,
538a2ea057, b461bc2fb5, 103960e0a7, 1c4273ce91, 0f0209d4bb, 27b8b8458b, c022d91baa, 0daac09008,
ca8416fe50, a14f6faaaf, a9a14381d3, a4d0794fe4, 9ead6fe362, 017679abe1, 0bd7b793fe, c46a70fdcf,
8c2ec5e7c8, 3063f0b565, aafc1ff074, 45142b0cc0, a412acec0e, ac4bd32137, 7e1e63374f, 03fd6a901b,
46b2830699, b416ae1387, 962b880146, 9c98125d20, c9f1fee6bb, 9b8ed9643f, 7ea7178aa9, c5746291cc,
1af384bc0a, ea82c1b73e, 96936f5f4a, 316f93f208, f719a14537, a830a14342, 1b67d51e24, e1f6475623,
59a3fe857b, f364e29148, 47190e4ac1, 7fa1983da0, 9b906e94c7, 5ac4d5bef7, 995e222959, 61ac8e7e8c,
67ec71031d, 59f866bcf7, d75d970fc7, 0a13516ddd, 21aca3c146, faf9c2ee40, e89a1da462, 8ace126d9f,
ca6bae6b15, c9ba267fec, 0e79c17cb8, 4cb413521d, f97439eaab, 568b637dc5, 3a8f71a64a, fea3dfda94,
554cdd1784, ce7a0650e4, 5895aa4cde, bd5477729a, 2e21ac7001, ab6394b26c, 0ae4a9a911, f709989717,
952363eca3, a7995e0093, 1064ef9dc6, c2f06fbd47, a36fd09644, b89995a79f, c908f823c5, 229c32b0da,
e303a9a2b5, 54fa30c2b8, fbd6cf7244, c10175e25c, 82f0e8cc19, 623e1b08b8, 0c73251004, d9057fc43e,
077c9e62b4, 7456ce1c01, a627fa70a7, c402eaec3f, ea51ecd384, 0873d27d7b, 45fd7fb6d5, e22685640c,
5756166545, 2f8a92c725, cf9ccc6fb4, b05b9b9a33, 352d5d14a3, 52e47f55c8, 0470bff9a2, a38839b420,
394b2be40a, 291dd6dc66, ef87366346, bd243f68a4, 951baa3972, 1874eec8b3, 3120a90f26, 7032361bf5,
bd786b53ee, f6a9cd38c0, 1a909d3a8a, b84ae2abc3, 15b80c59fc, c11bdcc949, 1957ab1ccf, ef2af44795,
f0e8360401, 03fb136218, d415b7bc8d, 9242b67e0d, 6e7d095831, ef05133a66, 7b2fc282e5, 4ca17dbb9e,
5d7a22fa76, 502fbe65ee, ce83071900, 4f1e9b2338, f23bc51b88, 44150e9fd7, cf9686a802, 657e5b73b6,
d3666ecf8a, bed186cce4, 2b8240746a, efabb82cb6, 80955ba821, bb371c87d5, 7ce563b0b4, c2f6e5036e
`homeassistant/bootstrap.py`

@@ -89,7 +89,7 @@ from .helpers import (
 )
 from .helpers.dispatcher import async_dispatcher_send_internal
 from .helpers.storage import get_internal_store_manager
-from .helpers.system_info import async_get_system_info, is_official_image
+from .helpers.system_info import async_get_system_info
 from .helpers.typing import ConfigType
 from .setup import (
     # _setup_started is marked as protected to make it clear
@@ -106,6 +106,7 @@ from .util.async_ import create_eager_task
 from .util.hass_dict import HassKey
 from .util.logging import async_activate_log_queue_handler
 from .util.package import async_get_user_site, is_docker_env, is_virtual_env
+from .util.system_info import is_official_image
 
 with contextlib.suppress(ImportError):
     # Ensure anyio backend is imported to avoid it being imported in the event loop
`homeassistant/components/acaia/manifest.json`

@@ -26,5 +26,5 @@
   "iot_class": "local_push",
   "loggers": ["aioacaia"],
   "quality_scale": "platinum",
-  "requirements": ["aioacaia==0.1.11"]
+  "requirements": ["aioacaia==0.1.13"]
 }
`homeassistant/components/airzone/manifest.json`

@@ -11,5 +11,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airzone",
   "iot_class": "local_polling",
   "loggers": ["aioairzone"],
-  "requirements": ["aioairzone==0.9.7"]
+  "requirements": ["aioairzone==0.9.9"]
 }
`homeassistant/components/analytics_insights/manifest.json`

@@ -7,6 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["python_homeassistant_analytics"],
-  "requirements": ["python-homeassistant-analytics==0.8.0"],
+  "requirements": ["python-homeassistant-analytics==0.8.1"],
   "single_config_entry": true
 }
`homeassistant/components/androidtv_remote/config_flow.py`

@@ -156,7 +156,12 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
         # and one of them, which could end up being in discovery_info.host, is from a
         # different device. If any of the discovery_info.ip_addresses matches the
         # existing host, don't update the host.
-        if existing_config_entry and len(discovery_info.ip_addresses) > 1:
+        if (
+            existing_config_entry
+            # Ignored entries don't have host
+            and CONF_HOST in existing_config_entry.data
+            and len(discovery_info.ip_addresses) > 1
+        ):
             existing_host = existing_config_entry.data[CONF_HOST]
             if existing_host != self.host:
                 if existing_host in [
`homeassistant/components/androidtv_remote/strings.json`

@@ -44,12 +44,12 @@
       }
     },
     "apps": {
-      "title": "Configure Android Apps",
-      "description": "Configure application id {app_id}",
+      "title": "Configure Android apps",
+      "description": "Configure application ID {app_id}",
       "data": {
-        "app_name": "Application Name",
+        "app_name": "Application name",
         "app_id": "Application ID",
-        "app_icon": "Application Icon",
+        "app_icon": "Application icon",
         "app_delete": "Check to delete this application"
       }
     }
`homeassistant/components/apcupsd/coordinator.py`

@@ -44,7 +44,10 @@ class APCUPSdData(dict[str, str]):
     @property
     def serial_no(self) -> str | None:
         """Return the unique serial number of the UPS, if available."""
-        return self.get("SERIALNO")
+        sn = self.get("SERIALNO")
+        # We had user reports that some UPS models simply return "Blank" as serial number, in
+        # which case we fall back to `None` to indicate that it is actually not available.
+        return None if sn == "Blank" else sn
 
 
 class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):
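The apcupsd change above is a small instance of sentinel normalization: some devices report a placeholder string where a missing value is meant. A minimal sketch of the same idea, with a function name of our own choosing rather than anything from the integration:

```python
def normalize_serial(raw: str | None) -> str | None:
    # Treat the literal sentinel "Blank" as a missing value, mirroring the
    # serial_no property in the diff above.
    return None if raw == "Blank" else raw


assert normalize_serial("Blank") is None
assert normalize_serial("AS1234567890") == "AS1234567890"
```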
`homeassistant/components/apple_tv/config_flow.py`

@@ -98,7 +98,6 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
     VERSION = 1
 
     scan_filter: str | None = None
-    all_identifiers: set[str]
     atv: BaseConfig | None = None
     atv_identifiers: list[str] | None = None
     _host: str  # host in zeroconf discovery info, should not be accessed by other flows
@@ -118,6 +117,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
     def __init__(self) -> None:
         """Initialize a new AppleTVConfigFlow."""
         self.credentials: dict[int, str | None] = {}  # Protocol -> credentials
+        self.all_identifiers: set[str] = set()
 
     @property
     def device_identifier(self) -> str | None:
`homeassistant/components/apprise/manifest.json`

@@ -6,5 +6,5 @@
   "iot_class": "cloud_push",
   "loggers": ["apprise"],
   "quality_scale": "legacy",
-  "requirements": ["apprise==1.9.0"]
+  "requirements": ["apprise==1.9.1"]
 }
`homeassistant/components/aprilaire/coordinator.py`

@@ -120,6 +120,8 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
         """Wait for the client to be ready."""
 
+        if not self.data or Attribute.MAC_ADDRESS not in self.data:
+            await self.client.read_mac_address()
 
         data = await self.client.wait_for_response(
             FunctionalDomain.IDENTIFICATION, 2, WAIT_TIMEOUT
         )
@@ -130,12 +132,9 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
 
                 return False
 
+        if not self.data or Attribute.NAME not in self.data:
             await self.client.wait_for_response(
                 FunctionalDomain.IDENTIFICATION, 4, WAIT_TIMEOUT
             )
 
+        if not self.data or Attribute.THERMOSTAT_MODES not in self.data:
             await self.client.read_thermostat_iaq_available()
 
             await self.client.wait_for_response(
                 FunctionalDomain.CONTROL, 7, WAIT_TIMEOUT
             )
@@ -144,10 +143,16 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
         if (
             not self.data
             or Attribute.INDOOR_TEMPERATURE_CONTROLLING_SENSOR_STATUS not in self.data
         ):
             await self.client.read_sensors()
 
+            await self.client.wait_for_response(
+                FunctionalDomain.SENSORS, 2, WAIT_TIMEOUT
+            )
 
         await self.client.read_thermostat_status()
 
         await self.client.read_iaq_status()
 
         await ready_callback(True)
 
         return True
`homeassistant/components/aprilaire/humidifier.py`

@@ -50,7 +50,7 @@ async def async_setup_entry(
 
     descriptions: list[AprilaireHumidifierDescription] = []
 
-    if coordinator.data.get(Attribute.HUMIDIFICATION_AVAILABLE) in (0, 1, 2):
+    if coordinator.data.get(Attribute.HUMIDIFICATION_AVAILABLE) in (1, 2):
         descriptions.append(
             AprilaireHumidifierDescription(
                 key="humidifier",
@@ -67,7 +67,7 @@ async def async_setup_entry(
         )
     )
 
-    if coordinator.data.get(Attribute.DEHUMIDIFICATION_AVAILABLE) in (0, 1):
+    if coordinator.data.get(Attribute.DEHUMIDIFICATION_AVAILABLE) == 1:
         descriptions.append(
             AprilaireHumidifierDescription(
                 key="dehumidifier",
`homeassistant/components/aprilaire/manifest.json`

@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["pyaprilaire"],
-  "requirements": ["pyaprilaire==0.7.4"]
+  "requirements": ["pyaprilaire==0.7.7"]
 }
`homeassistant/components/apsystems/number.py`

@@ -2,6 +2,8 @@
 
 from __future__ import annotations
 
+from aiohttp import ClientConnectorError
+
 from homeassistant.components.number import NumberDeviceClass, NumberEntity, NumberMode
 from homeassistant.const import UnitOfPower
 from homeassistant.core import HomeAssistant
@@ -45,7 +47,13 @@ class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity):
 
     async def async_update(self) -> None:
         """Set the state with the value fetched from the inverter."""
-        self._attr_native_value = await self._api.get_max_power()
+        try:
+            status = await self._api.get_max_power()
+        except (TimeoutError, ClientConnectorError):
+            self._attr_available = False
+        else:
+            self._attr_available = True
+            self._attr_native_value = status
 
     async def async_set_native_value(self, value: float) -> None:
         """Set the desired output power."""
`homeassistant/components/assist_pipeline/pipeline.py`

@@ -1017,9 +1017,18 @@ class PipelineRun:
             raise RuntimeError("Recognize intent was not prepared")
 
         if self.pipeline.conversation_language == MATCH_ALL:
-            # LLMs support all languages ('*') so use pipeline language for
-            # intent fallback.
-            input_language = self.pipeline.language
+            # LLMs support all languages ('*') so use languages from the
+            # pipeline for intent fallback.
+            #
+            # We prioritize the STT and TTS languages because they may be more
+            # specific, such as "zh-CN" instead of just "zh". This is necessary
+            # for languages whose intents are split out by region when
+            # preferring local intent matching.
+            input_language = (
+                self.pipeline.stt_language
+                or self.pipeline.tts_language
+                or self.pipeline.language
+            )
         else:
             input_language = self.pipeline.conversation_language
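The pipeline change above replaces a single fallback with a preference chain. A short sketch of the chain on its own (the function name is ours, not from the integration):

```python
def pick_input_language(
    stt_language: str | None,
    tts_language: str | None,
    pipeline_language: str,
) -> str:
    # Prefer the most specific tag available, e.g. "zh-CN" over "zh",
    # falling back to the pipeline language last.
    return stt_language or tts_language or pipeline_language


assert pick_input_language("zh-CN", None, "zh") == "zh-CN"
assert pick_input_language(None, "de-CH", "de") == "de-CH"
assert pick_input_language(None, None, "en") == "en"
```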
`homeassistant/components/assist_pipeline/vad.py`

@@ -75,7 +75,7 @@ class AudioBuffer:
 class VoiceCommandSegmenter:
     """Segments an audio stream into voice commands."""
 
-    speech_seconds: float = 0.1
+    speech_seconds: float = 0.3
     """Seconds of speech before voice command has started."""
 
     command_seconds: float = 1.0
`homeassistant/components/asuswrt/strings.json`

@@ -31,8 +31,8 @@
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "abort": {
-      "invalid_unique_id": "Impossible to determine a valid unique id for the device",
-      "no_unique_id": "A device without a valid unique id is already configured. Configuration of multiple instance is not possible"
+      "invalid_unique_id": "Impossible to determine a valid unique ID for the device",
+      "no_unique_id": "A device without a valid unique ID is already configured. Configuration of multiple instances is not possible"
     }
   },
   "options": {
@@ -42,7 +42,7 @@
       "consider_home": "Seconds to wait before considering a device away",
       "track_unknown": "Track unknown / unnamed devices",
       "interface": "The interface that you want statistics from (e.g. eth0, eth1 etc)",
-      "dnsmasq": "The location in the router of the dnsmasq.leases files",
+      "dnsmasq": "The location of the dnsmasq.leases file in the router",
       "require_ip": "Devices must have IP (for access point mode)"
     }
   }
`homeassistant/components/aussie_broadband/manifest.json`

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aussie_broadband",
   "iot_class": "cloud_polling",
   "loggers": ["aussiebb"],
-  "requirements": ["pyaussiebb==0.1.4"]
+  "requirements": ["pyaussiebb==0.1.5"]
 }
`homeassistant/components/backup/__init__.py`

@@ -5,6 +5,10 @@ from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.hassio import is_hassio
 from homeassistant.helpers.typing import ConfigType
 
+# Pre-import backup to avoid it being imported
+# later when the import executor is busy and delaying
+# startup
+from . import backup  # noqa: F401
 from .agent import (
     BackupAgent,
     BackupAgentError,
@@ -17,8 +21,10 @@ from .manager import (
     BackupManager,
     BackupPlatformProtocol,
     BackupReaderWriter,
+    BackupReaderWriterError,
     CoreBackupReaderWriter,
     CreateBackupEvent,
+    IncorrectPasswordError,
     ManagerBackup,
     NewBackup,
     WrittenBackup,
@@ -35,8 +41,10 @@ __all__ = [
     "BackupAgentPlatformProtocol",
     "BackupPlatformProtocol",
     "BackupReaderWriter",
+    "BackupReaderWriterError",
+    "CreateBackupEvent",
     "Folder",
     "IncorrectPasswordError",
     "LocalBackupAgent",
     "NewBackup",
     "WrittenBackup",
`homeassistant/components/backup/config.py`

@@ -7,6 +7,7 @@ from collections.abc import Callable
 from dataclasses import dataclass, field, replace
 from datetime import datetime, timedelta
 from enum import StrEnum
+import random
 from typing import TYPE_CHECKING, Self, TypedDict
 
 from cronsim import CronSim
@@ -17,7 +18,7 @@ from homeassistant.helpers.typing import UNDEFINED, UndefinedType
 from homeassistant.util import dt as dt_util
 
 from .const import LOGGER
-from .models import Folder
+from .models import BackupManagerError, Folder
 
 if TYPE_CHECKING:
     from .manager import BackupManager, ManagerBackup
@@ -28,6 +29,10 @@ if TYPE_CHECKING:
 CRON_PATTERN_DAILY = "45 4 * * *"
 CRON_PATTERN_WEEKLY = "45 4 * * {}"
 
+# Randomize the start time of the backup by up to 60 minutes to avoid
+# all backups running at the same time.
+BACKUP_START_TIME_JITTER = 60 * 60
+
 
 class StoredBackupConfig(TypedDict):
     """Represent the stored backup config."""
@@ -124,6 +129,7 @@ class BackupConfig:
     def load(self, stored_config: StoredBackupConfig) -> None:
         """Load config."""
         self.data = BackupConfigData.from_dict(stored_config)
+        self.data.retention.apply(self._manager)
         self.data.schedule.apply(self._manager)
 
     async def update(
@@ -160,8 +166,13 @@ class RetentionConfig:
     def apply(self, manager: BackupManager) -> None:
         """Apply backup retention configuration."""
         if self.days is not None:
+            LOGGER.debug(
+                "Scheduling next automatic delete of backups older than %s in 1 day",
+                self.days,
+            )
             self._schedule_next(manager)
         else:
+            LOGGER.debug("Unscheduling next automatic delete")
             self._unschedule_next(manager)
 
     def to_dict(self) -> StoredRetentionConfig:
@@ -318,11 +329,13 @@ class BackupSchedule:
                 password=config_data.create_backup.password,
                 with_automatic_settings=True,
             )
+        except BackupManagerError as err:
+            LOGGER.error("Error creating backup: %s", err)
         except Exception:  # noqa: BLE001
-            # another more specific exception will be added
-            # and handled in the future
             LOGGER.exception("Unexpected error creating automatic backup")
 
+        next_time += timedelta(seconds=random.randint(0, BACKUP_START_TIME_JITTER))
         LOGGER.debug("Scheduling next automatic backup at %s", next_time)
         manager.remove_next_backup_event = async_track_point_in_time(
            manager.hass, _create_backup, next_time
        )
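The scheduling change above adds up to an hour of random jitter to the shared 04:45 cron slot. A runnable sketch of the arithmetic, using the constant from the diff:

```python
import random
from datetime import datetime, timedelta

BACKUP_START_TIME_JITTER = 60 * 60  # seconds, as in the diff above


def next_backup_time(cron_slot: datetime) -> datetime:
    # Every installation shares the same cron slot; a random delay of up to
    # an hour spreads the actual start times apart.
    return cron_slot + timedelta(seconds=random.randint(0, BACKUP_START_TIME_JITTER))


print(next_backup_time(datetime(2025, 1, 10, 4, 45)))
```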
`homeassistant/components/backup/manager.py`

@@ -46,15 +46,11 @@ from .const import (
     EXCLUDE_FROM_BACKUP,
     LOGGER,
 )
-from .models import AgentBackup, Folder
+from .models import AgentBackup, BackupManagerError, Folder
 from .store import BackupStore
 from .util import make_backup_dir, read_backup, validate_password
 
 
-class IncorrectPasswordError(HomeAssistantError):
-    """Raised when the password is incorrect."""
-
-
 @dataclass(frozen=True, kw_only=True, slots=True)
 class NewBackup:
     """New backup class."""
@@ -245,6 +241,14 @@ class BackupReaderWriter(abc.ABC):
         """Restore a backup."""
 
 
+class BackupReaderWriterError(HomeAssistantError):
+    """Backup reader/writer error."""
+
+
+class IncorrectPasswordError(BackupReaderWriterError):
+    """Raised when the password is incorrect."""
+
+
 class BackupManager:
     """Define the format that backup managers can have."""
 
@@ -373,7 +377,9 @@ class BackupManager:
         )
         for result in pre_backup_results:
             if isinstance(result, Exception):
-                raise result
+                raise BackupManagerError(
+                    f"Error during pre-backup: {result}"
+                ) from result
 
     async def async_post_backup_actions(self) -> None:
         """Perform post backup actions."""
@@ -386,7 +392,9 @@ class BackupManager:
         )
         for result in post_backup_results:
             if isinstance(result, Exception):
-                raise result
+                raise BackupManagerError(
+                    f"Error during post-backup: {result}"
+                ) from result
 
     async def load_platforms(self) -> None:
         """Load backup platforms."""
@@ -422,11 +430,22 @@ class BackupManager:
             return_exceptions=True,
         )
         for idx, result in enumerate(sync_backup_results):
-            if isinstance(result, Exception):
+            if isinstance(result, BackupReaderWriterError):
+                # writer errors will affect all agents
+                # no point in continuing
+                raise BackupManagerError(str(result)) from result
+            if isinstance(result, BackupAgentError):
+                LOGGER.error("Error uploading to %s: %s", agent_ids[idx], result)
                 agent_errors[agent_ids[idx]] = result
-                LOGGER.exception(
-                    "Error during backup upload - %s", result, exc_info=result
-                )
+                continue
+            if isinstance(result, Exception):
+                # trap bugs from agents
+                agent_errors[agent_ids[idx]] = result
+                LOGGER.error("Unexpected error: %s", result, exc_info=result)
                 continue
             if isinstance(result, BaseException):
                 raise result
 
         return agent_errors
 
     async def async_get_backups(
@@ -449,7 +468,7 @@ class BackupManager:
                 agent_errors[agent_ids[idx]] = result
                 continue
             if isinstance(result, BaseException):
-                raise result
+                raise result  # unexpected error
             for agent_backup in result:
                 if (backup_id := agent_backup.backup_id) not in backups:
                     if known_backup := self.known_backups.get(backup_id):
@@ -499,7 +518,7 @@ class BackupManager:
                 agent_errors[agent_ids[idx]] = result
                 continue
             if isinstance(result, BaseException):
-                raise result
+                raise result  # unexpected error
             if not result:
                 continue
             if backup is None:
@@ -563,7 +582,7 @@ class BackupManager:
                 agent_errors[agent_ids[idx]] = result
                 continue
             if isinstance(result, BaseException):
-                raise result
+                raise result  # unexpected error
 
         if not agent_errors:
             self.known_backups.remove(backup_id)
@@ -578,7 +597,7 @@ class BackupManager:
     ) -> None:
         """Receive and store a backup file from upload."""
         if self.state is not BackupManagerState.IDLE:
-            raise HomeAssistantError(f"Backup manager busy: {self.state}")
+            raise BackupManagerError(f"Backup manager busy: {self.state}")
         self.async_on_backup_event(
             ReceiveBackupEvent(stage=None, state=ReceiveBackupState.IN_PROGRESS)
         )
@@ -652,6 +671,7 @@ class BackupManager:
             include_homeassistant=include_homeassistant,
             name=name,
             password=password,
+            raise_task_error=True,
             with_automatic_settings=with_automatic_settings,
         )
         assert self._backup_finish_task
@@ -669,11 +689,12 @@ class BackupManager:
         include_homeassistant: bool,
         name: str | None,
         password: str | None,
+        raise_task_error: bool = False,
         with_automatic_settings: bool = False,
     ) -> NewBackup:
         """Initiate generating a backup."""
         if self.state is not BackupManagerState.IDLE:
-            raise HomeAssistantError(f"Backup manager busy: {self.state}")
+            raise BackupManagerError(f"Backup manager busy: {self.state}")
 
         if with_automatic_settings:
             self.config.data.last_attempted_automatic_backup = dt_util.now()
@@ -692,6 +713,7 @@ class BackupManager:
                 include_homeassistant=include_homeassistant,
                 name=name,
                 password=password,
+                raise_task_error=raise_task_error,
                 with_automatic_settings=with_automatic_settings,
             )
         except Exception:
@@ -714,57 +736,81 @@ class BackupManager:
         include_homeassistant: bool,
         name: str | None,
         password: str | None,
+        raise_task_error: bool,
         with_automatic_settings: bool,
     ) -> NewBackup:
         """Initiate generating a backup."""
         if not agent_ids:
-            raise HomeAssistantError("At least one agent must be selected")
-        if any(agent_id not in self.backup_agents for agent_id in agent_ids):
-            raise HomeAssistantError("Invalid agent selected")
+            raise BackupManagerError("At least one agent must be selected")
+        if invalid_agents := [
+            agent_id for agent_id in agent_ids if agent_id not in self.backup_agents
+        ]:
+            raise BackupManagerError(f"Invalid agents selected: {invalid_agents}")
         if include_all_addons and include_addons:
-            raise HomeAssistantError(
+            raise BackupManagerError(
                 "Cannot include all addons and specify specific addons"
             )
 
         backup_name = (
             name
-            or f"{"Automatic" if with_automatic_settings else "Custom"} {HAVERSION}"
+            or f"{"Automatic" if with_automatic_settings else "Custom"} backup {HAVERSION}"
         )
-        new_backup, self._backup_task = await self._reader_writer.async_create_backup(
-            agent_ids=agent_ids,
-            backup_name=backup_name,
-            extra_metadata={
-                "instance_id": await instance_id.async_get(self.hass),
-                "with_automatic_settings": with_automatic_settings,
-            },
-            include_addons=include_addons,
-            include_all_addons=include_all_addons,
-            include_database=include_database,
-            include_folders=include_folders,
-            include_homeassistant=include_homeassistant,
-            on_progress=self.async_on_backup_event,
-            password=password,
-        )
-        self._backup_finish_task = self.hass.async_create_task(
+
+        try:
+            (
+                new_backup,
+                self._backup_task,
+            ) = await self._reader_writer.async_create_backup(
+                agent_ids=agent_ids,
+                backup_name=backup_name,
+                extra_metadata={
+                    "instance_id": await instance_id.async_get(self.hass),
+                    "with_automatic_settings": with_automatic_settings,
+                },
+                include_addons=include_addons,
+                include_all_addons=include_all_addons,
+                include_database=include_database,
+                include_folders=include_folders,
+                include_homeassistant=include_homeassistant,
+                on_progress=self.async_on_backup_event,
+                password=password,
+            )
+        except BackupReaderWriterError as err:
+            raise BackupManagerError(str(err)) from err
+
+        backup_finish_task = self._backup_finish_task = self.hass.async_create_task(
             self._async_finish_backup(agent_ids, with_automatic_settings),
             name="backup_manager_finish_backup",
         )
+        if not raise_task_error:
+
+            def log_finish_task_error(task: asyncio.Task[None]) -> None:
+                if task.done() and not task.cancelled() and (err := task.exception()):
+                    if isinstance(err, BackupManagerError):
+                        LOGGER.error("Error creating backup: %s", err)
+                    else:
+                        LOGGER.error("Unexpected error: %s", err, exc_info=err)
+
+            backup_finish_task.add_done_callback(log_finish_task_error)
+
         return new_backup
 
     async def _async_finish_backup(
         self, agent_ids: list[str], with_automatic_settings: bool
     ) -> None:
         """Finish a backup."""
         if TYPE_CHECKING:
             assert self._backup_task is not None
+        backup_success = False
         try:
             written_backup = await self._backup_task
-        except Exception as err:  # noqa: BLE001
-            LOGGER.debug("Generating backup failed", exc_info=err)
-            self.async_on_backup_event(
-                CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
-            )
+        except Exception as err:
             if with_automatic_settings:
                 self._update_issue_backup_failed()
 
+            if isinstance(err, BackupReaderWriterError):
+                raise BackupManagerError(str(err)) from err
+            raise  # unexpected error
         else:
             LOGGER.debug(
                 "Generated new backup with backup_id %s, uploading to agents %s",
@@ -777,28 +823,40 @@ class BackupManager:
                     state=CreateBackupState.IN_PROGRESS,
                 )
             )
-            agent_errors = await self._async_upload_backup(
-                backup=written_backup.backup,
-                agent_ids=agent_ids,
-                open_stream=written_backup.open_stream,
-            )
-            await written_backup.release_stream()
-            if with_automatic_settings:
-                # create backup was successful, update last_completed_automatic_backup
-                self.config.data.last_completed_automatic_backup = dt_util.now()
-                self.store.save()
-                self._update_issue_after_agent_upload(agent_errors)
-            self.known_backups.add(written_backup.backup, agent_errors)
 
+            try:
+                agent_errors = await self._async_upload_backup(
+                    backup=written_backup.backup,
+                    agent_ids=agent_ids,
+                    open_stream=written_backup.open_stream,
+                )
+            finally:
+                await written_backup.release_stream()
+            self.known_backups.add(written_backup.backup, agent_errors)
+            if not agent_errors:
+                if with_automatic_settings:
+                    # create backup was successful, update last_completed_automatic_backup
+                    self.config.data.last_completed_automatic_backup = dt_util.now()
+                    self.store.save()
+                backup_success = True
+
+            if with_automatic_settings:
+                self._update_issue_after_agent_upload(agent_errors)
             # delete old backups more numerous than copies
+            # try this regardless of agent errors above
             await delete_backups_exceeding_configured_count(self)
 
-            self.async_on_backup_event(
-                CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED)
-            )
         finally:
             self._backup_task = None
             self._backup_finish_task = None
+            self.async_on_backup_event(
+                CreateBackupEvent(
+                    stage=None,
+                    state=CreateBackupState.COMPLETED
+                    if backup_success
+                    else CreateBackupState.FAILED,
+                )
+            )
             self.async_on_backup_event(IdleEvent())
 
     async def async_restore_backup(
@@ -814,7 +872,7 @@ class BackupManager:
     ) -> None:
         """Initiate restoring a backup."""
         if self.state is not BackupManagerState.IDLE:
-            raise HomeAssistantError(f"Backup manager busy: {self.state}")
+            raise BackupManagerError(f"Backup manager busy: {self.state}")
 
         self.async_on_backup_event(
             RestoreBackupEvent(stage=None, state=RestoreBackupState.IN_PROGRESS)
@@ -829,6 +887,9 @@ class BackupManager:
                 restore_folders=restore_folders,
                 restore_homeassistant=restore_homeassistant,
             )
+            self.async_on_backup_event(
+                RestoreBackupEvent(stage=None, state=RestoreBackupState.COMPLETED)
+            )
         except Exception:
             self.async_on_backup_event(
                 RestoreBackupEvent(stage=None, state=RestoreBackupState.FAILED)
@@ -851,7 +912,7 @@ class BackupManager:
         """Initiate restoring a backup."""
         agent = self.backup_agents[agent_id]
         if not await agent.async_get_backup(backup_id):
-            raise HomeAssistantError(
+            raise BackupManagerError(
                 f"Backup {backup_id} not found in agent {agent_id}"
             )
 
@@ -1024,11 +1085,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         backup_id = _generate_backup_id(date_str, backup_name)
 
         if include_addons or include_all_addons or include_folders:
-            raise HomeAssistantError(
+            raise BackupReaderWriterError(
                 "Addons and folders are not supported by core backup"
             )
         if not include_homeassistant:
-            raise HomeAssistantError("Home Assistant must be included in backup")
+            raise BackupReaderWriterError("Home Assistant must be included in backup")
 
         backup_task = self._hass.async_create_task(
             self._async_create_backup(
@@ -1099,6 +1160,13 @@ class CoreBackupReaderWriter(BackupReaderWriter):
                 password,
                 local_agent_tar_file_path,
             )
+        except (BackupManagerError, OSError, tarfile.TarError, ValueError) as err:
+            # BackupManagerError from async_pre_backup_actions
+            # OSError from file operations
+            # TarError from tarfile
+            # ValueError from json_bytes
+            raise BackupReaderWriterError(str(err)) from err
        else:
             backup = AgentBackup(
                 addons=[],
                 backup_id=backup_id,
@@ -1116,12 +1184,15 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         async_add_executor_job = self._hass.async_add_executor_job
 
         async def send_backup() -> AsyncIterator[bytes]:
-            f = await async_add_executor_job(tar_file_path.open, "rb")
-            try:
-                while chunk := await async_add_executor_job(f.read, 2**20):
-                    yield chunk
-            finally:
-                await async_add_executor_job(f.close)
+            try:
+                f = await async_add_executor_job(tar_file_path.open, "rb")
+                try:
+                    while chunk := await async_add_executor_job(f.read, 2**20):
+                        yield chunk
+                finally:
+                    await async_add_executor_job(f.close)
+            except OSError as err:
+                raise BackupReaderWriterError(str(err)) from err
 
         async def open_backup() -> AsyncIterator[bytes]:
             return send_backup()
@@ -1129,14 +1200,20 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         async def remove_backup() -> None:
             if local_agent_tar_file_path:
                 return
-            await async_add_executor_job(tar_file_path.unlink, True)
+            try:
+                await async_add_executor_job(tar_file_path.unlink, True)
+            except OSError as err:
+                raise BackupReaderWriterError(str(err)) from err
 
         return WrittenBackup(
             backup=backup, open_stream=open_backup, release_stream=remove_backup
         )
     finally:
         # Inform integrations the backup is done
-        await manager.async_post_backup_actions()
+        try:
+            await manager.async_post_backup_actions()
+        except BackupManagerError as err:
+            raise BackupReaderWriterError(str(err)) from err
 
     def _mkdir_and_generate_backup_contents(
         self,
@@ -1206,6 +1283,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         if self._local_agent_id in agent_ids:
             local_agent = manager.local_backup_agents[self._local_agent_id]
             tar_file_path = local_agent.get_backup_path(backup.backup_id)
+            await async_add_executor_job(make_backup_dir, tar_file_path.parent)
             await async_add_executor_job(shutil.move, temp_file, tar_file_path)
         else:
             tar_file_path = temp_file
@@ -1249,11 +1327,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         """
 
         if restore_addons or restore_folders:
-            raise HomeAssistantError(
+            raise BackupReaderWriterError(
                 "Addons and folders are not supported in core restore"
             )
         if not restore_homeassistant and not restore_database:
-            raise HomeAssistantError(
+            raise BackupReaderWriterError(
                 "Home Assistant or database must be included in restore"
             )
 
@@ -1298,7 +1376,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         )
 
         await self._hass.async_add_executor_job(_write_restore_file)
-        await self._hass.services.async_call("homeassistant", "restart", {})
+        await self._hass.services.async_call("homeassistant", "restart", blocking=True)
 
 
 def _generate_backup_id(date: str, name: str) -> str:
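Several of the manager hunks above partition the results of an `asyncio.gather(..., return_exceptions=True)` call into per-agent failures and fatal writer errors. A minimal sketch of that partitioning, with our own stand-in exception types rather than the real `BackupAgentError`/`BackupReaderWriterError`:

```python
import asyncio


class AgentError(Exception):
    """Stand-in for a per-agent upload failure."""


class WriterError(Exception):
    """Stand-in for a failure that affects every agent."""


async def upload(agent_id: str) -> None:
    if agent_id == "cloud":
        raise AgentError("upload rejected")


async def upload_all(agent_ids: list[str]) -> dict[str, Exception]:
    results = await asyncio.gather(
        *(upload(agent_id) for agent_id in agent_ids), return_exceptions=True
    )
    agent_errors: dict[str, Exception] = {}
    for agent_id, result in zip(agent_ids, results):
        if isinstance(result, WriterError):
            raise result  # affects all agents, no point in continuing
        if isinstance(result, Exception):
            agent_errors[agent_id] = result  # collect and keep going
    return agent_errors


print(asyncio.run(upload_all(["local", "cloud"])))
```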
`homeassistant/components/backup/models.py`

@@ -6,6 +6,8 @@ from dataclasses import asdict, dataclass
 from enum import StrEnum
 from typing import Any, Self
 
+from homeassistant.exceptions import HomeAssistantError
+
 
 @dataclass(frozen=True, kw_only=True)
 class AddonInfo:
@@ -67,3 +69,7 @@ class AgentBackup:
             protected=data["protected"],
             size=data["size"],
         )
+
+
+class BackupManagerError(HomeAssistantError):
+    """Backup manager error."""
`homeassistant/components/backup/strings.json`

@@ -5,8 +5,8 @@
       "description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
     },
     "automatic_backup_failed_upload_agents": {
-      "title": "Automatic backup could not be uploaded to agents",
-      "description": "The automatic backup could not be uploaded to agents {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
+      "title": "Automatic backup could not be uploaded to the configured locations",
+      "description": "The automatic backup could not be uploaded to the configured locations {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
     }
   },
   "services": {
`homeassistant/components/bluetooth/manifest.json`

@@ -20,6 +20,6 @@
     "bluetooth-auto-recovery==1.4.2",
     "bluetooth-data-tools==1.20.0",
     "dbus-fast==2.24.3",
-    "habluetooth==3.6.0"
+    "habluetooth==3.7.0"
   ]
 }
`homeassistant/components/bring/manifest.json`

@@ -6,5 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/bring",
+  "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["bring_api"],
   "requirements": ["bring-api==0.9.1"]
 }
`homeassistant/components/bring/strings.json`

@@ -111,7 +111,7 @@
   "services": {
     "send_message": {
       "name": "[%key:component::notify::services::notify::name%]",
-      "description": "Send a mobile push notification to members of a shared Bring! list.",
+      "description": "Sends a mobile push notification to members of a shared Bring! list.",
       "fields": {
         "entity_id": {
           "name": "List",
@@ -122,8 +122,8 @@
           "description": "Type of push notification to send to list members."
         },
         "item": {
-          "name": "Article (Required if message type `Urgent Message` selected)",
-          "description": "Article name to include in an urgent message e.g. `Urgent Message - Please buy Cilantro urgently`"
+          "name": "Article (Required if notification type `Urgent message` is selected)",
+          "description": "Article name to include in an urgent message e.g. `Urgent message - Please buy Cilantro urgently`"
         }
       }
     }
@@ -134,7 +134,7 @@
       "going_shopping": "I'm going shopping! - Last chance to make changes",
       "changed_list": "List updated - Take a look at the articles",
       "shopping_done": "Shopping done - The fridge is well stocked",
-      "urgent_message": "Urgent Message - Please buy `Article name` urgently"
+      "urgent_message": "Urgent message - Please buy `Article` urgently"
     }
   }
 }
`homeassistant/components/cambridge_audio/strings.json`

@@ -12,7 +12,7 @@
       }
     },
     "discovery_confirm": {
-      "description": "Do you want to setup {name}?"
+      "description": "Do you want to set up {name}?"
     },
     "reconfigure": {
       "description": "Reconfigure your Cambridge Audio Streamer.",
@@ -28,7 +28,7 @@
       "cannot_connect": "Failed to connect to Cambridge Audio device. Please make sure the device is powered up and connected to the network. Try power-cycling the device if it does not connect."
     },
     "abort": {
-      "wrong_device": "This Cambridge Audio device does not match the existing device id. Please make sure you entered the correct IP address.",
+      "wrong_device": "This Cambridge Audio device does not match the existing device ID. Please make sure you entered the correct IP address.",
       "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
`homeassistant/components/camera/__init__.py`

@@ -516,6 +516,19 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
         """Flag supported features."""
         return self._attr_supported_features
 
+    @property
+    def supported_features_compat(self) -> CameraEntityFeature:
+        """Return the supported features as CameraEntityFeature.
+
+        Remove this compatibility shim in 2025.1 or later.
+        """
+        features = self.supported_features
+        if type(features) is int:  # noqa: E721
+            new_features = CameraEntityFeature(features)
+            self._report_deprecated_supported_features_values(new_features)
+            return new_features
+        return features
+
     @cached_property
     def is_recording(self) -> bool:
         """Return true if the device is recording."""
@@ -569,7 +582,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
 
             self._deprecate_attr_frontend_stream_type_logged = True
             return self._attr_frontend_stream_type
-        if CameraEntityFeature.STREAM not in self.supported_features:
+        if CameraEntityFeature.STREAM not in self.supported_features_compat:
             return None
         if (
             self._webrtc_provider
@@ -798,7 +811,9 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     async def async_internal_added_to_hass(self) -> None:
         """Run when entity about to be added to hass."""
         await super().async_internal_added_to_hass()
-        self.__supports_stream = self.supported_features & CameraEntityFeature.STREAM
+        self.__supports_stream = (
+            self.supported_features_compat & CameraEntityFeature.STREAM
+        )
         await self.async_refresh_providers(write_state=False)
 
     async def async_refresh_providers(self, *, write_state: bool = True) -> None:
@@ -838,7 +853,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
         self, fn: Callable[[HomeAssistant, Camera], Coroutine[None, None, _T | None]]
     ) -> _T | None:
         """Get first provider that supports this camera."""
-        if CameraEntityFeature.STREAM not in self.supported_features:
+        if CameraEntityFeature.STREAM not in self.supported_features_compat:
             return None
 
         return await fn(self.hass, self)
@@ -896,7 +911,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     def camera_capabilities(self) -> CameraCapabilities:
         """Return the camera capabilities."""
         frontend_stream_types = set()
-        if CameraEntityFeature.STREAM in self.supported_features:
+        if CameraEntityFeature.STREAM in self.supported_features_compat:
             if self._supports_native_sync_webrtc or self._supports_native_async_webrtc:
                 # The camera has a native WebRTC implementation
                 frontend_stream_types.add(StreamType.WEB_RTC)
@@ -916,7 +931,8 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
         """
         super().async_write_ha_state()
         if self.__supports_stream != (
-            supports_stream := self.supported_features & CameraEntityFeature.STREAM
+            supports_stream := self.supported_features_compat
+            & CameraEntityFeature.STREAM
         ):
             self.__supports_stream = supports_stream
             self._invalidate_camera_capabilities_cache()
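The `supported_features_compat` shim above converts raw integers from older integrations into the `IntFlag` type so membership tests keep working. The conversion itself looks like this sketch (a reduced enum standing in for the real `CameraEntityFeature`, and without the deprecation logging):

```python
from enum import IntFlag


class CameraFeature(IntFlag):
    ON_OFF = 1
    STREAM = 2


def to_feature_flags(features: int | CameraFeature) -> CameraFeature:
    # Wrap plain ints into the IntFlag type; the real shim also reports a
    # deprecation warning at this point.
    if type(features) is int:
        return CameraFeature(features)
    return features


assert CameraFeature.STREAM in to_feature_flags(2)
assert CameraFeature.STREAM in to_feature_flags(CameraFeature.STREAM)
```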
`homeassistant/components/cloud/__init__.py`

@@ -36,7 +36,14 @@ from homeassistant.helpers.typing import ConfigType
 from homeassistant.loader import bind_hass
 from homeassistant.util.signal_type import SignalType
 
-from . import account_link, http_api
+# Pre-import backup to avoid it being imported
+# later when the import executor is busy and delaying
+# startup
+from . import (
+    account_link,
+    backup,  # noqa: F401
+    http_api,
+)
 from .client import CloudClient
 from .const import (
     CONF_ACCOUNT_LINK_SERVER,
`homeassistant/components/cloud/backup.py`

@@ -2,9 +2,12 @@
 
 from __future__ import annotations
 
+import asyncio
 import base64
 from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
 import hashlib
+import logging
+import random
 from typing import Any, Self
 
 from aiohttp import ClientError, ClientTimeout, StreamReader
@@ -23,7 +26,11 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from .client import CloudClient
 from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT
 
+_LOGGER = logging.getLogger(__name__)
 _STORAGE_BACKUP = "backup"
+_RETRY_LIMIT = 5
+_RETRY_SECONDS_MIN = 60
+_RETRY_SECONDS_MAX = 600
 
 
 async def _b64md5(stream: AsyncIterator[bytes]) -> str:
@@ -136,13 +143,55 @@ class CloudBackupAgent(BackupAgent):
             raise BackupAgentError("Failed to get download details") from err
 
         try:
-            resp = await self._cloud.websession.get(details["url"])
+            resp = await self._cloud.websession.get(
+                details["url"],
+                timeout=ClientTimeout(connect=10.0, total=43200.0),  # 43200s == 12h
+            )
 
             resp.raise_for_status()
         except ClientError as err:
             raise BackupAgentError("Failed to download backup") from err
 
         return ChunkAsyncStreamIterator(resp.content)
 
+    async def _async_do_upload_backup(
+        self,
+        *,
+        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
+        filename: str,
+        base64md5hash: str,
+        metadata: dict[str, Any],
+        size: int,
+    ) -> None:
+        """Upload a backup."""
+        try:
+            details = await async_files_upload_details(
+                self._cloud,
+                storage_type=_STORAGE_BACKUP,
+                filename=filename,
+                metadata=metadata,
+                size=size,
+                base64md5hash=base64md5hash,
+            )
+        except (ClientError, CloudError) as err:
+            raise BackupAgentError("Failed to get upload details") from err
+
+        try:
+            upload_status = await self._cloud.websession.put(
+                details["url"],
+                data=await open_stream(),
+                headers=details["headers"] | {"content-length": str(size)},
+                timeout=ClientTimeout(connect=10.0, total=43200.0),  # 43200s == 12h
+            )
+            _LOGGER.log(
+                logging.DEBUG if upload_status.status < 400 else logging.WARNING,
+                "Backup upload status: %s",
+                upload_status.status,
+            )
+            upload_status.raise_for_status()
+        except (TimeoutError, ClientError) as err:
+            raise BackupAgentError("Failed to upload backup") from err
+
     async def async_upload_backup(
         self,
         *,
@@ -159,29 +208,34 @@
             raise BackupAgentError("Cloud backups must be protected")
 
         base64md5hash = await _b64md5(await open_stream())
+        filename = self._get_backup_filename()
+        metadata = backup.as_dict()
+        size = backup.size
 
-        try:
-            details = await async_files_upload_details(
-                self._cloud,
-                storage_type=_STORAGE_BACKUP,
-                filename=self._get_backup_filename(),
-                metadata=backup.as_dict(),
-                size=backup.size,
-                base64md5hash=base64md5hash,
-            )
-        except (ClientError, CloudError) as err:
-            raise BackupAgentError("Failed to get upload details") from err
-
-        try:
-            upload_status = await self._cloud.websession.put(
-                details["url"],
-                data=await open_stream(),
-                headers=details["headers"] | {"content-length": str(backup.size)},
-                timeout=ClientTimeout(connect=10.0, total=43200.0),  # 43200s == 12h
-            )
-            upload_status.raise_for_status()
-        except (TimeoutError, ClientError) as err:
-            raise BackupAgentError("Failed to upload backup") from err
+        tries = 1
+        while tries <= _RETRY_LIMIT:
+            try:
+                await self._async_do_upload_backup(
+                    open_stream=open_stream,
+                    filename=filename,
+                    base64md5hash=base64md5hash,
+                    metadata=metadata,
+                    size=size,
+                )
+                break
+            except BackupAgentError as err:
+                if tries == _RETRY_LIMIT:
+                    raise
+                tries += 1
+                retry_timer = random.randint(_RETRY_SECONDS_MIN, _RETRY_SECONDS_MAX)
+                _LOGGER.info(
+                    "Failed to upload backup, retrying (%s/%s) in %ss: %s",
+                    tries,
+                    _RETRY_LIMIT,
+                    retry_timer,
+                    err,
+                )
+                await asyncio.sleep(retry_timer)
 
     async def async_delete_backup(
         self,
@@ -208,6 +262,7 @@
         """List backups."""
         try:
             backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP)
+            _LOGGER.debug("Cloud backups: %s", backups)
         except (ClientError, CloudError) as err:
             raise BackupAgentError("Failed to list backups") from err
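The upload path above now retries up to five times, sleeping a random 1-10 minutes between attempts so retries from many installations spread out. The control flow, reduced to its skeleton (with `ConnectionError` standing in for `BackupAgentError`):

```python
import asyncio
import random
from collections.abc import Awaitable, Callable

_RETRY_LIMIT = 5
_RETRY_SECONDS_MIN = 60
_RETRY_SECONDS_MAX = 600


async def upload_with_retries(do_upload: Callable[[], Awaitable[None]]) -> None:
    tries = 1
    while tries <= _RETRY_LIMIT:
        try:
            await do_upload()
            break
        except ConnectionError:
            if tries == _RETRY_LIMIT:
                raise  # give up after the final attempt
            tries += 1
            delay = random.randint(_RETRY_SECONDS_MIN, _RETRY_SECONDS_MAX)
            await asyncio.sleep(delay)
```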
`homeassistant/components/comelit/manifest.json`

@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["aiocomelit"],
-  "requirements": ["aiocomelit==0.9.1"]
+  "requirements": ["aiocomelit==0.10.1"]
 }
`homeassistant/components/conversation/manifest.json`

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.20"]
+  "requirements": ["hassil==2.1.0", "home-assistant-intents==2025.1.1"]
 }
`homeassistant/components/cookidoo/__init__.py`

@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from cookidoo_api import Cookidoo, CookidooConfig, CookidooLocalizationConfig
+from cookidoo_api import Cookidoo, CookidooConfig, get_localization_options
 
 from homeassistant.const import (
     CONF_COUNTRY,
@@ -22,15 +22,17 @@ PLATFORMS: list[Platform] = [Platform.TODO]
 async def async_setup_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool:
     """Set up Cookidoo from a config entry."""
 
+    localizations = await get_localization_options(
+        country=entry.data[CONF_COUNTRY].lower(),
+        language=entry.data[CONF_LANGUAGE],
+    )
+
     cookidoo = Cookidoo(
         async_get_clientsession(hass),
         CookidooConfig(
             email=entry.data[CONF_EMAIL],
             password=entry.data[CONF_PASSWORD],
-            localization=CookidooLocalizationConfig(
-                country_code=entry.data[CONF_COUNTRY].lower(),
-                language=entry.data[CONF_LANGUAGE],
-            ),
+            localization=localizations[0],
         ),
     )
`homeassistant/components/cookidoo/config_flow.py`

@@ -10,7 +10,6 @@ from cookidoo_api import (
     Cookidoo,
     CookidooAuthException,
     CookidooConfig,
-    CookidooLocalizationConfig,
     CookidooRequestException,
     get_country_options,
     get_localization_options,
@@ -219,18 +218,19 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN):
         else:
             data_input[CONF_LANGUAGE] = (
                 await get_localization_options(country=data_input[CONF_COUNTRY].lower())
-            )[0]  # Pick any language to test login
+            )[0].language  # Pick any language to test login
+
+        localizations = await get_localization_options(
+            country=data_input[CONF_COUNTRY].lower(),
+            language=data_input[CONF_LANGUAGE],
+        )
 
-        session = async_get_clientsession(self.hass)
         cookidoo = Cookidoo(
-            session,
+            async_get_clientsession(self.hass),
             CookidooConfig(
                 email=data_input[CONF_EMAIL],
                 password=data_input[CONF_PASSWORD],
-                localization=CookidooLocalizationConfig(
-                    country_code=data_input[CONF_COUNTRY].lower(),
-                    language=data_input[CONF_LANGUAGE],
-                ),
+                localization=localizations[0],
             ),
         )
         try:
`homeassistant/components/cookidoo/manifest.json`

@@ -6,6 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/cookidoo",
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["cookidoo_api"],
-  "requirements": ["cookidoo-api==0.10.0"]
+  "quality_scale": "silver",
+  "requirements": ["cookidoo-api==0.12.2"]
 }
`homeassistant/components/cover/__init__.py`

@@ -300,6 +300,10 @@ class CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     def supported_features(self) -> CoverEntityFeature:
         """Flag supported features."""
         if (features := self._attr_supported_features) is not None:
+            if type(features) is int:  # noqa: E721
+                new_features = CoverEntityFeature(features)
+                self._report_deprecated_supported_features_values(new_features)
+                return new_features
             return features
 
         supported_features = (
`homeassistant/components/deconz/light.py`

@@ -266,7 +266,7 @@ class DeconzBaseLight[_LightDeviceT: Group | Light](
     @property
     def color_temp_kelvin(self) -> int | None:
         """Return the CT color value."""
-        if self._device.color_temp is None:
+        if self._device.color_temp is None or self._device.color_temp == 0:
             return None
         return color_temperature_mired_to_kelvin(self._device.color_temp)
`homeassistant/components/ecovacs/manifest.json`

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ecovacs",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.10", "deebot-client==10.0.1"]
+  "requirements": ["py-sucks==0.9.10", "deebot-client==11.0.0"]
 }
`homeassistant/components/elevenlabs/__init__.py`

@@ -6,11 +6,16 @@ from dataclasses import dataclass
 
 from elevenlabs import AsyncElevenLabs, Model
 from elevenlabs.core import ApiError
+from httpx import ConnectError
 
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_API_KEY, Platform
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
+from homeassistant.exceptions import (
+    ConfigEntryAuthFailed,
+    ConfigEntryError,
+    ConfigEntryNotReady,
+)
 from homeassistant.helpers.httpx_client import get_async_client
 
 from .const import CONF_MODEL
@@ -48,6 +53,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ElevenLabsConfigEntry) -
     model_id = entry.options[CONF_MODEL]
     try:
         model = await get_model_by_id(client, model_id)
+    except ConnectError as err:
+        raise ConfigEntryNotReady("Failed to connect") from err
     except ApiError as err:
         raise ConfigEntryAuthFailed("Auth failed") from err
`homeassistant/components/elkm1/manifest.json`

@@ -15,5 +15,5 @@
   "documentation": "https://www.home-assistant.io/integrations/elkm1",
   "iot_class": "local_push",
   "loggers": ["elkm1_lib"],
-  "requirements": ["elkm1-lib==2.2.10"]
+  "requirements": ["elkm1-lib==2.2.11"]
 }
`homeassistant/components/elmax/config_flow.py`

@@ -151,7 +151,9 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN):
                 port=self._panel_direct_port,
             )
         )
-        ssl_context = build_direct_ssl_context(cadata=self._panel_direct_ssl_cert)
+        ssl_context = await self.hass.async_add_executor_job(
+            build_direct_ssl_context, self._panel_direct_ssl_cert
+        )
 
         # Attempt the connection to make sure the pin works. Also, take the chance to retrieve the panel ID via APIs.
         client_api_url = get_direct_api_url(
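The elmax change above moves SSL context construction off the event loop, since loading certificates does blocking CPU and disk work. Outside Home Assistant, the same idea looks like this sketch using the loop's default executor:

```python
import asyncio
import ssl


def build_ssl_context(cadata: str | None = None) -> ssl.SSLContext:
    # Blocking work: parses certificates and may touch the CA store on disk.
    return ssl.create_default_context(cadata=cadata)


async def main() -> None:
    loop = asyncio.get_running_loop()
    # Rough equivalent of hass.async_add_executor_job(...) in the diff above.
    ctx = await loop.run_in_executor(None, build_ssl_context)
    print(ctx.verify_mode)


asyncio.run(main())
```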
`homeassistant/components/elmax/manifest.json`

@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/elmax",
   "iot_class": "cloud_polling",
   "loggers": ["elmax_api"],
-  "requirements": ["elmax-api==0.0.6.3"],
+  "requirements": ["elmax-api==0.0.6.4rc0"],
   "zeroconf": [
     {
       "type": "_elmax-ssl._tcp.local."
`homeassistant/components/enigma2/manifest.json`

@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["openwebif"],
-  "requirements": ["openwebifpy==4.3.0"]
+  "requirements": ["openwebifpy==4.3.1"]
 }
`homeassistant/components/eq3btsmart/manifest.json`

@@ -22,5 +22,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["eq3btsmart"],
-  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==1.1.0"]
+  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.0.0"]
 }
`homeassistant/components/esphome/bluetooth.py`

@@ -7,7 +7,6 @@ from typing import TYPE_CHECKING
 
 from aioesphomeapi import APIClient, DeviceInfo
 from bleak_esphome import connect_scanner
-from bleak_esphome.backend.cache import ESPHomeBluetoothCache
 
 from homeassistant.components.bluetooth import async_register_scanner
 from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback as hass_callback
@@ -28,10 +27,9 @@ def async_connect_scanner(
     entry_data: RuntimeEntryData,
     cli: APIClient,
     device_info: DeviceInfo,
-    cache: ESPHomeBluetoothCache,
 ) -> CALLBACK_TYPE:
     """Connect scanner."""
-    client_data = connect_scanner(cli, device_info, cache, entry_data.available)
+    client_data = connect_scanner(cli, device_info, entry_data.available)
     entry_data.bluetooth_device = client_data.bluetooth_device
     client_data.disconnect_callbacks = entry_data.disconnect_callbacks
     scanner = client_data.scanner
@@ -6,8 +6,6 @@ from dataclasses import dataclass, field
 from functools import cache
 from typing import Self

-from bleak_esphome.backend.cache import ESPHomeBluetoothCache
-
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.json import JSONEncoder
@@ -22,9 +20,6 @@ class DomainData:
     """Define a class that stores global esphome data in hass.data[DOMAIN]."""

     _stores: dict[str, ESPHomeStorage] = field(default_factory=dict)
-    bluetooth_cache: ESPHomeBluetoothCache = field(
-        default_factory=ESPHomeBluetoothCache
-    )

     def get_entry_data(self, entry: ESPHomeConfigEntry) -> RuntimeEntryData:
         """Return the runtime entry data associated with this config entry.
@@ -423,9 +423,7 @@ class ESPHomeManager:

         if device_info.bluetooth_proxy_feature_flags_compat(api_version):
             entry_data.disconnect_callbacks.add(
-                async_connect_scanner(
-                    hass, entry_data, cli, device_info, self.domain_data.bluetooth_cache
-                )
+                async_connect_scanner(hass, entry_data, cli, device_info)
             )

         if device_info.voice_assistant_feature_flags_compat(api_version) and (
@@ -18,7 +18,7 @@
   "requirements": [
     "aioesphomeapi==28.0.0",
     "esphome-dashboard-api==1.2.3",
-    "bleak-esphome==1.1.0"
+    "bleak-esphome==2.0.0"
   ],
   "zeroconf": ["_esphomelib._tcp.local."]
 }
@@ -8,7 +8,7 @@ from pyezviz.exceptions import PyEzvizError
 from pyezviz.utils import decrypt_image

 from homeassistant.components.image import Image, ImageEntity, ImageEntityDescription
-from homeassistant.config_entries import ConfigEntry
+from homeassistant.config_entries import SOURCE_IGNORE, ConfigEntry
 from homeassistant.const import CONF_PASSWORD
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
@@ -57,7 +57,9 @@ class EzvizLastMotion(EzvizEntity, ImageEntity):
         )
         camera = hass.config_entries.async_entry_for_domain_unique_id(DOMAIN, serial)
         self.alarm_image_password = (
-            camera.data[CONF_PASSWORD] if camera is not None else None
+            camera.data[CONF_PASSWORD]
+            if camera and camera.source != SOURCE_IGNORE
+            else None
         )

     async def _async_load_image_from_url(self, url: str) -> Image | None:
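Note: an entry the user chose to "ignore" still matches async_entry_for_domain_unique_id, but it was never set up and carries no usable data, so indexing camera.data would fail or mislead. A hedged sketch of the guard in isolation (helper name illustrative):

    from homeassistant.config_entries import SOURCE_IGNORE

    def alarm_password_for(camera) -> str | None:
        # Treat ignored entries the same as "no entry found".
        if camera is None or camera.source == SOURCE_IGNORE:
            return None
        return camera.data["password"]  # CONF_PASSWORD in the real code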
@@ -14,6 +14,7 @@ import feedparser

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant, callback
+from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers.storage import Store
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 from homeassistant.util import dt as dt_util
@@ -101,7 +102,11 @@ class FeedReaderCoordinator(

     async def async_setup(self) -> None:
         """Set up the feed manager."""
-        feed = await self._async_fetch_feed()
+        try:
+            feed = await self._async_fetch_feed()
+        except UpdateFailed as err:
+            raise ConfigEntryNotReady from err

         self.logger.debug("Feed data fetched from %s : %s", self.url, feed["feed"])
         if feed_author := feed["feed"].get("author"):
             self.feed_author = html.unescape(feed_author)
@@ -23,10 +23,10 @@ from homeassistant.helpers.dispatcher import (
     async_dispatcher_send,
 )
 from homeassistant.helpers.entity import Entity
-from homeassistant.helpers.system_info import is_official_image
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.loader import bind_hass
 from homeassistant.util.signal_type import SignalType
+from homeassistant.util.system_info import is_official_image

 DOMAIN = "ffmpeg"
@@ -2,10 +2,11 @@

 from datetime import datetime as dt
 import logging
+from typing import Any

 import jwt
 from pyflick import FlickAPI
-from pyflick.authentication import AbstractFlickAuth
+from pyflick.authentication import SimpleFlickAuth
 from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET

 from homeassistant.config_entries import ConfigEntry
@@ -20,7 +21,8 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import aiohttp_client

-from .const import CONF_TOKEN_EXPIRY, DOMAIN
+from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, CONF_TOKEN_EXPIRY
+from .coordinator import FlickConfigEntry, FlickElectricDataCoordinator

 _LOGGER = logging.getLogger(__name__)

@@ -29,36 +31,85 @@ CONF_ID_TOKEN = "id_token"
 PLATFORMS = [Platform.SENSOR]


-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_setup_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool:
     """Set up Flick Electric from a config entry."""
     auth = HassFlickAuth(hass, entry)

-    hass.data.setdefault(DOMAIN, {})
-    hass.data[DOMAIN][entry.entry_id] = FlickAPI(auth)
+    coordinator = FlickElectricDataCoordinator(
+        hass, FlickAPI(auth), entry.data[CONF_SUPPLY_NODE_REF]
+    )
+
+    await coordinator.async_config_entry_first_refresh()
+
+    entry.runtime_data = coordinator

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

     return True


-async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_unload_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool:
     """Unload a config entry."""
-    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
-    if unload_ok:
-        hass.data[DOMAIN].pop(entry.entry_id)
-    return unload_ok
+    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


-class HassFlickAuth(AbstractFlickAuth):
+async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
+    """Migrate old entry."""
+    _LOGGER.debug(
+        "Migrating configuration from version %s.%s",
+        config_entry.version,
+        config_entry.minor_version,
+    )
+
+    if config_entry.version > 2:
+        return False
+
+    if config_entry.version == 1:
+        api = FlickAPI(HassFlickAuth(hass, config_entry))
+
+        accounts = await api.getCustomerAccounts()
+        active_accounts = [
+            account for account in accounts if account["status"] == "active"
+        ]
+
+        # A single active account can be auto-migrated
+        if (len(active_accounts)) == 1:
+            account = active_accounts[0]
+
+            new_data = {**config_entry.data}
+            new_data[CONF_ACCOUNT_ID] = account["id"]
+            new_data[CONF_SUPPLY_NODE_REF] = account["main_consumer"]["supply_node_ref"]
+            hass.config_entries.async_update_entry(
+                config_entry,
+                title=account["address"],
+                unique_id=account["id"],
+                data=new_data,
+                version=2,
+            )
+            return True
+
+        config_entry.async_start_reauth(hass, data={**config_entry.data})
+        return False
+
+    return True
+
+
+class HassFlickAuth(SimpleFlickAuth):
     """Implementation of AbstractFlickAuth based on a Home Assistant entity config."""

-    def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
+    def __init__(self, hass: HomeAssistant, entry: FlickConfigEntry) -> None:
         """Flick authentication based on a Home Assistant entity config."""
-        super().__init__(aiohttp_client.async_get_clientsession(hass))
+        super().__init__(
+            username=entry.data[CONF_USERNAME],
+            password=entry.data[CONF_PASSWORD],
+            client_id=entry.data.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
+            client_secret=entry.data.get(CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET),
+            websession=aiohttp_client.async_get_clientsession(hass),
+        )
         self._entry = entry
         self._hass = hass

-    async def _get_entry_token(self):
+    async def _get_entry_token(self) -> dict[str, Any]:
         # No token saved, generate one
         if (
             CONF_TOKEN_EXPIRY not in self._entry.data
@@ -75,13 +126,8 @@ class HassFlickAuth(AbstractFlickAuth):
     async def _update_token(self):
         _LOGGER.debug("Fetching new access token")

-        token = await self.get_new_token(
-            username=self._entry.data[CONF_USERNAME],
-            password=self._entry.data[CONF_PASSWORD],
-            client_id=self._entry.data.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
-            client_secret=self._entry.data.get(
-                CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET
-            ),
+        token = await super().get_new_token(
+            self._username, self._password, self._client_id, self._client_secret
         )

        _LOGGER.debug("New token: %s", token)
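Note: this moves Flick Electric from hass.data[DOMAIN] bookkeeping to the typed entry.runtime_data pattern, with the alias FlickConfigEntry defined in the new coordinator.py below. A minimal sketch of the pattern, with illustrative names:

    from homeassistant.config_entries import ConfigEntry
    from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

    class MyCoordinator(DataUpdateCoordinator):  # illustrative
        ...

    type MyConfigEntry = ConfigEntry[MyCoordinator]

    async def async_setup_entry(hass, entry: MyConfigEntry) -> bool:
        coordinator = MyCoordinator(hass)
        # Raises ConfigEntryNotReady for us if the first update fails.
        await coordinator.async_config_entry_first_refresh()
        entry.runtime_data = coordinator  # no manual cleanup needed on unload
        return True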
@@ -1,14 +1,18 @@
 """Config Flow for Flick Electric integration."""

 import asyncio
+from collections.abc import Mapping
 import logging
 from typing import Any

-from pyflick.authentication import AuthException, SimpleFlickAuth
+from aiohttp import ClientResponseError
+from pyflick import FlickAPI
+from pyflick.authentication import AbstractFlickAuth, SimpleFlickAuth
 from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET
+from pyflick.types import APIException, AuthException, CustomerAccount
 import voluptuous as vol

-from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
+from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
 from homeassistant.const import (
     CONF_CLIENT_ID,
     CONF_CLIENT_SECRET,
@@ -17,12 +21,18 @@ from homeassistant.const import (
 )
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import aiohttp_client
+from homeassistant.helpers.selector import (
+    SelectOptionDict,
+    SelectSelector,
+    SelectSelectorConfig,
+    SelectSelectorMode,
+)

-from .const import DOMAIN
+from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, DOMAIN

 _LOGGER = logging.getLogger(__name__)

-DATA_SCHEMA = vol.Schema(
+LOGIN_SCHEMA = vol.Schema(
     {
         vol.Required(CONF_USERNAME): str,
         vol.Required(CONF_PASSWORD): str,
@@ -35,10 +45,13 @@ DATA_SCHEMA = vol.Schema(
 class FlickConfigFlow(ConfigFlow, domain=DOMAIN):
     """Flick config flow."""

-    VERSION = 1
+    VERSION = 2
+    auth: AbstractFlickAuth
+    accounts: list[CustomerAccount]
+    data: dict[str, Any]

-    async def _validate_input(self, user_input):
-        auth = SimpleFlickAuth(
+    async def _validate_auth(self, user_input: Mapping[str, Any]) -> bool:
+        self.auth = SimpleFlickAuth(
             username=user_input[CONF_USERNAME],
             password=user_input[CONF_PASSWORD],
             websession=aiohttp_client.async_get_clientsession(self.hass),
@@ -48,22 +61,83 @@ class FlickConfigFlow(ConfigFlow, domain=DOMAIN):

         try:
             async with asyncio.timeout(60):
-                token = await auth.async_get_access_token()
-        except TimeoutError as err:
+                token = await self.auth.async_get_access_token()
+        except (TimeoutError, ClientResponseError) as err:
             raise CannotConnect from err
         except AuthException as err:
             raise InvalidAuth from err

         return token is not None

+    async def async_step_select_account(
+        self, user_input: Mapping[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Ask user to select account."""
+
+        errors = {}
+        if user_input is not None and CONF_ACCOUNT_ID in user_input:
+            self.data[CONF_ACCOUNT_ID] = user_input[CONF_ACCOUNT_ID]
+            self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref(
+                user_input[CONF_ACCOUNT_ID]
+            )
+            try:
+                # Ensure supply node is active
+                await FlickAPI(self.auth).getPricing(self.data[CONF_SUPPLY_NODE_REF])
+            except (APIException, ClientResponseError):
+                errors["base"] = "cannot_connect"
+            except AuthException:
+                # We should never get here as we have a valid token
+                return self.async_abort(reason="no_permissions")
+            else:
+                # Supply node is active
+                return await self._async_create_entry()
+
+        try:
+            self.accounts = await FlickAPI(self.auth).getCustomerAccounts()
+        except (APIException, ClientResponseError):
+            errors["base"] = "cannot_connect"
+
+        active_accounts = [a for a in self.accounts if a["status"] == "active"]
+
+        if len(active_accounts) == 0:
+            return self.async_abort(reason="no_accounts")
+
+        if len(active_accounts) == 1:
+            self.data[CONF_ACCOUNT_ID] = active_accounts[0]["id"]
+            self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref(
+                active_accounts[0]["id"]
+            )
+
+            return await self._async_create_entry()
+
+        return self.async_show_form(
+            step_id="select_account",
+            data_schema=vol.Schema(
+                {
+                    vol.Required(CONF_ACCOUNT_ID): SelectSelector(
+                        SelectSelectorConfig(
+                            options=[
+                                SelectOptionDict(
+                                    value=account["id"], label=account["address"]
+                                )
+                                for account in active_accounts
+                            ],
+                            mode=SelectSelectorMode.LIST,
+                        )
+                    )
+                }
+            ),
+            errors=errors,
+        )
+
     async def async_step_user(
-        self, user_input: dict[str, Any] | None = None
+        self, user_input: Mapping[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Handle gathering login info."""
         errors = {}
         if user_input is not None:
             try:
-                await self._validate_input(user_input)
+                await self._validate_auth(user_input)
             except CannotConnect:
                 errors["base"] = "cannot_connect"
             except InvalidAuth:
@@ -72,20 +146,61 @@ class FlickConfigFlow(ConfigFlow, domain=DOMAIN):
                 _LOGGER.exception("Unexpected exception")
                 errors["base"] = "unknown"
             else:
-                await self.async_set_unique_id(
-                    f"flick_electric_{user_input[CONF_USERNAME]}"
-                )
-                self._abort_if_unique_id_configured()
-
-                return self.async_create_entry(
-                    title=f"Flick Electric: {user_input[CONF_USERNAME]}",
-                    data=user_input,
-                )
+                self.data = dict(user_input)
+                return await self.async_step_select_account(user_input)

         return self.async_show_form(
-            step_id="user", data_schema=DATA_SCHEMA, errors=errors
+            step_id="user", data_schema=LOGIN_SCHEMA, errors=errors
         )

+    async def async_step_reauth(
+        self, user_input: Mapping[str, Any]
+    ) -> ConfigFlowResult:
+        """Handle re-authentication."""
+
+        self.data = {**user_input}
+
+        return await self.async_step_user(user_input)
+
+    async def _async_create_entry(self) -> ConfigFlowResult:
+        """Create an entry for the flow."""
+
+        await self.async_set_unique_id(self.data[CONF_ACCOUNT_ID])
+
+        account = self._get_account(self.data[CONF_ACCOUNT_ID])
+
+        if self.source == SOURCE_REAUTH:
+            # Migration completed
+            if self._get_reauth_entry().version == 1:
+                self.hass.config_entries.async_update_entry(
+                    self._get_reauth_entry(),
+                    unique_id=self.unique_id,
+                    data=self.data,
+                    version=self.VERSION,
+                )
+
+            return self.async_update_reload_and_abort(
+                self._get_reauth_entry(),
+                unique_id=self.unique_id,
+                title=account["address"],
+                data=self.data,
+            )
+
+        self._abort_if_unique_id_configured()
+
+        return self.async_create_entry(
+            title=account["address"],
+            data=self.data,
+        )
+
+    def _get_account(self, account_id: str) -> CustomerAccount:
+        """Get the account for the account ID."""
+        return next(a for a in self.accounts if a["id"] == account_id)
+
+    def _get_supply_node_ref(self, account_id: str) -> str:
+        """Get the supply node ref for the account."""
+        return self._get_account(account_id)["main_consumer"][CONF_SUPPLY_NODE_REF]


 class CannotConnect(HomeAssistantError):
     """Error to indicate we cannot connect."""
@@ -3,6 +3,8 @@
 DOMAIN = "flick_electric"

 CONF_TOKEN_EXPIRY = "expires"
+CONF_ACCOUNT_ID = "account_id"
+CONF_SUPPLY_NODE_REF = "supply_node_ref"

 ATTR_START_AT = "start_at"
 ATTR_END_AT = "end_at"
homeassistant/components/flick_electric/coordinator.py (new file, 47 lines)
@@ -0,0 +1,47 @@
"""Data Coordinator for Flick Electric."""

import asyncio
from datetime import timedelta
import logging

import aiohttp
from pyflick import FlickAPI, FlickPrice
from pyflick.types import APIException, AuthException

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

_LOGGER = logging.getLogger(__name__)

SCAN_INTERVAL = timedelta(minutes=5)

type FlickConfigEntry = ConfigEntry[FlickElectricDataCoordinator]


class FlickElectricDataCoordinator(DataUpdateCoordinator[FlickPrice]):
    """Coordinator for flick power price."""

    def __init__(
        self, hass: HomeAssistant, api: FlickAPI, supply_node_ref: str
    ) -> None:
        """Initialize FlickElectricDataCoordinator."""
        super().__init__(
            hass,
            _LOGGER,
            name="Flick Electric",
            update_interval=SCAN_INTERVAL,
        )
        self.supply_node_ref = supply_node_ref
        self._api = api

    async def _async_update_data(self) -> FlickPrice:
        """Fetch pricing data from Flick Electric."""
        try:
            async with asyncio.timeout(60):
                return await self._api.getPricing(self.supply_node_ref)
        except AuthException as err:
            raise ConfigEntryAuthFailed from err
        except (APIException, aiohttp.ClientResponseError) as err:
            raise UpdateFailed from err
@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["pyflick"],
-  "requirements": ["PyFlick==0.0.2"]
+  "requirements": ["PyFlick==1.1.3"]
 }
@@ -1,74 +1,72 @@
 """Support for Flick Electric Pricing data."""

-import asyncio
 from datetime import timedelta
 from decimal import Decimal
 import logging
 from typing import Any

 from pyflick import FlickAPI, FlickPrice

 from homeassistant.components.sensor import SensorEntity
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CURRENCY_CENT, UnitOfEnergy
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.util.dt import utcnow
+from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from .const import ATTR_COMPONENTS, ATTR_END_AT, ATTR_START_AT, DOMAIN
+from .const import ATTR_COMPONENTS, ATTR_END_AT, ATTR_START_AT
+from .coordinator import FlickConfigEntry, FlickElectricDataCoordinator

 _LOGGER = logging.getLogger(__name__)

 SCAN_INTERVAL = timedelta(minutes=5)


 async def async_setup_entry(
-    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
+    hass: HomeAssistant,
+    entry: FlickConfigEntry,
+    async_add_entities: AddEntitiesCallback,
 ) -> None:
     """Flick Sensor Setup."""
-    api: FlickAPI = hass.data[DOMAIN][entry.entry_id]
+    coordinator = entry.runtime_data

-    async_add_entities([FlickPricingSensor(api)], True)
+    async_add_entities([FlickPricingSensor(coordinator)])


-class FlickPricingSensor(SensorEntity):
+class FlickPricingSensor(CoordinatorEntity[FlickElectricDataCoordinator], SensorEntity):
     """Entity object for Flick Electric sensor."""

     _attr_attribution = "Data provided by Flick Electric"
     _attr_native_unit_of_measurement = f"{CURRENCY_CENT}/{UnitOfEnergy.KILO_WATT_HOUR}"
     _attr_has_entity_name = True
     _attr_translation_key = "power_price"
-    _attributes: dict[str, Any] = {}

-    def __init__(self, api: FlickAPI) -> None:
+    def __init__(self, coordinator: FlickElectricDataCoordinator) -> None:
         """Entity object for Flick Electric sensor."""
-        self._api: FlickAPI = api
-        self._price: FlickPrice = None
+        super().__init__(coordinator)
+
+        self._attr_unique_id = f"{coordinator.supply_node_ref}_pricing"

     @property
-    def native_value(self):
+    def native_value(self) -> Decimal:
         """Return the state of the sensor."""
-        return self._price.price
+        # The API should return a unit price with quantity of 1.0 when no start/end time is provided
+        if self.coordinator.data.quantity != 1:
+            _LOGGER.warning(
+                "Unexpected quantity for unit price: %s", self.coordinator.data
+            )
+        return self.coordinator.data.cost * 100

     @property
-    def extra_state_attributes(self):
+    def extra_state_attributes(self) -> dict[str, Any] | None:
         """Return the state attributes."""
-        return self._attributes
+        components: dict[str, float] = {}

-    async def async_update(self) -> None:
-        """Get the Flick Pricing data from the web service."""
-        if self._price and self._price.end_at >= utcnow():
-            return  # Power price data is still valid
-
-        async with asyncio.timeout(60):
-            self._price = await self._api.getPricing()
-
-        _LOGGER.debug("Pricing data: %s", self._price)
-
-        self._attributes[ATTR_START_AT] = self._price.start_at
-        self._attributes[ATTR_END_AT] = self._price.end_at
-        for component in self._price.components:
+        for component in self.coordinator.data.components:
             if component.charge_setter not in ATTR_COMPONENTS:
                 _LOGGER.warning("Found unknown component: %s", component.charge_setter)
                 continue

-            self._attributes[component.charge_setter] = float(component.value)
+            components[component.charge_setter] = float(component.value * 100)
+
+        return {
+            ATTR_START_AT: self.coordinator.data.start_at,
+            ATTR_END_AT: self.coordinator.data.end_at,
+            **components,
+        }
@@ -9,6 +9,12 @@
           "client_id": "Client ID (optional)",
           "client_secret": "Client Secret (optional)"
         }
       },
+      "select_account": {
+        "title": "Select account",
+        "data": {
+          "account_id": "Account"
+        }
+      }
     },
     "error": {
@@ -17,7 +23,10 @@
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
+      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
+      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
+      "no_permissions": "Cannot get pricing for this account. Please check user permissions.",
+      "no_accounts": "No services are active on this Flick account"
     }
   },
   "entity": {
@@ -7,6 +7,6 @@
   "documentation": "https://www.home-assistant.io/integrations/freebox",
   "iot_class": "local_polling",
   "loggers": ["freebox_api"],
-  "requirements": ["freebox-api==1.2.1"],
+  "requirements": ["freebox-api==1.2.2"],
   "zeroconf": ["_fbx-api._tcp.local."]
 }
@@ -214,6 +214,18 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
         self._options = options
         await self.hass.async_add_executor_job(self.setup)

+        device_registry = dr.async_get(self.hass)
+        device_registry.async_get_or_create(
+            config_entry_id=self.config_entry.entry_id,
+            configuration_url=f"http://{self.host}",
+            connections={(dr.CONNECTION_NETWORK_MAC, self.mac)},
+            identifiers={(DOMAIN, self.unique_id)},
+            manufacturer="AVM",
+            model=self.model,
+            name=self.config_entry.title,
+            sw_version=self.current_firmware,
+        )
+
     def setup(self) -> None:
         """Set up FritzboxTools class."""
@@ -68,23 +68,14 @@ class FritzBoxBaseEntity:
         """Init device info class."""
         self._avm_wrapper = avm_wrapper
         self._device_name = device_name
-
-    @property
-    def mac_address(self) -> str:
-        """Return the mac address of the main device."""
-        return self._avm_wrapper.mac
+        self.mac_address = self._avm_wrapper.mac

     @property
     def device_info(self) -> DeviceInfo:
         """Return the device information."""
         return DeviceInfo(
             configuration_url=f"http://{self._avm_wrapper.host}",
             connections={(dr.CONNECTION_NETWORK_MAC, self.mac_address)},
             identifiers={(DOMAIN, self._avm_wrapper.unique_id)},
             manufacturer="AVM",
             model=self._avm_wrapper.model,
             name=self._device_name,
             sw_version=self._avm_wrapper.current_firmware,
         )
@@ -1,6 +1,7 @@
 {
   "domain": "frontend",
   "name": "Home Assistant Frontend",
+  "after_dependencies": ["backup"],
   "codeowners": ["@home-assistant/frontend"],
   "dependencies": [
     "api",
@@ -20,5 +21,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20241223.1"]
+  "requirements": ["home-assistant-frontend==20250109.2"]
 }
@@ -349,7 +349,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
     ) -> ConfigFlowResult:
         """Handle the start of the config flow."""
         errors = {}
-        description_placeholders = {}
         hass = self.hass
         if user_input:
             # Secondary validation because serialised vol can't seem to handle this complexity:
@@ -365,8 +364,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
                 )
             except InvalidStreamException as err:
                 errors[CONF_STREAM_SOURCE] = str(err)
-                if err.details:
-                    errors["error_details"] = err.details
                 self.preview_stream = None
             if not errors:
                 user_input[CONF_CONTENT_TYPE] = still_format
@@ -385,8 +382,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
                 # temporary preview for user to check the image
                 self.preview_cam = user_input
                 return await self.async_step_user_confirm()
-            if "error_details" in errors:
-                description_placeholders["error"] = errors.pop("error_details")
         elif self.user_input:
             user_input = self.user_input
         else:
@@ -394,7 +389,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
         return self.async_show_form(
             step_id="user",
             data_schema=build_schema(user_input),
-            description_placeholders=description_placeholders,
             errors=errors,
         )
@@ -412,7 +406,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
             title=self.title, data={}, options=self.user_input
         )
         register_preview(self.hass)
-        preview_url = f"/api/generic/preview_flow_image/{self.flow_id}?t={datetime.now().isoformat()}"
         return self.async_show_form(
             step_id="user_confirm",
             data_schema=vol.Schema(
@@ -420,7 +413,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
                     vol.Required(CONF_CONFIRMED_OK, default=False): bool,
                 }
             ),
-            description_placeholders={"preview_url": preview_url},
             errors=None,
             preview="generic_camera",
         )
@@ -437,6 +429,7 @@ class GenericOptionsFlowHandler(OptionsFlow):
     def __init__(self) -> None:
         """Initialize Generic IP Camera options flow."""
         self.preview_cam: dict[str, Any] = {}
+        self.preview_stream: Stream | None = None
         self.user_input: dict[str, Any] = {}

     async def async_step_init(
@@ -444,42 +437,45 @@ class GenericOptionsFlowHandler(OptionsFlow):
     ) -> ConfigFlowResult:
         """Manage Generic IP Camera options."""
         errors: dict[str, str] = {}
-        description_placeholders = {}
         hass = self.hass

-        if user_input is not None:
-            errors, still_format = await async_test_still(
-                hass, self.config_entry.options | user_input
-            )
-            try:
-                await async_test_and_preview_stream(hass, user_input)
-            except InvalidStreamException as err:
-                errors[CONF_STREAM_SOURCE] = str(err)
-                if err.details:
-                    errors["error_details"] = err.details
-                # Stream preview during options flow not yet implemented
-
-            still_url = user_input.get(CONF_STILL_IMAGE_URL)
-            if not errors:
-                if still_url is None:
-                    # If user didn't specify a still image URL,
-                    # The automatically generated still image that stream generates
-                    # is always jpeg
-                    still_format = "image/jpeg"
-                data = {
-                    CONF_USE_WALLCLOCK_AS_TIMESTAMPS: self.config_entry.options.get(
-                        CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False
-                    ),
-                    **user_input,
-                    CONF_CONTENT_TYPE: still_format
-                    or self.config_entry.options.get(CONF_CONTENT_TYPE),
-                }
-                self.user_input = data
-                # temporary preview for user to check the image
-                self.preview_cam = data
-                return await self.async_step_confirm_still()
-            if "error_details" in errors:
-                description_placeholders["error"] = errors.pop("error_details")
+        if user_input:
+            # Secondary validation because serialised vol can't seem to handle this complexity:
+            if not user_input.get(CONF_STILL_IMAGE_URL) and not user_input.get(
+                CONF_STREAM_SOURCE
+            ):
+                errors["base"] = "no_still_image_or_stream_url"
+            else:
+                errors, still_format = await async_test_still(hass, user_input)
+                try:
+                    self.preview_stream = await async_test_and_preview_stream(
+                        hass, user_input
+                    )
+                except InvalidStreamException as err:
+                    errors[CONF_STREAM_SOURCE] = str(err)
+                    self.preview_stream = None
+                if not errors:
+                    user_input[CONF_CONTENT_TYPE] = still_format
+                    still_url = user_input.get(CONF_STILL_IMAGE_URL)
+                    if still_url is None:
+                        # If user didn't specify a still image URL,
+                        # The automatically generated still image that stream generates
+                        # is always jpeg
+                        still_format = "image/jpeg"
+                    data = {
+                        CONF_USE_WALLCLOCK_AS_TIMESTAMPS: self.config_entry.options.get(
+                            CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False
+                        ),
+                        **user_input,
+                        CONF_CONTENT_TYPE: still_format
+                        or self.config_entry.options.get(CONF_CONTENT_TYPE),
+                    }
+                    self.user_input = data
+                    # temporary preview for user to check the image
+                    self.preview_cam = data
+                    return await self.async_step_user_confirm()
+        elif self.user_input:
+            user_input = self.user_input
         return self.async_show_form(
             step_id="init",
             data_schema=build_schema(
@@ -487,15 +483,17 @@ class GenericOptionsFlowHandler(OptionsFlow):
                 True,
                 self.show_advanced_options,
             ),
-            description_placeholders=description_placeholders,
             errors=errors,
         )

-    async def async_step_confirm_still(
+    async def async_step_user_confirm(
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Handle user clicking confirm after still preview."""
         if user_input:
+            if ha_stream := self.preview_stream:
+                # Kill off the temp stream we created.
+                await ha_stream.stop()
             if not user_input.get(CONF_CONFIRMED_OK):
                 return await self.async_step_init()
             return self.async_create_entry(
@@ -503,18 +501,22 @@ class GenericOptionsFlowHandler(OptionsFlow):
                 data=self.user_input,
             )
+        register_preview(self.hass)
+        preview_url = f"/api/generic/preview_flow_image/{self.flow_id}?t={datetime.now().isoformat()}"
         return self.async_show_form(
-            step_id="confirm_still",
+            step_id="user_confirm",
             data_schema=vol.Schema(
                 {
                     vol.Required(CONF_CONFIRMED_OK, default=False): bool,
                 }
             ),
+            description_placeholders={"preview_url": preview_url},
             errors=None,
+            preview="generic_camera",
         )

+    @staticmethod
+    async def async_setup_preview(hass: HomeAssistant) -> None:
+        """Set up preview WS API."""
+        websocket_api.async_register_command(hass, ws_start_preview)


 class CameraImagePreview(HomeAssistantView):
     """Camera view to temporarily serve an image."""
@@ -556,7 +558,7 @@ class CameraImagePreview(HomeAssistantView):
         {
             vol.Required("type"): "generic_camera/start_preview",
             vol.Required("flow_id"): str,
-            vol.Optional("flow_type"): vol.Any("config_flow"),
+            vol.Optional("flow_type"): vol.Any("config_flow", "options_flow"),
             vol.Optional("user_input"): dict,
         }
     )
@@ -570,10 +572,17 @@ async def ws_start_preview(
     _LOGGER.debug("Generating websocket handler for generic camera preview")

     flow_id = msg["flow_id"]
-    flow = cast(
-        GenericIPCamConfigFlow,
-        hass.config_entries.flow._progress.get(flow_id),  # noqa: SLF001
-    )
+    flow: GenericIPCamConfigFlow | GenericOptionsFlowHandler
+    if msg.get("flow_type", "config_flow") == "config_flow":
+        flow = cast(
+            GenericIPCamConfigFlow,
+            hass.config_entries.flow._progress.get(flow_id),  # noqa: SLF001
+        )
+    else:  # (flow type == "options flow")
+        flow = cast(
+            GenericOptionsFlowHandler,
+            hass.config_entries.options._progress.get(flow_id),  # noqa: SLF001
+        )
     user_input = flow.preview_cam

     # Create an EntityPlatform, needed for name translations
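Note: with "options_flow" accepted by the schema, the frontend can request a live camera preview while reconfiguring an existing entry, not only during initial setup. A hedged example of the websocket message implied by the schema above (field values are illustrative):

    msg = {
        "type": "generic_camera/start_preview",
        "flow_id": "0123456789abcdef",  # id of the in-progress flow
        "flow_type": "options_flow",    # previously only "config_flow" validated
    }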
@@ -67,17 +67,17 @@
           "use_wallclock_as_timestamps": "This option may correct segmenting or crashing issues arising from buggy timestamp implementations on some cameras"
         }
       },
-      "confirm_still": {
-        "title": "Preview",
-        "description": "",
+      "user_confirm": {
+        "title": "Confirmation",
+        "description": "Please wait for previews to load...",
         "data": {
-          "confirmed_ok": "This image looks good."
+          "confirmed_ok": "Everything looks good."
         }
       }
     },
     "error": {
       "unknown": "[%key:common::config_flow::error::unknown%]",
-      "unknown_with_details": "[%key:common::config_flow::error::unknown_with_details]",
+      "unknown_with_details": "[%key:component::generic::config::error::unknown_with_details%]",
       "already_exists": "[%key:component::generic::config::error::already_exists%]",
       "unable_still_load": "[%key:component::generic::config::error::unable_still_load%]",
       "unable_still_load_auth": "[%key:component::generic::config::error::unable_still_load_auth%]",
@@ -34,6 +34,18 @@
             "moderate": "Moderate",
             "good": "Good",
             "very_good": "Very good"
-          }
+          },
+          "state_attributes": {
+            "options": {
+              "state": {
+                "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
+                "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
+                "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
+                "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
+                "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
+                "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
+              }
+            }
+          }
         },
         "c6h6": {
@@ -51,6 +63,18 @@
            "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
            "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
            "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
-          }
+          },
+          "state_attributes": {
+            "options": {
+              "state": {
+                "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
+                "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
+                "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
+                "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
+                "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
+                "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
+              }
+            }
+          }
         },
         "o3_index": {
@@ -62,6 +86,18 @@
            "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
            "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
            "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
-          }
+          },
+          "state_attributes": {
+            "options": {
+              "state": {
+                "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
+                "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
+                "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
+                "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
+                "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
+                "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
+              }
+            }
+          }
         },
         "pm10_index": {
@@ -73,6 +109,18 @@
            "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
            "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
            "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
-          }
+          },
+          "state_attributes": {
+            "options": {
+              "state": {
+                "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
+                "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
+                "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
+                "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
+                "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
+                "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
+              }
+            }
+          }
         },
         "pm25_index": {
@@ -84,6 +132,18 @@
            "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
            "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
            "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
-          }
+          },
+          "state_attributes": {
+            "options": {
+              "state": {
+                "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
+                "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
+                "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
+                "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
+                "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
+                "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
+              }
+            }
+          }
         },
         "so2_index": {
@@ -95,6 +155,18 @@
            "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
            "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
            "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
-          }
+          },
+          "state_attributes": {
+            "options": {
+              "state": {
+                "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
+                "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
+                "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
+                "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
+                "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
+                "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
+              }
+            }
+          }
         }
       }
@@ -20,6 +20,10 @@ CONF_GAIN = "gain"
 CONF_PROFILES = "profiles"
 CONF_TEXT_TYPE = "text_type"

+DEFAULT_SPEED = 1.0
+DEFAULT_PITCH = 0
+DEFAULT_GAIN = 0
+
 # STT constants
 CONF_STT_MODEL = "stt_model"
@@ -31,7 +31,10 @@ from .const import (
     CONF_SPEED,
     CONF_TEXT_TYPE,
     CONF_VOICE,
+    DEFAULT_GAIN,
     DEFAULT_LANG,
+    DEFAULT_PITCH,
+    DEFAULT_SPEED,
 )

 DEFAULT_VOICE = ""
@@ -104,15 +107,15 @@ def tts_options_schema(
             ),
             vol.Optional(
                 CONF_SPEED,
-                default=defaults.get(CONF_SPEED, 1.0),
+                default=defaults.get(CONF_SPEED, DEFAULT_SPEED),
             ): NumberSelector(NumberSelectorConfig(min=0.25, max=4.0, step=0.01)),
             vol.Optional(
                 CONF_PITCH,
-                default=defaults.get(CONF_PITCH, 0),
+                default=defaults.get(CONF_PITCH, DEFAULT_PITCH),
             ): NumberSelector(NumberSelectorConfig(min=-20.0, max=20.0, step=0.1)),
             vol.Optional(
                 CONF_GAIN,
-                default=defaults.get(CONF_GAIN, 0),
+                default=defaults.get(CONF_GAIN, DEFAULT_GAIN),
             ): NumberSelector(NumberSelectorConfig(min=-96.0, max=16.0, step=0.1)),
             vol.Optional(
                 CONF_PROFILES,
@@ -35,7 +35,10 @@ from .const import (
     CONF_SPEED,
     CONF_TEXT_TYPE,
     CONF_VOICE,
+    DEFAULT_GAIN,
     DEFAULT_LANG,
+    DEFAULT_PITCH,
+    DEFAULT_SPEED,
     DOMAIN,
 )
 from .helpers import async_tts_voices, tts_options_schema, tts_platform_schema
@@ -191,11 +194,23 @@ class BaseGoogleCloudProvider:
                 ssml_gender=gender,
                 name=voice,
             ),
+            # Avoid: "This voice does not support speaking rate or pitch parameters at this time."
+            # by not specifying the fields unless they differ from the defaults
             audio_config=texttospeech.AudioConfig(
                 audio_encoding=encoding,
-                speaking_rate=options[CONF_SPEED],
-                pitch=options[CONF_PITCH],
-                volume_gain_db=options[CONF_GAIN],
+                speaking_rate=(
+                    options[CONF_SPEED]
+                    if options[CONF_SPEED] != DEFAULT_SPEED
+                    else None
+                ),
+                pitch=(
+                    options[CONF_PITCH]
+                    if options[CONF_PITCH] != DEFAULT_PITCH
+                    else None
+                ),
+                volume_gain_db=(
+                    options[CONF_GAIN] if options[CONF_GAIN] != DEFAULT_GAIN else None
+                ),
                 effects_profile_id=options[CONF_PROFILES],
             ),
         )
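Note: some Google Cloud voices reject the prosody fields outright, even at their default values, so the request now sets speaking_rate, pitch, and volume_gain_db only when the user changed them. The repeated conditional could be factored into a tiny helper; a sketch under that assumption (helper name illustrative, not part of this change):

    def value_or_none(value: float, default: float) -> float | None:
        # Returning None leaves the protobuf field unset.
        return value if value != default else None

    # speaking_rate=value_or_none(options[CONF_SPEED], DEFAULT_SPEED), etc.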
@@ -204,9 +204,7 @@ class GoogleGenerativeAIConversationEntity(
         """Process a sentence."""
         result = conversation.ConversationResult(
             response=intent.IntentResponse(language=user_input.language),
-            conversation_id=user_input.conversation_id
-            if user_input.conversation_id in self.history
-            else ulid.ulid_now(),
+            conversation_id=user_input.conversation_id or ulid.ulid_now(),
         )
         assert result.conversation_id
@@ -9,6 +9,7 @@ from google.oauth2.credentials import Credentials
 from googleapiclient.discovery import Resource, build
 from googleapiclient.errors import HttpError
 from googleapiclient.http import BatchHttpRequest, HttpRequest
+from httplib2 import ServerNotFoundError

 from homeassistant.const import CONF_ACCESS_TOKEN
 from homeassistant.core import HomeAssistant
@@ -115,7 +116,7 @@ class AsyncConfigEntryAuth:
         def response_handler(_, response, exception: HttpError) -> None:
             if exception is not None:
                 raise GoogleTasksApiError(
-                    f"Google Tasks API responded with error ({exception.status_code})"
+                    f"Google Tasks API responded with error ({exception.reason or exception.status_code})"
                 ) from exception
             if response:
                 data = json.loads(response)
@@ -150,9 +151,9 @@ class AsyncConfigEntryAuth:
     async def _execute(self, request: HttpRequest | BatchHttpRequest) -> Any:
         try:
             result = await self._hass.async_add_executor_job(request.execute)
-        except HttpError as err:
+        except (HttpError, ServerNotFoundError) as err:
             raise GoogleTasksApiError(
-                f"Google Tasks API responded with error ({err.status_code})"
+                f"Google Tasks API responded with: {err.reason or err.status_code})"
             ) from err
         if result:
             _raise_if_error(result)
homeassistant/components/harvey/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
"""Virtual integration: Harvey."""

homeassistant/components/harvey/manifest.json (new file, 6 lines)
@@ -0,0 +1,6 @@
{
  "domain": "harvey",
  "name": "Harvey",
  "integration_type": "virtual",
  "supported_by": "aquacell"
}
@@ -10,6 +10,7 @@ from typing import Any, cast

 from aiohasupervisor.exceptions import (
     SupervisorBadRequestError,
+    SupervisorError,
     SupervisorNotFoundError,
 )
 from aiohasupervisor.models import (
@@ -23,8 +24,10 @@ from homeassistant.components.backup import (
     AgentBackup,
     BackupAgent,
     BackupReaderWriter,
+    BackupReaderWriterError,
     CreateBackupEvent,
     Folder,
+    IncorrectPasswordError,
     NewBackup,
     WrittenBackup,
 )
@@ -213,6 +216,10 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         password: str | None,
     ) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]:
         """Create a backup."""
+        if not include_homeassistant and include_database:
+            raise HomeAssistantError(
+                "Cannot create a backup with database but without Home Assistant"
+            )
         manager = self._hass.data[DATA_MANAGER]

         include_addons_set: supervisor_backups.AddonSet | set[str] | None = None
@@ -220,11 +227,12 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             include_addons_set = supervisor_backups.AddonSet.ALL
         elif include_addons:
             include_addons_set = set(include_addons)
-        include_folders_set = (
-            {supervisor_backups.Folder(folder) for folder in include_folders}
-            if include_folders
-            else None
-        )
+        include_folders_set = {
+            supervisor_backups.Folder(folder) for folder in include_folders or []
+        }
+        # Always include SSL if Home Assistant is included
+        if include_homeassistant:
+            include_folders_set.add(supervisor_backups.Folder.SSL)

         hassio_agents: list[SupervisorBackupAgent] = [
             cast(SupervisorBackupAgent, manager.backup_agents[agent_id])
@@ -233,20 +241,23 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         ]
         locations = [agent.location for agent in hassio_agents]

-        backup = await self._client.backups.partial_backup(
-            supervisor_backups.PartialBackupOptions(
-                addons=include_addons_set,
-                folders=include_folders_set,
-                homeassistant=include_homeassistant,
-                name=backup_name,
-                password=password,
-                compressed=True,
-                location=locations or LOCATION_CLOUD_BACKUP,
-                homeassistant_exclude_database=not include_database,
-                background=True,
-                extra=extra_metadata,
-            )
-        )
+        try:
+            backup = await self._client.backups.partial_backup(
+                supervisor_backups.PartialBackupOptions(
+                    addons=include_addons_set,
+                    folders=include_folders_set,
+                    homeassistant=include_homeassistant,
+                    name=backup_name,
+                    password=password,
+                    compressed=True,
+                    location=locations or LOCATION_CLOUD_BACKUP,
+                    homeassistant_exclude_database=not include_database,
+                    background=True,
+                    extra=extra_metadata,
+                )
+            )
+        except SupervisorError as err:
+            raise BackupReaderWriterError(f"Error creating backup: {err}") from err
         backup_task = self._hass.async_create_task(
             self._async_wait_for_backup(
                 backup, remove_after_upload=not bool(locations)
@@ -278,22 +289,35 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         finally:
             unsub()
         if not backup_id:
-            raise HomeAssistantError("Backup failed")
+            raise BackupReaderWriterError("Backup failed")

         async def open_backup() -> AsyncIterator[bytes]:
-            return await self._client.backups.download_backup(backup_id)
+            try:
+                return await self._client.backups.download_backup(backup_id)
+            except SupervisorError as err:
+                raise BackupReaderWriterError(
+                    f"Error downloading backup: {err}"
+                ) from err

         async def remove_backup() -> None:
             if not remove_after_upload:
                 return
-            await self._client.backups.remove_backup(
-                backup_id,
-                options=supervisor_backups.RemoveBackupOptions(
-                    location={LOCATION_CLOUD_BACKUP}
-                ),
-            )
+            try:
+                await self._client.backups.remove_backup(
+                    backup_id,
+                    options=supervisor_backups.RemoveBackupOptions(
+                        location={LOCATION_CLOUD_BACKUP}
+                    ),
+                )
+            except SupervisorError as err:
+                raise BackupReaderWriterError(f"Error removing backup: {err}") from err

-        details = await self._client.backups.backup_info(backup_id)
+        try:
+            details = await self._client.backups.backup_info(backup_id)
+        except SupervisorError as err:
+            raise BackupReaderWriterError(
+                f"Error getting backup details: {err}"
+            ) from err

         return WrittenBackup(
             backup=_backup_details_to_agent_backup(details),
@@ -359,8 +383,16 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         restore_homeassistant: bool,
     ) -> None:
         """Restore a backup."""
-        if restore_homeassistant and not restore_database:
-            raise HomeAssistantError("Cannot restore Home Assistant without database")
+        manager = self._hass.data[DATA_MANAGER]
+        # The backup manager has already checked that the backup exists so we don't need to
+        # check that here.
+        backup = await manager.backup_agents[agent_id].async_get_backup(backup_id)
+        if (
+            backup
+            and restore_homeassistant
+            and restore_database != backup.database_included
+        ):
+            raise HomeAssistantError("Restore database must match backup")
         if not restore_homeassistant and restore_database:
             raise HomeAssistantError("Cannot restore database without Home Assistant")
         restore_addons_set = set(restore_addons) if restore_addons else None
@@ -370,7 +402,6 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             else None
         )

-        manager = self._hass.data[DATA_MANAGER]
         restore_location: str | None
         if manager.backup_agents[agent_id].domain != DOMAIN:
             # Download the backup to the supervisor. Supervisor will clean up the backup
@@ -385,17 +416,24 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         agent = cast(SupervisorBackupAgent, manager.backup_agents[agent_id])
         restore_location = agent.location

-        job = await self._client.backups.partial_restore(
-            backup_id,
-            supervisor_backups.PartialRestoreOptions(
-                addons=restore_addons_set,
-                folders=restore_folders_set,
-                homeassistant=restore_homeassistant,
-                password=password,
-                background=True,
-                location=restore_location,
-            ),
-        )
+        try:
+            job = await self._client.backups.partial_restore(
+                backup_id,
+                supervisor_backups.PartialRestoreOptions(
+                    addons=restore_addons_set,
+                    folders=restore_folders_set,
+                    homeassistant=restore_homeassistant,
+                    password=password,
+                    background=True,
+                    location=restore_location,
+                ),
+            )
+        except SupervisorBadRequestError as err:
+            # Supervisor currently does not transmit machine parsable error types
+            message = err.args[0]
+            if message.startswith("Invalid password for backup"):
+                raise IncorrectPasswordError(message) from err
+            raise HomeAssistantError(message) from err

         restore_complete = asyncio.Event()
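Note: every supervisor client call on the backup read/write path is now wrapped so failures surface as BackupReaderWriterError instead of leaking aiohasupervisor exceptions. If more call sites appear, the repetition could be factored out; a hedged sketch of such a helper (not part of this change):

    from collections.abc import Awaitable, Callable

    async def supervisor_call[T](call: Callable[[], Awaitable[T]], what: str) -> T:
        # Convert any SupervisorError into the backup layer's error type.
        try:
            return await call()
        except SupervisorError as err:
            raise BackupReaderWriterError(f"Error {what}: {err}") from err

    # details = await supervisor_call(
    #     lambda: client.backups.backup_info(backup_id), "getting backup details"
    # )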
@@ -118,9 +118,7 @@ class HistoryStats:
                     <= current_period_end_timestamp
                 ):
                     self._history_current_period.append(
-                        HistoryState(
-                            new_state.state, new_state.last_changed.timestamp()
-                        )
+                        HistoryState(new_state.state, new_state.last_changed_timestamp)
                     )
                     new_data = True
             if not new_data and current_period_end_timestamp < now_timestamp:
@@ -131,6 +129,16 @@ class HistoryStats:
             await self._async_history_from_db(
                 current_period_start_timestamp, current_period_end_timestamp
             )
+            if event and (new_state := event.data["new_state"]) is not None:
+                if (
+                    current_period_start_timestamp
+                    <= floored_timestamp(new_state.last_changed)
+                    <= current_period_end_timestamp
+                ):
+                    self._history_current_period.append(
+                        HistoryState(new_state.state, new_state.last_changed_timestamp)
+                    )

             self._previous_run_before_start = False

         seconds_matched, match_count = self._async_compute_seconds_and_changes(
@@ -113,12 +113,17 @@ class HiveBinarySensorEntity(HiveEntity, BinarySensorEntity):
         await self.hive.session.updateData(self.device)
         self.device = await self.hive.sensor.getSensor(self.device)
         self.attributes = self.device.get("attributes", {})
-        self._attr_is_on = self.device["status"]["state"]

         if self.device["hiveType"] != "Connectivity":
-            self._attr_available = self.device["deviceData"].get("online")
+            self._attr_available = (
+                self.device["deviceData"].get("online") and "status" in self.device
+            )
         else:
             self._attr_available = True

+        if self._attr_available:
+            self._attr_is_on = self.device["status"].get("state")


 class HiveSensorEntity(HiveEntity, BinarySensorEntity):
     """Hive Sensor Entity."""
@@ -114,6 +114,7 @@ class HiveDeviceLight(HiveEntity, LightEntity):
             self._attr_hs_color = color_util.color_RGB_to_hs(*rgb)
             self._attr_color_mode = ColorMode.HS
         else:
+            color_temp = self.device["status"].get("color_temp")
             self._attr_color_temp_kelvin = (
                 None
                 if color_temp is None
@@ -9,5 +9,5 @@
   },
   "iot_class": "cloud_polling",
   "loggers": ["apyhiveapi"],
-  "requirements": ["pyhiveapi==0.5.16"]
+  "requirements": ["pyhive-integration==1.0.1"]
 }
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/holiday",
   "iot_class": "local_polling",
-  "requirements": ["holidays==0.63", "babel==2.15.0"]
+  "requirements": ["holidays==0.65", "babel==2.15.0"]
 }
@@ -168,7 +168,7 @@ async def _run_appliance_service[*_Ts](
     error_translation_placeholders: dict[str, str],
 ) -> None:
     try:
-        await hass.async_add_executor_job(getattr(appliance, method), args)
+        await hass.async_add_executor_job(getattr(appliance, method), *args)
     except api.HomeConnectError as err:
         raise HomeAssistantError(
             translation_domain=DOMAIN,
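The one-character fix above matters because the executor passes its positional arguments straight through to the target callable: without unpacking, the whole tuple arrives as a single first parameter. A self-contained demonstration using a plain ThreadPoolExecutor (the set_program function is illustrative only):

from concurrent.futures import ThreadPoolExecutor

def set_program(program: str, duration: int) -> str:
    return f"{program} for {duration}s"

executor = ThreadPoolExecutor()
args = ("eco", 3600)

# Buggy: the whole tuple becomes the first parameter.
bad = executor.submit(set_program, args)
try:
    bad.result()
except TypeError as err:
    print(err)  # missing 1 required positional argument: 'duration'

# Fixed: unpack so each element is its own positional argument.
good = executor.submit(set_program, *args)
print(good.result())  # "eco for 3600s"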
@@ -220,7 +220,7 @@ async def async_setup_entry(
     with contextlib.suppress(HomeConnectError):
         programs = device.appliance.get_programs_available()
     if programs:
-        for program in programs:
+        for program in programs.copy():
             if program not in PROGRAMS_TRANSLATION_KEYS_MAP:
                 programs.remove(program)
                 if program not in programs_not_found:
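Iterating over `programs.copy()` avoids the classic skip-an-element bug: removing from the list being iterated shifts the remaining elements under the iterator's index. A self-contained demonstration:

programs = ["a", "b", "c", "d"]
known = {"c"}

buggy = list(programs)
for program in buggy:          # iterating the list we mutate
    if program not in known:
        buggy.remove(program)  # shifts elements; the iterator skips "b"
print(buggy)                   # ['b', 'c']  -- "b" escaped the filter

fixed = list(programs)
for program in fixed.copy():   # iterate a snapshot, mutate the original
    if program not in known:
        fixed.remove(program)
print(fixed)                   # ['c']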
@@ -52,6 +52,7 @@ from .const import (
     PROP_MIN_VALUE,
     SERV_LIGHTBULB,
 )
+from .util import get_min_max

 _LOGGER = logging.getLogger(__name__)
@@ -120,12 +121,14 @@ class Light(HomeAccessory):
         self.char_brightness = serv_light.configure_char(CHAR_BRIGHTNESS, value=100)

         if CHAR_COLOR_TEMPERATURE in self.chars:
-            self.min_mireds = color_temperature_kelvin_to_mired(
+            min_mireds = color_temperature_kelvin_to_mired(
                 attributes.get(ATTR_MAX_COLOR_TEMP_KELVIN, DEFAULT_MAX_COLOR_TEMP)
             )
-            self.max_mireds = color_temperature_kelvin_to_mired(
+            max_mireds = color_temperature_kelvin_to_mired(
                 attributes.get(ATTR_MIN_COLOR_TEMP_KELVIN, DEFAULT_MIN_COLOR_TEMP)
             )
+            # Ensure min is less than max
+            self.min_mireds, self.max_mireds = get_min_max(min_mireds, max_mireds)
             if not self.color_temp_supported and not self.rgbww_supported:
                 self.max_mireds = self.min_mireds
             self.char_color_temp = serv_light.configure_char(
@@ -282,7 +285,11 @@ class Light(HomeAccessory):
             hue, saturation = color_temperature_to_hs(color_temp)
         elif color_mode == ColorMode.WHITE:
             hue, saturation = 0, 0
-        elif hue_sat := attributes.get(ATTR_HS_COLOR):
+        elif (
+            (hue_sat := attributes.get(ATTR_HS_COLOR))
+            and isinstance(hue_sat, (list, tuple))
+            and len(hue_sat) == 2
+        ):
             hue, saturation = hue_sat
         else:
             hue = None
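The widened `elif` guards against attribute payloads where `hs_color` is present but malformed; unpacking only proceeds for a two-element list or tuple. A compact sketch of the same defensive unpack (standalone, not the HomeKit accessory code itself):

def parse_hs(attributes: dict) -> tuple:
    # Only unpack when the value is a 2-element list/tuple; anything else
    # (None, a string, a wrong-length sequence) falls through safely.
    if (
        (hue_sat := attributes.get("hs_color"))
        and isinstance(hue_sat, (list, tuple))
        and len(hue_sat) == 2
    ):
        hue, saturation = hue_sat
        return hue, saturation
    return None, None

print(parse_hs({"hs_color": (30.0, 75.0)}))  # (30.0, 75.0)
print(parse_hs({"hs_color": "30,75"}))       # (None, None)
print(parse_hs({}))                          # (None, None)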
@@ -14,6 +14,7 @@ from homeassistant.components.climate import (
     ATTR_HVAC_ACTION,
     ATTR_HVAC_MODE,
     ATTR_HVAC_MODES,
+    ATTR_MAX_HUMIDITY,
     ATTR_MAX_TEMP,
     ATTR_MIN_HUMIDITY,
     ATTR_MIN_TEMP,
@@ -21,6 +22,7 @@ from homeassistant.components.climate import (
     ATTR_SWING_MODES,
     ATTR_TARGET_TEMP_HIGH,
     ATTR_TARGET_TEMP_LOW,
+    DEFAULT_MAX_HUMIDITY,
     DEFAULT_MAX_TEMP,
     DEFAULT_MIN_HUMIDITY,
     DEFAULT_MIN_TEMP,
@@ -90,7 +92,7 @@ from .const import (
     SERV_FANV2,
     SERV_THERMOSTAT,
 )
-from .util import temperature_to_homekit, temperature_to_states
+from .util import get_min_max, temperature_to_homekit, temperature_to_states

 _LOGGER = logging.getLogger(__name__)
@@ -208,7 +210,10 @@ class Thermostat(HomeAccessory):
         self.fan_chars: list[str] = []

         attributes = state.attributes
-        min_humidity = attributes.get(ATTR_MIN_HUMIDITY, DEFAULT_MIN_HUMIDITY)
+        min_humidity, _ = get_min_max(
+            attributes.get(ATTR_MIN_HUMIDITY, DEFAULT_MIN_HUMIDITY),
+            attributes.get(ATTR_MAX_HUMIDITY, DEFAULT_MAX_HUMIDITY),
+        )
         features = attributes.get(ATTR_SUPPORTED_FEATURES, 0)

         if features & ClimateEntityFeature.TARGET_TEMPERATURE_RANGE:
@@ -839,6 +844,9 @@ def _get_temperature_range_from_state(
     else:
         max_temp = default_max

+    # Handle reversed temperature range
+    min_temp, max_temp = get_min_max(min_temp, max_temp)
+
     # Homekit only supports 10-38, overwriting
     # the max to appears to work, but less than 0 causes
     # a crash on the home app
@@ -655,3 +655,14 @@ def state_changed_event_is_same_state(event: Event[EventStateChangedData]) -> bool:
     old_state = event_data["old_state"]
     new_state = event_data["new_state"]
     return bool(new_state and old_state and new_state.state == old_state.state)
+
+
+def get_min_max(value1: float, value2: float) -> tuple[float, float]:
+    """Return the minimum and maximum of two values.
+
+    HomeKit will go unavailable if the min and max are reversed
+    so we make sure the min is always the min and the max is always the max
+    as any mistakes made in integrations will cause the entire
+    bridge to go unavailable.
+    """
+    return min(value1, value2), max(value1, value2)
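Usage of the new helper is straightforward: it normalizes any pair into (min, max) order, so a reversed range coming from a misbehaving integration can never reach HomeKit. A quick standalone check, repeating the function from the hunk above:

def get_min_max(value1: float, value2: float) -> tuple:
    """Return the pair ordered as (min, max)."""
    return min(value1, value2), max(value1, value2)

# A reversed range is silently repaired.
min_temp, max_temp = get_min_max(38.0, 10.0)
assert (min_temp, max_temp) == (10.0, 38.0)

# Already-ordered input passes through unchanged.
assert get_min_max(153, 500) == (153, 500)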
@@ -12,6 +12,6 @@
   "iot_class": "local_polling",
   "loggers": ["homewizard_energy"],
   "quality_scale": "platinum",
-  "requirements": ["python-homewizard-energy==v7.0.0"],
+  "requirements": ["python-homewizard-energy==v7.0.1"],
   "zeroconf": ["_hwenergy._tcp.local."]
 }
@@ -8,5 +8,5 @@
   "iot_class": "cloud_push",
   "loggers": ["aioautomower"],
   "quality_scale": "silver",
-  "requirements": ["aioautomower==2024.12.0"]
+  "requirements": ["aioautomower==2025.1.0"]
 }
@@ -68,7 +68,7 @@ ZONE_BINARY_SENSORS: tuple[HydrawiseBinarySensorEntityDescription, ...] = (
 )

 SCHEMA_START_WATERING: VolDictType = {
-    vol.Optional("duration"): vol.All(vol.Coerce(int), vol.Range(min=0, max=90)),
+    vol.Optional("duration"): vol.All(vol.Coerce(int), vol.Range(min=0, max=1440)),
 }
 SCHEMA_SUSPEND: VolDictType = {
     vol.Required("until"): cv.datetime,
@@ -10,7 +10,7 @@ start_watering:
       selector:
         number:
           min: 0
-          max: 90
+          max: 1440
           unit_of_measurement: min
           mode: box
 suspend:
@@ -385,7 +385,7 @@ class InputDatetime(collection.CollectionEntity, RestoreEntity):
     @callback
     def async_set_datetime(self, date=None, time=None, datetime=None, timestamp=None):
         """Set a new date / time."""
-        if timestamp:
+        if timestamp is not None:
             datetime = dt_util.as_local(dt_util.utc_from_timestamp(timestamp))

         if datetime:
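`if timestamp:` treats `0` (the Unix epoch, 1970-01-01 00:00:00 UTC) as false, so a perfectly valid epoch timestamp was silently ignored; `is not None` only skips the argument when it was truly omitted. A standalone illustration of the difference:

from datetime import datetime, timezone

def set_datetime(timestamp=None):
    # Buggy variant: `if timestamp:` would drop timestamp == 0 (the epoch).
    if timestamp is not None:
        return datetime.fromtimestamp(timestamp, tz=timezone.utc)
    return None

print(set_datetime(0))     # 1970-01-01 00:00:00+00:00, not None
print(set_datetime(None))  # None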
@@ -188,8 +188,8 @@ PINECIL_NUMBER_DESCRIPTIONS: tuple[IronOSNumberEntityDescription, ...] = (
         characteristic=CharSetting.POWER_LIMIT,
         mode=NumberMode.BOX,
         native_min_value=0,
-        native_max_value=12,
-        native_step=0.1,
+        native_max_value=120,
+        native_step=5,
         entity_category=EntityCategory.CONFIG,
         native_unit_of_measurement=UnitOfPower.WATT,
         entity_registry_enabled_default=False,
@@ -128,8 +128,8 @@
     "temp_unit": {
       "name": "Temperature display unit",
       "state": {
-        "celsius": "Celsius (C°)",
-        "fahrenheit": "Fahrenheit (F°)"
+        "celsius": "Celsius (°C)",
+        "fahrenheit": "Fahrenheit (°F)"
       }
     },
     "desc_scroll_speed": {
@@ -6,5 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/ituran",
   "integration_type": "hub",
   "iot_class": "cloud_polling",
+  "quality_scale": "silver",
   "requirements": ["pyituran==0.1.4"]
 }
@@ -6,5 +6,5 @@
   "iot_class": "local_polling",
   "loggers": ["keba_kecontact"],
   "quality_scale": "legacy",
-  "requirements": ["keba-kecontact==1.1.0"]
+  "requirements": ["keba-kecontact==1.3.0"]
 }
@@ -12,7 +12,7 @@
   "requirements": [
     "xknx==3.4.0",
     "xknxproject==3.8.1",
-    "knx-frontend==2024.11.16.205004"
+    "knx-frontend==2025.1.18.164225"
   ],
   "single_config_entry": true
 }
@@ -3,23 +3,30 @@
     "step": {
       "connection_type": {
         "title": "KNX connection",
-        "description": "Please enter the connection type we should use for your KNX connection. \n AUTOMATIC - The integration takes care of the connectivity to your KNX Bus by performing a gateway scan. \n TUNNELING - The integration will connect to your KNX bus via tunneling. \n ROUTING - The integration will connect to your KNX bus via routing.",
+        "description": "'Automatic' performs a gateway scan on start, to find a KNX IP interface. It will connect via a tunnel. (Not available if a gateway scan was not successful.) \n\n 'Tunneling' will connect to a specific KNX IP interface over a tunnel. \n\n 'Routing' will use Multicast to communicate with KNX IP routers.",
         "data": {
           "connection_type": "KNX Connection Type"
         },
+        "data_description": {
+          "connection_type": "Please select the connection type you want to use for your KNX connection."
+        }
       },
       "tunnel": {
         "title": "Tunnel",
-        "description": "Please select a gateway from the list.",
         "data": {
-          "gateway": "KNX Tunnel Connection"
+          "gateway": "Please select a gateway from the list."
         },
+        "data_description": {
+          "gateway": "Select a KNX tunneling interface you want use for the connection."
+        }
       },
       "tcp_tunnel_endpoint": {
-        "title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]",
-        "description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]",
+        "title": "Tunnel endpoint",
         "data": {
-          "tunnel_endpoint_ia": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]"
+          "tunnel_endpoint_ia": "Select the tunnel endpoint used for the connection."
         },
+        "data_description": {
+          "tunnel_endpoint_ia": "'Automatic' selects a free tunnel endpoint for you when connecting. If you're unsure, this is the best option."
+        }
       },
       "manual_tunnel": {
@@ -27,23 +34,24 @@
         "description": "Please enter the connection information of your tunneling device.",
         "data": {
           "tunneling_type": "KNX Tunneling Type",
-          "port": "[%key:common::config_flow::data::port%]",
           "host": "[%key:common::config_flow::data::host%]",
+          "port": "[%key:common::config_flow::data::port%]",
           "route_back": "Route back / NAT mode",
           "local_ip": "Local IP interface"
         },
         "data_description": {
-          "port": "Port of the KNX/IP tunneling device.",
+          "tunneling_type": "Select the tunneling type of your KNX/IP tunneling device. Older interfaces may only support `UDP`.",
           "host": "IP address or hostname of the KNX/IP tunneling device.",
+          "port": "Port used by the KNX/IP tunneling device.",
           "route_back": "Enable if your KNXnet/IP tunneling server is behind NAT. Only applies for UDP connections.",
           "local_ip": "Local IP or interface name used for the connection from Home Assistant. Leave blank to use auto-discovery."
         }
       },
       "secure_key_source_menu_tunnel": {
         "title": "KNX IP-Secure",
-        "description": "Select how you want to configure KNX/IP Secure.",
+        "description": "How do you want to configure KNX/IP Secure?",
         "menu_options": {
-          "secure_knxkeys": "Use a `.knxkeys` file containing IP secure keys",
+          "secure_knxkeys": "Use a `.knxkeys` file providing IP secure keys",
           "secure_tunnel_manual": "Configure IP secure credentials manually"
         }
       },
@@ -57,20 +65,23 @@
       },
       "secure_knxkeys": {
         "title": "Import KNX Keyring",
-        "description": "Please select a `.knxkeys` file to import.",
+        "description": "The Keyring is used to encrypt and decrypt KNX IP Secure communication.",
         "data": {
           "knxkeys_file": "Keyring file",
-          "knxkeys_password": "The password to decrypt the `.knxkeys` file"
+          "knxkeys_password": "Keyring password"
         },
         "data_description": {
-          "knxkeys_password": "This was set when exporting the file from ETS."
+          "knxkeys_file": "Select a `.knxkeys` file. This can be exported from ETS.",
+          "knxkeys_password": "The password to open the `.knxkeys` file was set when exporting."
         }
       },
       "knxkeys_tunnel_select": {
-        "title": "Tunnel endpoint",
-        "description": "Select the tunnel endpoint used for the connection.",
+        "title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
         "data": {
-          "user_id": "'Automatic' selects a free tunnel endpoint for you when connecting. If you're unsure, this is the best option."
+          "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
         },
+        "data_description": {
+          "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
+        }
       },
       "secure_tunnel_manual": {
@@ -82,7 +93,7 @@
           "device_authentication": "Device authentication password"
         },
         "data_description": {
-          "user_id": "This is often tunnel number +1. So 'Tunnel 2' would have User-ID '3'.",
+          "user_id": "This usually is tunnel number +1. So first tunnel in the list presented in ETS would have User-ID `2`.",
           "user_password": "Password for the specific tunnel connection set in the 'Properties' panel of the tunnel in ETS.",
           "device_authentication": "This is set in the 'IP' panel of the interface in ETS."
         }
@@ -95,8 +106,8 @@
           "sync_latency_tolerance": "Network latency tolerance"
         },
         "data_description": {
-          "backbone_key": "Can be seen in the 'Security' report of an ETS project. Eg. '00112233445566778899AABBCCDDEEFF'",
-          "sync_latency_tolerance": "Default is 1000."
+          "backbone_key": "Can be seen in the 'Security' report of your ETS project. Eg. `00112233445566778899AABBCCDDEEFF`",
+          "sync_latency_tolerance": "Should be equal to the backbone configuration of your ETS project. Default is `1000`"
         }
       },
       "routing": {
@@ -104,13 +115,16 @@
         "description": "Please configure the routing options.",
         "data": {
           "individual_address": "Individual address",
-          "routing_secure": "Use KNX IP Secure",
+          "routing_secure": "KNX IP Secure Routing",
           "multicast_group": "Multicast group",
           "multicast_port": "Multicast port",
           "local_ip": "[%key:component::knx::config::step::manual_tunnel::data::local_ip%]"
         },
+        "data_description": {
+          "individual_address": "KNX address to be used by Home Assistant, e.g. `0.0.4`",
+          "routing_secure": "Select if your installation uses encrypted communication according to the KNX IP Secure standard. This setting requires compatible devices and configuration. You'll be prompted for credentials in the next step.",
+          "multicast_group": "Multicast group used by your installation. Default is `224.0.23.12`",
+          "multicast_port": "Multicast port used by your installation. Default is `3671`",
+          "local_ip": "[%key:component::knx::config::step::manual_tunnel::data_description::local_ip%]"
+        }
       }
     }
@@ -148,7 +162,7 @@
       },
       "data_description": {
         "state_updater": "Set default for reading states from the KNX Bus. When disabled, Home Assistant will not actively retrieve entity states from the KNX Bus. Can be overridden by `sync_state` entity options.",
-        "rate_limit": "Maximum outgoing telegrams per second.\n`0` to disable limit. Recommended: 0 or 20 to 40",
+        "rate_limit": "Maximum outgoing telegrams per second.\n`0` to disable limit. Recommended: `0` or between `20` and `40`",
         "telegram_log_size": "Telegrams to keep in memory for KNX panel group monitor. Maximum: {telegram_log_size_max}"
       }
     },
@@ -157,20 +171,27 @@
         "description": "[%key:component::knx::config::step::connection_type::description%]",
         "data": {
           "connection_type": "[%key:component::knx::config::step::connection_type::data::connection_type%]"
         },
+        "data_description": {
+          "connection_type": "[%key:component::knx::config::step::connection_type::data_description::connection_type%]"
+        }
       },
       "tunnel": {
         "title": "[%key:component::knx::config::step::tunnel::title%]",
-        "description": "[%key:component::knx::config::step::tunnel::description%]",
         "data": {
           "gateway": "[%key:component::knx::config::step::tunnel::data::gateway%]"
         },
+        "data_description": {
+          "gateway": "[%key:component::knx::config::step::tunnel::data_description::gateway%]"
+        }
       },
       "tcp_tunnel_endpoint": {
-        "title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]",
-        "description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]",
+        "title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
         "data": {
-          "tunnel_endpoint_ia": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]"
+          "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
        },
+        "data_description": {
+          "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
+        }
       },
       "manual_tunnel": {
@@ -184,6 +205,7 @@
           "local_ip": "[%key:component::knx::config::step::manual_tunnel::data::local_ip%]"
         },
         "data_description": {
+          "tunneling_type": "[%key:component::knx::config::step::manual_tunnel::data_description::tunneling_type%]",
           "port": "[%key:component::knx::config::step::manual_tunnel::data_description::port%]",
           "host": "[%key:component::knx::config::step::manual_tunnel::data_description::host%]",
           "route_back": "[%key:component::knx::config::step::manual_tunnel::data_description::route_back%]",
@@ -214,14 +236,17 @@
           "knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data::knxkeys_password%]"
         },
         "data_description": {
+          "knxkeys_file": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_file%]",
           "knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_password%]"
         }
       },
       "knxkeys_tunnel_select": {
-        "title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]",
-        "description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]",
+        "title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
         "data": {
-          "user_id": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]"
+          "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
         },
+        "data_description": {
+          "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
+        }
       },
       "secure_tunnel_manual": {
@@ -262,6 +287,9 @@
       },
       "data_description": {
+        "individual_address": "[%key:component::knx::config::step::routing::data_description::individual_address%]",
         "routing_secure": "[%key:component::knx::config::step::routing::data_description::routing_secure%]",
+        "multicast_group": "[%key:component::knx::config::step::routing::data_description::multicast_group%]",
+        "multicast_port": "[%key:component::knx::config::step::routing::data_description::multicast_port%]",
         "local_ip": "[%key:component::knx::config::step::manual_tunnel::data_description::local_ip%]"
       }
     }
@@ -37,5 +37,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["pylamarzocco"],
   "quality_scale": "platinum",
-  "requirements": ["pylamarzocco==1.4.2"]
+  "requirements": ["pylamarzocco==1.4.6"]
 }
@@ -13,7 +13,7 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["demetriek"],
-  "requirements": ["demetriek==1.1.0"],
+  "requirements": ["demetriek==1.2.0"],
   "ssdp": [
     {
       "deviceType": "urn:schemas-upnp-org:device:LaMetric:1"
@@ -6,7 +6,7 @@ from collections.abc import Awaitable, Callable
 from dataclasses import dataclass
 from typing import Any

-from demetriek import Device, LaMetricDevice
+from demetriek import Device, LaMetricDevice, Range

 from homeassistant.components.number import NumberEntity, NumberEntityDescription
 from homeassistant.config_entries import ConfigEntry
@@ -25,6 +25,7 @@ class LaMetricNumberEntityDescription(NumberEntityDescription):
     """Class describing LaMetric number entities."""

     value_fn: Callable[[Device], int | None]
+    range_fn: Callable[[Device], Range | None]
     has_fn: Callable[[Device], bool] = lambda device: True
     set_value_fn: Callable[[LaMetricDevice, float], Awaitable[Any]]
@@ -33,11 +34,9 @@ NUMBERS = [
     LaMetricNumberEntityDescription(
         key="brightness",
         translation_key="brightness",
-        name="Brightness",
         entity_category=EntityCategory.CONFIG,
         native_step=1,
-        native_min_value=0,
-        native_max_value=100,
+        range_fn=lambda device: device.display.brightness_limit,
         native_unit_of_measurement=PERCENTAGE,
         value_fn=lambda device: device.display.brightness,
         set_value_fn=lambda device, bri: device.display(brightness=int(bri)),
@@ -45,12 +44,11 @@ NUMBERS = [
     LaMetricNumberEntityDescription(
         key="volume",
         translation_key="volume",
-        name="Volume",
         entity_category=EntityCategory.CONFIG,
         native_step=1,
-        native_min_value=0,
-        native_max_value=100,
-        has_fn=lambda device: bool(device.audio),
+        range_fn=lambda device: device.audio.volume_range if device.audio else None,
         native_unit_of_measurement=PERCENTAGE,
+        has_fn=lambda device: bool(device.audio and device.audio.available),
         value_fn=lambda device: device.audio.volume if device.audio else 0,
         set_value_fn=lambda api, volume: api.audio(volume=int(volume)),
     ),
@@ -93,6 +91,20 @@ class LaMetricNumberEntity(LaMetricEntity, NumberEntity):
         """Return the number value."""
         return self.entity_description.value_fn(self.coordinator.data)

+    @property
+    def native_min_value(self) -> int:
+        """Return the min range."""
+        if limits := self.entity_description.range_fn(self.coordinator.data):
+            return limits.range_min
+        return 0
+
+    @property
+    def native_max_value(self) -> int:
+        """Return the max range."""
+        if limits := self.entity_description.range_fn(self.coordinator.data):
+            return limits.range_max
+        return 100
+
     @lametric_exception_handler
     async def async_set_native_value(self, value: float) -> None:
         """Change to new number value."""
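With `range_fn` in place, the min/max properties above resolve limits from the device at runtime instead of hardcoding 0-100. A reduced sketch of the pattern; the Range and description shapes here are simplified stand-ins for the demetriek models, not the actual library classes:

from dataclasses import dataclass
from typing import Callable

@dataclass
class Range:
    """Stand-in for a device-reported limit range."""
    range_min: int
    range_max: int

@dataclass
class NumberDescription:
    # Returns the device-reported limits, or None when unknown.
    range_fn: Callable[[object], object]

class SketchNumberEntity:
    def __init__(self, description: NumberDescription, device: object) -> None:
        self.entity_description = description
        self.device = device

    @property
    def native_min_value(self) -> int:
        if limits := self.entity_description.range_fn(self.device):
            return limits.range_min
        return 0  # fallback when the device reports no limits

    @property
    def native_max_value(self) -> int:
        if limits := self.entity_description.range_fn(self.device):
            return limits.range_max
        return 100

entity = SketchNumberEntity(NumberDescription(range_fn=lambda d: Range(2, 87)), object())
print(entity.native_min_value, entity.native_max_value)  # 2 87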
@@ -66,6 +66,14 @@
         "name": "Dismiss all notifications"
       }
     },
+    "number": {
+      "brightness": {
+        "name": "Brightness"
+      },
+      "volume": {
+        "name": "Volume"
+      }
+    },
     "sensor": {
       "rssi": {
         "name": "Wi-Fi signal"
@@ -53,6 +53,6 @@
   "requirements": [
     "aiolifx==1.1.2",
     "aiolifx-effects==0.3.2",
-    "aiolifx-themes==0.5.5"
+    "aiolifx-themes==0.6.0"
   ]
 }
@@ -354,7 +354,7 @@ def filter_turn_off_params(
     if not params:
         return params

-    supported_features = light.supported_features
+    supported_features = light.supported_features_compat

     if LightEntityFeature.FLASH not in supported_features:
         params.pop(ATTR_FLASH, None)
@@ -366,7 +366,7 @@ def filter_turn_off_params(

 def filter_turn_on_params(light: LightEntity, params: dict[str, Any]) -> dict[str, Any]:
     """Filter out params not supported by the light."""
-    supported_features = light.supported_features
+    supported_features = light.supported_features_compat

     if LightEntityFeature.EFFECT not in supported_features:
         params.pop(ATTR_EFFECT, None)
@@ -1093,7 +1093,7 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     def capability_attributes(self) -> dict[str, Any]:
         """Return capability attributes."""
         data: dict[str, Any] = {}
-        supported_features = self.supported_features
+        supported_features = self.supported_features_compat
         supported_color_modes = self._light_internal_supported_color_modes

         if ColorMode.COLOR_TEMP in supported_color_modes:
@@ -1255,11 +1255,12 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     def state_attributes(self) -> dict[str, Any] | None:
         """Return state attributes."""
         data: dict[str, Any] = {}
-        supported_features = self.supported_features
+        supported_features = self.supported_features_compat
         supported_color_modes = self.supported_color_modes
         legacy_supported_color_modes = (
             supported_color_modes or self._light_internal_supported_color_modes
         )
+        supported_features_value = supported_features.value
         _is_on = self.is_on
         color_mode = self._light_internal_color_mode if _is_on else None
@@ -1278,6 +1279,13 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
                 data[ATTR_BRIGHTNESS] = self.brightness
             else:
                 data[ATTR_BRIGHTNESS] = None
+        elif supported_features_value & _DEPRECATED_SUPPORT_BRIGHTNESS.value:
+            # Backwards compatibility for ambiguous / incomplete states
+            # Warning is printed by supported_features_compat, remove in 2025.1
+            if _is_on:
+                data[ATTR_BRIGHTNESS] = self.brightness
+            else:
+                data[ATTR_BRIGHTNESS] = None

         if color_temp_supported(supported_color_modes):
             if color_mode == ColorMode.COLOR_TEMP:
@@ -1292,6 +1300,21 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
             else:
                 data[ATTR_COLOR_TEMP_KELVIN] = None
                 data[_DEPRECATED_ATTR_COLOR_TEMP.value] = None
+        elif supported_features_value & _DEPRECATED_SUPPORT_COLOR_TEMP.value:
+            # Backwards compatibility
+            # Warning is printed by supported_features_compat, remove in 2025.1
+            if _is_on:
+                color_temp_kelvin = self.color_temp_kelvin
+                data[ATTR_COLOR_TEMP_KELVIN] = color_temp_kelvin
+                if color_temp_kelvin:
+                    data[_DEPRECATED_ATTR_COLOR_TEMP.value] = (
+                        color_util.color_temperature_kelvin_to_mired(color_temp_kelvin)
+                    )
+                else:
+                    data[_DEPRECATED_ATTR_COLOR_TEMP.value] = None
+            else:
+                data[ATTR_COLOR_TEMP_KELVIN] = None
+                data[_DEPRECATED_ATTR_COLOR_TEMP.value] = None

         if color_supported(legacy_supported_color_modes) or color_temp_supported(
             legacy_supported_color_modes
@@ -1329,7 +1352,24 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
                 type(self),
                 report_issue,
             )
-            return {ColorMode.ONOFF}
+        supported_features = self.supported_features_compat
+        supported_features_value = supported_features.value
+        supported_color_modes: set[ColorMode] = set()
+
+        if supported_features_value & _DEPRECATED_SUPPORT_COLOR_TEMP.value:
+            supported_color_modes.add(ColorMode.COLOR_TEMP)
+        if supported_features_value & _DEPRECATED_SUPPORT_COLOR.value:
+            supported_color_modes.add(ColorMode.HS)
+        if (
+            not supported_color_modes
+            and supported_features_value & _DEPRECATED_SUPPORT_BRIGHTNESS.value
+        ):
+            supported_color_modes = {ColorMode.BRIGHTNESS}
+
+        if not supported_color_modes:
+            supported_color_modes = {ColorMode.ONOFF}
+
+        return supported_color_modes

     @cached_property
     def supported_color_modes(self) -> set[ColorMode] | set[str] | None:
@@ -1341,6 +1381,37 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
         """Flag supported features."""
         return self._attr_supported_features

+    @property
+    def supported_features_compat(self) -> LightEntityFeature:
+        """Return the supported features as LightEntityFeature.
+
+        Remove this compatibility shim in 2025.1 or later.
+        """
+        features = self.supported_features
+        if type(features) is not int:  # noqa: E721
+            return features
+        new_features = LightEntityFeature(features)
+        if self._deprecated_supported_features_reported is True:
+            return new_features
+        self._deprecated_supported_features_reported = True
+        report_issue = self._suggest_report_issue()
+        report_issue += (
+            " and reference "
+            "https://developers.home-assistant.io/blog/2023/12/28/support-feature-magic-numbers-deprecation"
+        )
+        _LOGGER.warning(
+            (
+                "Entity %s (%s) is using deprecated supported features"
+                " values which will be removed in HA Core 2025.1. Instead it should use"
+                " %s and color modes, please %s"
+            ),
+            self.entity_id,
+            type(self),
+            repr(new_features),
+            report_issue,
+        )
+        return new_features
+
     def __should_report_light_issue(self) -> bool:
         """Return if light color mode issues should be reported."""
         if not self.platform:
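The shim above coerces a bare `int` feature bitmask into the `IntFlag` enum and warns exactly once per entity. The core mechanics, reduced to a standalone sketch with hypothetical names (Feature and Entity are stand-ins, not the Home Assistant classes):

from enum import IntFlag
import logging

_LOGGER = logging.getLogger(__name__)

class Feature(IntFlag):
    EFFECT = 4
    FLASH = 8
    TRANSITION = 32

class Entity:
    _deprecated_reported = False
    supported_features = 4 | 8  # legacy magic number instead of Feature members

    @property
    def supported_features_compat(self) -> Feature:
        features = self.supported_features
        if type(features) is not int:  # already the enum, pass through
            return features
        new_features = Feature(features)  # coerce the raw int
        if not self._deprecated_reported:  # warn only once per entity
            self._deprecated_reported = True
            _LOGGER.warning("deprecated int features, use %r", new_features)
        return new_features

print(repr(Entity().supported_features_compat))  # Feature.EFFECT|FLASH (value 12)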
@@ -44,9 +44,15 @@ class LinkPlayBaseEntity(Entity):
         if model != MANUFACTURER_GENERIC:
             model_id = bridge.device.properties["project"]

+        connections: set[tuple[str, str]] = set()
+        if "MAC" in bridge.device.properties:
+            connections.add(
+                (dr.CONNECTION_NETWORK_MAC, bridge.device.properties["MAC"])
+            )
+
         self._attr_device_info = dr.DeviceInfo(
             configuration_url=bridge.endpoint,
-            connections={(dr.CONNECTION_NETWORK_MAC, bridge.device.properties["MAC"])},
+            connections=connections,
             hw_version=bridge.device.properties["hardware"],
             identifiers={(DOMAIN, bridge.device.uuid)},
             manufacturer=manufacturer,
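Building the `connections` set conditionally means devices that do not expose a MAC address still register cleanly instead of raising `KeyError`. A minimal standalone sketch of the guard (the constant and function names here are illustrative):

CONNECTION_NETWORK_MAC = "mac"

def build_connections(properties: dict) -> set:
    # Only advertise a MAC connection when the device actually reports one.
    connections: set = set()
    if "MAC" in properties:
        connections.add((CONNECTION_NETWORK_MAC, properties["MAC"]))
    return connections

print(build_connections({"MAC": "aa:bb:cc:dd:ee:ff"}))
print(build_connections({}))  # set() -- no KeyError for MAC-less devices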
Some files were not shown because too many files have changed in this diff