forked from home-assistant/core
Compare commits
144 Commits
2023.8.0b2
...
2023.8.3
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bdd202b873 | ||
|
|
ce0f957ce4 | ||
|
|
9291fab0b4 | ||
|
|
7c5b1c8cd2 | ||
|
|
1c069539f5 | ||
|
|
8dcb04eab1 | ||
|
|
88352b6ca1 | ||
|
|
1c7e3005aa | ||
|
|
aaefc29e32 | ||
|
|
c81c0149a2 | ||
|
|
a835d07773 | ||
|
|
06d36983fe | ||
|
|
c55b96eb65 | ||
|
|
d44d84b430 | ||
|
|
9bbf855a7d | ||
|
|
a66e308857 | ||
|
|
b76a4e0787 | ||
|
|
a962d2b28d | ||
|
|
69c61a2632 | ||
|
|
9f69ab1604 | ||
|
|
07bb0fc16a | ||
|
|
16d2c80437 | ||
|
|
3da1a611c2 | ||
|
|
0070a5e83d | ||
|
|
d842b2574a | ||
|
|
0daa972217 | ||
|
|
2a0c121f65 | ||
|
|
cf839d0ce4 | ||
|
|
350e5ee9a7 | ||
|
|
73898daff3 | ||
|
|
f641c0ba58 | ||
|
|
c72a184923 | ||
|
|
0ce0050068 | ||
|
|
63b2fa6d1b | ||
|
|
819cea0ea0 | ||
|
|
773ec3adb6 | ||
|
|
1ffbc8118d | ||
|
|
a2dec234c2 | ||
|
|
32d4c2607b | ||
|
|
b584eb757b | ||
|
|
d488bb3dca | ||
|
|
84d185a075 | ||
|
|
a2637ac75a | ||
|
|
8bdac8fb30 | ||
|
|
61cafb26ac | ||
|
|
73f0ac0542 | ||
|
|
4e6c839065 | ||
|
|
dbbe1456a4 | ||
|
|
21f0210abd | ||
|
|
955c1ec215 | ||
|
|
a2227079e4 | ||
|
|
504d164349 | ||
|
|
4b09e3b4a3 | ||
|
|
c9f474a417 | ||
|
|
a276fd444e | ||
|
|
79bb857e06 | ||
|
|
c417b1e611 | ||
|
|
a8c181b7fc | ||
|
|
80a473682e | ||
|
|
2b3bf36364 | ||
|
|
5aa96aa6f6 | ||
|
|
379e144f84 | ||
|
|
60da270372 | ||
|
|
9b090d8c7b | ||
|
|
871ab7a818 | ||
|
|
70730e4170 | ||
|
|
922f3f2816 | ||
|
|
19d6c0c949 | ||
|
|
136b91b539 | ||
|
|
cdc1de08ad | ||
|
|
01fad9b41c | ||
|
|
f7dbe88ee0 | ||
|
|
2e98c3f077 | ||
|
|
a397782ae3 | ||
|
|
489860a28b | ||
|
|
b00fc9660b | ||
|
|
3b48c8a0f5 | ||
|
|
d71f16c8b1 | ||
|
|
9479a59a2f | ||
|
|
39fc557189 | ||
|
|
cdabf76d15 | ||
|
|
9bec7ea93b | ||
|
|
53ee178545 | ||
|
|
79486f1880 | ||
|
|
5283afc116 | ||
|
|
2b876fa485 | ||
|
|
047a210786 | ||
|
|
16e1212572 | ||
|
|
6a9318b90a | ||
|
|
f066b4645a | ||
|
|
0617363f53 | ||
|
|
7a244e30a4 | ||
|
|
694d28ae3b | ||
|
|
12d3bce3a7 | ||
|
|
0a24299e23 | ||
|
|
1945275b7c | ||
|
|
79729043ed | ||
|
|
6a8484b6da | ||
|
|
2f5f15b345 | ||
|
|
a36e746eb4 | ||
|
|
5da94f40ec | ||
|
|
d131b8c80b | ||
|
|
455ac084ec | ||
|
|
62421d9ec3 | ||
|
|
72533ad292 | ||
|
|
c5747dffbc | ||
|
|
20bddec328 | ||
|
|
b6671385bc | ||
|
|
21f878bfa4 | ||
|
|
77eec53f63 | ||
|
|
f325122da5 | ||
|
|
111510b11a | ||
|
|
445aaa0267 | ||
|
|
598dece947 | ||
|
|
f81acc567b | ||
|
|
641b5ee7e4 | ||
|
|
f0e640346f | ||
|
|
14850a23f3 | ||
|
|
d115a372ae | ||
|
|
f7688c5e3b | ||
|
|
80e0bcfaea | ||
|
|
97e28acfc9 | ||
|
|
c3bcffdce7 | ||
|
|
2b26e20528 | ||
|
|
116b026768 | ||
|
|
87c11ca419 | ||
|
|
2f6aea450e | ||
|
|
8261a769a5 | ||
|
|
dfd5c74de0 | ||
|
|
20cf5f0f2c | ||
|
|
f780397c2d | ||
|
|
c600d07a9d | ||
|
|
c412cf9a5e | ||
|
|
d891f1a5eb | ||
|
|
3f22c74ffa | ||
|
|
8355230433 | ||
|
|
ec1b24f8d6 | ||
|
|
e473131a2c | ||
|
|
c99bf90ec7 | ||
|
|
00c1f3d85e | ||
|
|
278f02c86f | ||
|
|
c950abd323 | ||
|
|
da401d5ad6 | ||
|
|
877c30c3a0 |
@@ -8,5 +8,5 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pyairvisual", "pysmb"],
|
||||
"requirements": ["pyairvisual==2022.12.1"]
|
||||
"requirements": ["pyairvisual==2023.08.1"]
|
||||
}
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pyairvisual", "pysmb"],
|
||||
"requirements": ["pyairvisual==2022.12.1"]
|
||||
"requirements": ["pyairvisual==2023.08.1"]
|
||||
}
|
||||
|
||||
@@ -11,5 +11,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/airzone",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioairzone"],
|
||||
"requirements": ["aioairzone==0.6.4"]
|
||||
"requirements": ["aioairzone==0.6.5"]
|
||||
}
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/aladdin_connect",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aladdin_connect"],
|
||||
"requirements": ["AIOAladdinConnect==0.1.56"]
|
||||
"requirements": ["AIOAladdinConnect==0.1.57"]
|
||||
}
|
||||
|
||||
@@ -26,6 +26,7 @@ from homeassistant.const import (
|
||||
STATE_ON,
|
||||
)
|
||||
from homeassistant.core import HassJob, HomeAssistant
|
||||
from homeassistant.exceptions import ServiceNotFound
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
@@ -293,9 +294,15 @@ class Alert(Entity):
|
||||
LOGGER.debug(msg_payload)
|
||||
|
||||
for target in self._notifiers:
|
||||
await self.hass.services.async_call(
|
||||
DOMAIN_NOTIFY, target, msg_payload, context=self._context
|
||||
)
|
||||
try:
|
||||
await self.hass.services.async_call(
|
||||
DOMAIN_NOTIFY, target, msg_payload, context=self._context
|
||||
)
|
||||
except ServiceNotFound:
|
||||
LOGGER.error(
|
||||
"Failed to call notify.%s, retrying at next notification interval",
|
||||
target,
|
||||
)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Async Unacknowledge alert."""
|
||||
|
||||
@@ -58,6 +58,7 @@ class AndroidTVRemoteConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
assert self.host
|
||||
api = create_api(self.hass, self.host, enable_ime=False)
|
||||
try:
|
||||
await api.async_generate_cert_if_missing()
|
||||
self.name, self.mac = await api.async_get_name_and_mac()
|
||||
assert self.mac
|
||||
await self.async_set_unique_id(format_mac(self.mac))
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["androidtvremote2"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["androidtvremote2==0.0.12"],
|
||||
"requirements": ["androidtvremote2==0.0.13"],
|
||||
"zeroconf": ["_androidtvremote2._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/apple_tv",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pyatv", "srptools"],
|
||||
"requirements": ["pyatv==0.13.2"],
|
||||
"requirements": ["pyatv==0.13.4"],
|
||||
"zeroconf": [
|
||||
"_mediaremotetv._tcp.local.",
|
||||
"_companion-link._tcp.local.",
|
||||
|
||||
@@ -19,6 +19,6 @@
|
||||
"bluetooth-adapters==0.16.0",
|
||||
"bluetooth-auto-recovery==1.2.1",
|
||||
"bluetooth-data-tools==1.6.1",
|
||||
"dbus-fast==1.87.5"
|
||||
"dbus-fast==1.90.1"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["bimmer_connected"],
|
||||
"requirements": ["bimmer-connected==0.13.8"]
|
||||
"requirements": ["bimmer-connected==0.13.9"]
|
||||
}
|
||||
|
||||
@@ -45,6 +45,7 @@ class BroadlinkLight(BroadlinkEntity, LightEntity):
|
||||
"""Representation of a Broadlink light."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
|
||||
def __init__(self, device):
|
||||
"""Initialize the light."""
|
||||
|
||||
@@ -15,9 +15,11 @@ from homeassistant.components.sensor import (
|
||||
DOMAIN as SENSOR_DOMAIN,
|
||||
PLATFORM_SCHEMA,
|
||||
STATE_CLASSES_SCHEMA,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.components.sensor.helpers import async_parse_date_datetime
|
||||
from homeassistant.const import (
|
||||
CONF_COMMAND,
|
||||
CONF_DEVICE_CLASS,
|
||||
@@ -205,15 +207,25 @@ class CommandSensor(ManualTriggerEntity, SensorEntity):
|
||||
self._process_manual_data(value)
|
||||
return
|
||||
|
||||
if self._value_template is not None:
|
||||
self._attr_native_value = (
|
||||
self._value_template.async_render_with_possible_json_value(
|
||||
value,
|
||||
None,
|
||||
)
|
||||
self._attr_native_value = None
|
||||
if self._value_template is not None and value is not None:
|
||||
value = self._value_template.async_render_with_possible_json_value(
|
||||
value,
|
||||
None,
|
||||
)
|
||||
else:
|
||||
|
||||
if self.device_class not in {
|
||||
SensorDeviceClass.DATE,
|
||||
SensorDeviceClass.TIMESTAMP,
|
||||
}:
|
||||
self._attr_native_value = value
|
||||
self._process_manual_data(value)
|
||||
return
|
||||
|
||||
if value is not None:
|
||||
self._attr_native_value = async_parse_date_datetime(
|
||||
value, self.entity_id, self.device_class
|
||||
)
|
||||
self._process_manual_data(value)
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "local_push",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["hassil==1.2.5", "home-assistant-intents==2023.7.25"]
|
||||
"requirements": ["hassil==1.2.5", "home-assistant-intents==2023.8.2"]
|
||||
}
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
{
|
||||
"domain": "duotecno",
|
||||
"name": "duotecno",
|
||||
"name": "Duotecno",
|
||||
"codeowners": ["@cereal2nd"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/duotecno",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["pyduotecno==2023.7.3"]
|
||||
"requirements": ["pyduotecno==2023.8.3"]
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@ import logging
|
||||
from aiohttp import web
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.http import HomeAssistantAccessLogger
|
||||
from homeassistant.components.network import async_get_source_ip
|
||||
from homeassistant.const import (
|
||||
CONF_ENTITIES,
|
||||
@@ -100,7 +101,7 @@ async def start_emulated_hue_bridge(
|
||||
config.advertise_port or config.listen_port,
|
||||
)
|
||||
|
||||
runner = web.AppRunner(app)
|
||||
runner = web.AppRunner(app, access_log_class=HomeAssistantAccessLogger)
|
||||
await runner.setup()
|
||||
|
||||
site = web.TCPSite(runner, config.host_ip_addr, config.listen_port)
|
||||
|
||||
@@ -11,7 +11,7 @@ DEFAULT_ALLOW_SERVICE_CALLS = True
|
||||
DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS = False
|
||||
|
||||
|
||||
STABLE_BLE_VERSION_STR = "2023.6.0"
|
||||
STABLE_BLE_VERSION_STR = "2023.8.0"
|
||||
STABLE_BLE_VERSION = AwesomeVersion(STABLE_BLE_VERSION_STR)
|
||||
PROJECT_URLS = {
|
||||
"esphome.bluetooth-proxy": "https://esphome.github.io/bluetooth-proxies/",
|
||||
|
||||
@@ -161,14 +161,29 @@ class EsphomeEntity(Entity, Generic[_InfoT, _StateT]):
|
||||
assert entry_data.device_info is not None
|
||||
device_info = entry_data.device_info
|
||||
self._device_info = device_info
|
||||
if object_id := entity_info.object_id:
|
||||
# Use the object_id to suggest the entity_id
|
||||
self.entity_id = f"{domain}.{device_info.name}_{object_id}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
connections={(dr.CONNECTION_NETWORK_MAC, device_info.mac_address)}
|
||||
)
|
||||
self._entry_id = entry_data.entry_id
|
||||
self._attr_has_entity_name = bool(device_info.friendly_name)
|
||||
#
|
||||
# If `friendly_name` is set, we use the Friendly naming rules, if
|
||||
# `friendly_name` is not set we make an exception to the naming rules for
|
||||
# backwards compatibility and use the Legacy naming rules.
|
||||
#
|
||||
# Friendly naming
|
||||
# - Friendly name is prepended to entity names
|
||||
# - Device Name is prepended to entity ids
|
||||
# - Entity id is constructed from device name and object id
|
||||
#
|
||||
# Legacy naming
|
||||
# - Device name is not prepended to entity names
|
||||
# - Device name is not prepended to entity ids
|
||||
# - Entity id is constructed from entity name
|
||||
#
|
||||
if not device_info.friendly_name:
|
||||
return
|
||||
self._attr_has_entity_name = True
|
||||
self.entity_id = f"{domain}.{device_info.name}_{entity_info.object_id}"
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callbacks."""
|
||||
|
||||
@@ -54,5 +54,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["flux_led"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["flux-led==1.0.1"]
|
||||
"requirements": ["flux-led==1.0.2"]
|
||||
}
|
||||
|
||||
@@ -18,9 +18,10 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Remove forked-daapd component."""
|
||||
status = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
if status and hass.data.get(DOMAIN) and hass.data[DOMAIN].get(entry.entry_id):
|
||||
hass.data[DOMAIN][entry.entry_id][
|
||||
if websocket_handler := hass.data[DOMAIN][entry.entry_id][
|
||||
HASS_DATA_UPDATER_KEY
|
||||
].websocket_handler.cancel()
|
||||
].websocket_handler:
|
||||
websocket_handler.cancel()
|
||||
for remove_listener in hass.data[DOMAIN][entry.entry_id][
|
||||
HASS_DATA_REMOVE_LISTENERS_KEY
|
||||
]:
|
||||
|
||||
@@ -31,6 +31,7 @@ from homeassistant.components.spotify import (
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import PlatformNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_connect,
|
||||
@@ -127,10 +128,10 @@ async def async_setup_entry(
|
||||
forked_daapd_updater = ForkedDaapdUpdater(
|
||||
hass, forked_daapd_api, config_entry.entry_id
|
||||
)
|
||||
await forked_daapd_updater.async_init()
|
||||
hass.data[DOMAIN][config_entry.entry_id][
|
||||
HASS_DATA_UPDATER_KEY
|
||||
] = forked_daapd_updater
|
||||
await forked_daapd_updater.async_init()
|
||||
|
||||
|
||||
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
@@ -914,7 +915,8 @@ class ForkedDaapdUpdater:
|
||||
|
||||
async def async_init(self):
|
||||
"""Perform async portion of class initialization."""
|
||||
server_config = await self._api.get_request("config")
|
||||
if not (server_config := await self._api.get_request("config")):
|
||||
raise PlatformNotReady
|
||||
if websocket_port := server_config.get("websocket_port"):
|
||||
self.websocket_handler = asyncio.create_task(
|
||||
self._api.start_websocket_handler(
|
||||
|
||||
@@ -161,10 +161,13 @@ class FreeboxRouter:
|
||||
async def _update_raids_sensors(self) -> None:
|
||||
"""Update Freebox raids."""
|
||||
# None at first request
|
||||
fbx_raids: list[dict[str, Any]] = await self._api.storage.get_raids() or []
|
||||
|
||||
for fbx_raid in fbx_raids:
|
||||
self.raids[fbx_raid["id"]] = fbx_raid
|
||||
try:
|
||||
fbx_raids: list[dict[str, Any]] = await self._api.storage.get_raids() or []
|
||||
except HttpRequestError:
|
||||
_LOGGER.warning("Unable to enumerate raid disks")
|
||||
else:
|
||||
for fbx_raid in fbx_raids:
|
||||
self.raids[fbx_raid["id"]] = fbx_raid
|
||||
|
||||
async def update_home_devices(self) -> None:
|
||||
"""Update Home devices (alarm, light, sensor, switch, remote ...)."""
|
||||
|
||||
@@ -160,6 +160,15 @@ HostAttributes = TypedDict(
|
||||
)
|
||||
|
||||
|
||||
class HostInfo(TypedDict):
|
||||
"""FRITZ!Box host info class."""
|
||||
|
||||
mac: str
|
||||
name: str
|
||||
ip: str
|
||||
status: bool
|
||||
|
||||
|
||||
class UpdateCoordinatorDataType(TypedDict):
|
||||
"""Update coordinator data type."""
|
||||
|
||||
@@ -380,16 +389,86 @@ class FritzBoxTools(
|
||||
"""Event specific per FRITZ!Box entry to signal updates in devices."""
|
||||
return f"{DOMAIN}-device-update-{self._unique_id}"
|
||||
|
||||
async def _async_update_hosts_info(self) -> list[HostAttributes]:
|
||||
"""Retrieve latest hosts information from the FRITZ!Box."""
|
||||
async def _async_get_wan_access(self, ip_address: str) -> bool | None:
|
||||
"""Get WAN access rule for given IP address."""
|
||||
try:
|
||||
return await self.hass.async_add_executor_job(
|
||||
self.fritz_hosts.get_hosts_attributes
|
||||
wan_access = await self.hass.async_add_executor_job(
|
||||
partial(
|
||||
self.connection.call_action,
|
||||
"X_AVM-DE_HostFilter:1",
|
||||
"GetWANAccessByIP",
|
||||
NewIPv4Address=ip_address,
|
||||
)
|
||||
)
|
||||
return not wan_access.get("NewDisallow")
|
||||
except FRITZ_EXCEPTIONS as ex:
|
||||
_LOGGER.debug(
|
||||
(
|
||||
"could not get WAN access rule for client device with IP '%s',"
|
||||
" error: %s"
|
||||
),
|
||||
ip_address,
|
||||
ex,
|
||||
)
|
||||
return None
|
||||
|
||||
async def _async_update_hosts_info(self) -> dict[str, Device]:
|
||||
"""Retrieve latest hosts information from the FRITZ!Box."""
|
||||
hosts_attributes: list[HostAttributes] = []
|
||||
hosts_info: list[HostInfo] = []
|
||||
try:
|
||||
try:
|
||||
hosts_attributes = await self.hass.async_add_executor_job(
|
||||
self.fritz_hosts.get_hosts_attributes
|
||||
)
|
||||
except FritzActionError:
|
||||
hosts_info = await self.hass.async_add_executor_job(
|
||||
self.fritz_hosts.get_hosts_info
|
||||
)
|
||||
except Exception as ex: # pylint: disable=[broad-except]
|
||||
if not self.hass.is_stopping:
|
||||
raise HomeAssistantError("Error refreshing hosts info") from ex
|
||||
return []
|
||||
|
||||
hosts: dict[str, Device] = {}
|
||||
if hosts_attributes:
|
||||
for attributes in hosts_attributes:
|
||||
if not attributes.get("MACAddress"):
|
||||
continue
|
||||
|
||||
if (wan_access := attributes.get("X_AVM-DE_WANAccess")) is not None:
|
||||
wan_access_result = "granted" in wan_access
|
||||
else:
|
||||
wan_access_result = None
|
||||
|
||||
hosts[attributes["MACAddress"]] = Device(
|
||||
name=attributes["HostName"],
|
||||
connected=attributes["Active"],
|
||||
connected_to="",
|
||||
connection_type="",
|
||||
ip_address=attributes["IPAddress"],
|
||||
ssid=None,
|
||||
wan_access=wan_access_result,
|
||||
)
|
||||
else:
|
||||
for info in hosts_info:
|
||||
if not info.get("mac"):
|
||||
continue
|
||||
|
||||
if info["ip"]:
|
||||
wan_access_result = await self._async_get_wan_access(info["ip"])
|
||||
else:
|
||||
wan_access_result = None
|
||||
|
||||
hosts[info["mac"]] = Device(
|
||||
name=info["name"],
|
||||
connected=info["status"],
|
||||
connected_to="",
|
||||
connection_type="",
|
||||
ip_address=info["ip"],
|
||||
ssid=None,
|
||||
wan_access=wan_access_result,
|
||||
)
|
||||
return hosts
|
||||
|
||||
def _update_device_info(self) -> tuple[bool, str | None, str | None]:
|
||||
"""Retrieve latest device information from the FRITZ!Box."""
|
||||
@@ -464,20 +543,7 @@ class FritzBoxTools(
|
||||
consider_home = _default_consider_home
|
||||
|
||||
new_device = False
|
||||
hosts = {}
|
||||
for host in await self._async_update_hosts_info():
|
||||
if not host.get("MACAddress"):
|
||||
continue
|
||||
|
||||
hosts[host["MACAddress"]] = Device(
|
||||
name=host["HostName"],
|
||||
connected=host["Active"],
|
||||
connected_to="",
|
||||
connection_type="",
|
||||
ip_address=host["IPAddress"],
|
||||
ssid=None,
|
||||
wan_access="granted" in host["X_AVM-DE_WANAccess"],
|
||||
)
|
||||
hosts = await self._async_update_hosts_info()
|
||||
|
||||
if not self.fritz_status.device_has_mesh_support or (
|
||||
self._options
|
||||
@@ -579,9 +645,7 @@ class FritzBoxTools(
|
||||
self, config_entry: ConfigEntry | None = None
|
||||
) -> None:
|
||||
"""Trigger device trackers cleanup."""
|
||||
device_hosts_list = await self.hass.async_add_executor_job(
|
||||
self.fritz_hosts.get_hosts_attributes
|
||||
)
|
||||
device_hosts = await self._async_update_hosts_info()
|
||||
entity_reg: er.EntityRegistry = er.async_get(self.hass)
|
||||
|
||||
if config_entry is None:
|
||||
@@ -596,9 +660,9 @@ class FritzBoxTools(
|
||||
|
||||
device_hosts_macs = set()
|
||||
device_hosts_names = set()
|
||||
for device in device_hosts_list:
|
||||
device_hosts_macs.add(device["MACAddress"])
|
||||
device_hosts_names.add(device["HostName"])
|
||||
for mac, device in device_hosts.items():
|
||||
device_hosts_macs.add(mac)
|
||||
device_hosts_names.add(device.name)
|
||||
|
||||
for entry in ha_entity_reg_list:
|
||||
if entry.original_name is None:
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20230725.0"]
|
||||
"requirements": ["home-assistant-frontend==20230802.1"]
|
||||
}
|
||||
|
||||
@@ -62,11 +62,11 @@ DESCRIPTIONS = (
|
||||
GardenaBluetoothNumberEntityDescription(
|
||||
key=DeviceConfiguration.rain_pause.uuid,
|
||||
translation_key="rain_pause",
|
||||
native_unit_of_measurement=UnitOfTime.DAYS,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
mode=NumberMode.BOX,
|
||||
native_min_value=0.0,
|
||||
native_max_value=127.0,
|
||||
native_step=1.0,
|
||||
native_max_value=7 * 24 * 60,
|
||||
native_step=6 * 60.0,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
char=DeviceConfiguration.rain_pause,
|
||||
),
|
||||
|
||||
@@ -117,3 +117,8 @@ class GardenaBluetoothRemainSensor(GardenaBluetoothEntity, SensorEntity):
|
||||
self._attr_native_value = time
|
||||
super()._handle_coordinator_update()
|
||||
return
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Sensor only available when open."""
|
||||
return super().available and self._attr_native_value is not None
|
||||
|
||||
@@ -15,7 +15,8 @@
|
||||
"cannot_connect": "Failed to connect: {error}"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
|
||||
@@ -113,9 +113,10 @@ class GoGoGate2Entity(CoordinatorEntity[DeviceDataUpdateCoordinator]):
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Device info for the controller."""
|
||||
data = self.coordinator.data
|
||||
configuration_url = (
|
||||
f"https://{data.remoteaccess}" if data.remoteaccess else None
|
||||
)
|
||||
if data.remoteaccessenabled:
|
||||
configuration_url = f"https://{data.remoteaccess}"
|
||||
else:
|
||||
configuration_url = f"http://{self._config_entry.data[CONF_IP_ADDRESS]}"
|
||||
return DeviceInfo(
|
||||
configuration_url=configuration_url,
|
||||
identifiers={(DOMAIN, str(self._config_entry.unique_id))},
|
||||
|
||||
@@ -174,7 +174,10 @@ class HistoryStatsSensor(HistoryStatsSensorBase):
|
||||
return
|
||||
|
||||
if self._type == CONF_TYPE_TIME:
|
||||
self._attr_native_value = state.seconds_matched / 3600
|
||||
value = state.seconds_matched / 3600
|
||||
if self._attr_unique_id is None:
|
||||
value = round(value, 2)
|
||||
self._attr_native_value = value
|
||||
elif self._type == CONF_TYPE_RATIO:
|
||||
self._attr_native_value = pretty_ratio(state.seconds_matched, state.period)
|
||||
elif self._type == CONF_TYPE_COUNT:
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pyhap"],
|
||||
"requirements": [
|
||||
"HAP-python==4.7.0",
|
||||
"HAP-python==4.7.1",
|
||||
"fnv-hash-fast==0.4.0",
|
||||
"PyQRCode==1.2.1",
|
||||
"base36==0.1.1"
|
||||
|
||||
@@ -80,11 +80,11 @@ class TriggerSource:
|
||||
self._iid_trigger_keys.setdefault(iid, set()).add(trigger_key)
|
||||
await connection.add_watchable_characteristics([(aid, iid)])
|
||||
|
||||
def fire(self, iid: int, value: dict[str, Any]) -> None:
|
||||
def fire(self, iid: int, ev: dict[str, Any]) -> None:
|
||||
"""Process events that have been received from a HomeKit accessory."""
|
||||
for trigger_key in self._iid_trigger_keys.get(iid, set()):
|
||||
for event_handler in self._callbacks.get(trigger_key, []):
|
||||
event_handler(value)
|
||||
event_handler(ev)
|
||||
|
||||
def async_get_triggers(self) -> Generator[tuple[str, str], None, None]:
|
||||
"""List device triggers for HomeKit devices."""
|
||||
@@ -99,20 +99,23 @@ class TriggerSource:
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Attach a trigger."""
|
||||
trigger_data = trigger_info["trigger_data"]
|
||||
trigger_key = (config[CONF_TYPE], config[CONF_SUBTYPE])
|
||||
type_: str = config[CONF_TYPE]
|
||||
sub_type: str = config[CONF_SUBTYPE]
|
||||
trigger_key = (type_, sub_type)
|
||||
job = HassJob(action)
|
||||
trigger_callbacks = self._callbacks.setdefault(trigger_key, [])
|
||||
hass = self._hass
|
||||
|
||||
@callback
|
||||
def event_handler(char: dict[str, Any]) -> None:
|
||||
if config[CONF_SUBTYPE] != HK_TO_HA_INPUT_EVENT_VALUES[char["value"]]:
|
||||
def event_handler(ev: dict[str, Any]) -> None:
|
||||
if sub_type != HK_TO_HA_INPUT_EVENT_VALUES[ev["value"]]:
|
||||
return
|
||||
self._hass.async_run_hass_job(job, {"trigger": {**trigger_data, **config}})
|
||||
hass.async_run_hass_job(job, {"trigger": {**trigger_data, **config}})
|
||||
|
||||
self._callbacks.setdefault(trigger_key, []).append(event_handler)
|
||||
trigger_callbacks.append(event_handler)
|
||||
|
||||
def async_remove_handler():
|
||||
if trigger_key in self._callbacks:
|
||||
self._callbacks[trigger_key].remove(event_handler)
|
||||
trigger_callbacks.remove(event_handler)
|
||||
|
||||
return async_remove_handler
|
||||
|
||||
@@ -262,7 +265,10 @@ def async_fire_triggers(conn: HKDevice, events: dict[tuple[int, int], dict[str,
|
||||
if aid in conn.devices:
|
||||
device_id = conn.devices[aid]
|
||||
if source := trigger_sources.get(device_id):
|
||||
source.fire(iid, ev)
|
||||
# If the value is None, we received the event via polling
|
||||
# and we don't want to trigger on that
|
||||
if ev.get("value") is not None:
|
||||
source.fire(iid, ev)
|
||||
|
||||
|
||||
async def async_get_triggers(
|
||||
|
||||
@@ -5,6 +5,7 @@ from typing import Any
|
||||
|
||||
from aiohomekit.model import Accessory
|
||||
from aiohomekit.model.characteristics import (
|
||||
EVENT_CHARACTERISTICS,
|
||||
Characteristic,
|
||||
CharacteristicPermissions,
|
||||
CharacteristicsTypes,
|
||||
@@ -111,7 +112,10 @@ class HomeKitEntity(Entity):
|
||||
def _setup_characteristic(self, char: Characteristic) -> None:
|
||||
"""Configure an entity based on a HomeKit characteristics metadata."""
|
||||
# Build up a list of (aid, iid) tuples to poll on update()
|
||||
if CharacteristicPermissions.paired_read in char.perms:
|
||||
if (
|
||||
CharacteristicPermissions.paired_read in char.perms
|
||||
and char.type not in EVENT_CHARACTERISTICS
|
||||
):
|
||||
self.pollable_characteristics.append((self._aid, char.iid))
|
||||
|
||||
# Build up a list of (aid, iid) tuples to subscribe to
|
||||
|
||||
@@ -14,6 +14,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aiohomekit", "commentjson"],
|
||||
"requirements": ["aiohomekit==2.6.12"],
|
||||
"requirements": ["aiohomekit==2.6.16"],
|
||||
"zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
|
||||
}
|
||||
|
||||
@@ -146,13 +146,13 @@ class HoneywellUSThermostat(ClimateEntity):
|
||||
| ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
|
||||
)
|
||||
|
||||
if device._data["canControlHumidification"]:
|
||||
if device._data.get("canControlHumidification"):
|
||||
self._attr_supported_features |= ClimateEntityFeature.TARGET_HUMIDITY
|
||||
|
||||
if device.raw_ui_data["SwitchEmergencyHeatAllowed"]:
|
||||
if device.raw_ui_data.get("SwitchEmergencyHeatAllowed"):
|
||||
self._attr_supported_features |= ClimateEntityFeature.AUX_HEAT
|
||||
|
||||
if not device._data["hasFan"]:
|
||||
if not device._data.get("hasFan"):
|
||||
return
|
||||
|
||||
# not all honeywell fans support all modes
|
||||
|
||||
@@ -20,6 +20,7 @@ from homeassistant.helpers.entity import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from . import HoneywellData
|
||||
from .const import DOMAIN, HUMIDITY_STATUS_KEY, TEMPERATURE_STATUS_KEY
|
||||
|
||||
|
||||
@@ -71,7 +72,7 @@ async def async_setup_entry(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Honeywell thermostat."""
|
||||
data = hass.data[DOMAIN][config_entry.entry_id]
|
||||
data: HoneywellData = hass.data[DOMAIN][config_entry.entry_id]
|
||||
sensors = []
|
||||
|
||||
for device in data.devices.values():
|
||||
|
||||
@@ -89,6 +89,7 @@ class HueLight(HueBaseEntity, LightEntity):
|
||||
self._supported_color_modes.add(ColorMode.BRIGHTNESS)
|
||||
# support transition if brightness control
|
||||
self._attr_supported_features |= LightEntityFeature.TRANSITION
|
||||
self._color_temp_active: bool = False
|
||||
# get list of supported effects (combine effects and timed_effects)
|
||||
self._attr_effect_list = []
|
||||
if effects := resource.effects:
|
||||
@@ -121,10 +122,8 @@ class HueLight(HueBaseEntity, LightEntity):
|
||||
@property
|
||||
def color_mode(self) -> ColorMode:
|
||||
"""Return the color mode of the light."""
|
||||
if color_temp := self.resource.color_temperature:
|
||||
# Hue lights return `mired_valid` to indicate CT is active
|
||||
if color_temp.mirek is not None:
|
||||
return ColorMode.COLOR_TEMP
|
||||
if self.color_temp_active:
|
||||
return ColorMode.COLOR_TEMP
|
||||
if self.resource.supports_color:
|
||||
return ColorMode.XY
|
||||
if self.resource.supports_dimming:
|
||||
@@ -132,6 +131,18 @@ class HueLight(HueBaseEntity, LightEntity):
|
||||
# fallback to on_off
|
||||
return ColorMode.ONOFF
|
||||
|
||||
@property
|
||||
def color_temp_active(self) -> bool:
|
||||
"""Return if the light is in Color Temperature mode."""
|
||||
color_temp = self.resource.color_temperature
|
||||
if color_temp is None or color_temp.mirek is None:
|
||||
return False
|
||||
# Official Hue lights return `mirek_valid` to indicate CT is active
|
||||
# while non-official lights do not.
|
||||
if self.device.product_data.certified:
|
||||
return self.resource.color_temperature.mirek_valid
|
||||
return self._color_temp_active
|
||||
|
||||
@property
|
||||
def xy_color(self) -> tuple[float, float] | None:
|
||||
"""Return the xy color."""
|
||||
@@ -193,6 +204,7 @@ class HueLight(HueBaseEntity, LightEntity):
|
||||
xy_color = kwargs.get(ATTR_XY_COLOR)
|
||||
color_temp = normalize_hue_colortemp(kwargs.get(ATTR_COLOR_TEMP))
|
||||
brightness = normalize_hue_brightness(kwargs.get(ATTR_BRIGHTNESS))
|
||||
self._color_temp_active = color_temp is not None
|
||||
flash = kwargs.get(ATTR_FLASH)
|
||||
effect = effect_str = kwargs.get(ATTR_EFFECT)
|
||||
if effect_str in (EFFECT_NONE, EFFECT_NONE.lower()):
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["deepmerge", "pyipp"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pyipp==0.14.2"],
|
||||
"requirements": ["pyipp==0.14.3"],
|
||||
"zeroconf": ["_ipps._tcp.local.", "_ipp._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -138,7 +138,7 @@ class MicroBotConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
await self._client.connect(init=True)
|
||||
return self.async_show_form(step_id="link")
|
||||
|
||||
if not self._client.is_connected():
|
||||
if not await self._client.is_connected():
|
||||
errors["base"] = "linking"
|
||||
else:
|
||||
await self._client.disconnect()
|
||||
|
||||
@@ -342,10 +342,13 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(
|
||||
entry,
|
||||
[
|
||||
platform
|
||||
for platform in SUPPORTED_PLATFORMS
|
||||
if platform in hass.data[DATA_KNX_CONFIG]
|
||||
and platform is not Platform.NOTIFY
|
||||
Platform.SENSOR, # always unload system entities (telegram counter, etc.)
|
||||
*[
|
||||
platform
|
||||
for platform in SUPPORTED_PLATFORMS
|
||||
if platform in hass.data[DATA_KNX_CONFIG]
|
||||
and platform not in (Platform.SENSOR, Platform.NOTIFY)
|
||||
],
|
||||
],
|
||||
)
|
||||
if unload_ok:
|
||||
|
||||
@@ -111,7 +111,7 @@ class PlenticoreDataSelect(
|
||||
self.platform_name = platform_name
|
||||
self.module_id = description.module_id
|
||||
self.data_id = description.key
|
||||
self._device_info = device_info
|
||||
self._attr_device_info = device_info
|
||||
self._attr_unique_id = f"{entry_id}_{description.module_id}"
|
||||
|
||||
@property
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/life360",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["life360"],
|
||||
"requirements": ["life360==5.5.0"]
|
||||
"requirements": ["life360==6.0.0"]
|
||||
}
|
||||
|
||||
@@ -218,10 +218,12 @@ def parse_api_response(response):
|
||||
for authority in AUTHORITIES:
|
||||
for entry in response["HourlyAirQualityIndex"]["LocalAuthority"]:
|
||||
if entry["@LocalAuthorityName"] == authority:
|
||||
if isinstance(entry["Site"], dict):
|
||||
entry_sites_data = [entry["Site"]]
|
||||
else:
|
||||
entry_sites_data = entry["Site"]
|
||||
entry_sites_data = []
|
||||
if "Site" in entry:
|
||||
if isinstance(entry["Site"], dict):
|
||||
entry_sites_data = [entry["Site"]]
|
||||
else:
|
||||
entry_sites_data = entry["Site"]
|
||||
|
||||
data[authority] = parse_site(entry_sites_data)
|
||||
|
||||
|
||||
@@ -21,7 +21,12 @@ from homeassistant.components.climate import (
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_TEMPERATURE
|
||||
from homeassistant.const import (
|
||||
ATTR_TEMPERATURE,
|
||||
PRECISION_HALVES,
|
||||
PRECISION_WHOLE,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_platform
|
||||
@@ -113,7 +118,6 @@ async def async_setup_entry(
|
||||
),
|
||||
location,
|
||||
device,
|
||||
hass.config.units.temperature_unit,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -140,10 +144,15 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity):
|
||||
description: ClimateEntityDescription,
|
||||
location: LyricLocation,
|
||||
device: LyricDevice,
|
||||
temperature_unit: str,
|
||||
) -> None:
|
||||
"""Initialize Honeywell Lyric climate entity."""
|
||||
self._temperature_unit = temperature_unit
|
||||
# Use the native temperature unit from the device settings
|
||||
if device.units == "Fahrenheit":
|
||||
self._attr_temperature_unit = UnitOfTemperature.FAHRENHEIT
|
||||
self._attr_precision = PRECISION_WHOLE
|
||||
else:
|
||||
self._attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
self._attr_precision = PRECISION_HALVES
|
||||
|
||||
# Setup supported hvac modes
|
||||
self._attr_hvac_modes = [HVACMode.OFF]
|
||||
@@ -176,11 +185,6 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity):
|
||||
return SUPPORT_FLAGS_LCC
|
||||
return SUPPORT_FLAGS_TCC
|
||||
|
||||
@property
|
||||
def temperature_unit(self) -> str:
|
||||
"""Return the unit of measurement."""
|
||||
return self._temperature_unit
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
|
||||
@@ -17,7 +17,7 @@ from homeassistant.components.sensor import (
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import PERCENTAGE
|
||||
from homeassistant.const import PERCENTAGE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
@@ -76,6 +76,11 @@ async def async_setup_entry(
|
||||
for location in coordinator.data.locations:
|
||||
for device in location.devices:
|
||||
if device.indoorTemperature:
|
||||
if device.units == "Fahrenheit":
|
||||
native_temperature_unit = UnitOfTemperature.FAHRENHEIT
|
||||
else:
|
||||
native_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
|
||||
entities.append(
|
||||
LyricSensor(
|
||||
coordinator,
|
||||
@@ -84,7 +89,7 @@ async def async_setup_entry(
|
||||
name="Indoor Temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=hass.config.units.temperature_unit,
|
||||
native_unit_of_measurement=native_temperature_unit,
|
||||
value=lambda device: device.indoorTemperature,
|
||||
),
|
||||
location,
|
||||
@@ -108,6 +113,11 @@ async def async_setup_entry(
|
||||
)
|
||||
)
|
||||
if device.outdoorTemperature:
|
||||
if device.units == "Fahrenheit":
|
||||
native_temperature_unit = UnitOfTemperature.FAHRENHEIT
|
||||
else:
|
||||
native_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
|
||||
entities.append(
|
||||
LyricSensor(
|
||||
coordinator,
|
||||
@@ -116,7 +126,7 @@ async def async_setup_entry(
|
||||
name="Outdoor Temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=hass.config.units.temperature_unit,
|
||||
native_unit_of_measurement=native_temperature_unit,
|
||||
value=lambda device: device.outdoorTemperature,
|
||||
),
|
||||
location,
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pymazda"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pymazda==0.3.10"]
|
||||
"requirements": ["pymazda==0.3.11"]
|
||||
}
|
||||
|
||||
@@ -26,7 +26,7 @@
|
||||
"fields": {
|
||||
"position": {
|
||||
"name": "Position",
|
||||
"description": "Horizontal vane position. Possible options can be found in the vane_horizontal_positions state attribute.\n."
|
||||
"description": "Horizontal vane position. Possible options can be found in the vane_horizontal_positions state attribute."
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -36,7 +36,7 @@
|
||||
"fields": {
|
||||
"position": {
|
||||
"name": "Position",
|
||||
"description": "Vertical vane position. Possible options can be found in the vane_vertical_positions state attribute.\n."
|
||||
"description": "Vertical vane position. Possible options can be found in the vane_vertical_positions state attribute."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -51,14 +51,12 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
SensorEntityDescription(
|
||||
key="name",
|
||||
name="Station name",
|
||||
device_class=None,
|
||||
icon="mdi:label-outline",
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="weather",
|
||||
name="Weather",
|
||||
device_class=None,
|
||||
icon="mdi:weather-sunny", # but will adapt to current conditions
|
||||
entity_registry_enabled_default=True,
|
||||
),
|
||||
@@ -107,7 +105,6 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
SensorEntityDescription(
|
||||
key="visibility",
|
||||
name="Visibility",
|
||||
device_class=None,
|
||||
icon="mdi:eye",
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
@@ -115,14 +112,12 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
key="visibility_distance",
|
||||
name="Visibility distance",
|
||||
native_unit_of_measurement=UnitOfLength.KILOMETERS,
|
||||
device_class=SensorDeviceClass.DISTANCE,
|
||||
icon="mdi:eye",
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="uv",
|
||||
name="UV index",
|
||||
device_class=None,
|
||||
native_unit_of_measurement=UV_INDEX,
|
||||
icon="mdi:weather-sunny-alert",
|
||||
entity_registry_enabled_default=True,
|
||||
@@ -130,7 +125,6 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
SensorEntityDescription(
|
||||
key="precipitation",
|
||||
name="Probability of precipitation",
|
||||
device_class=None,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
icon="mdi:weather-rainy",
|
||||
entity_registry_enabled_default=True,
|
||||
|
||||
@@ -48,10 +48,12 @@ from .const import (
|
||||
CONF_MIN_VALUE,
|
||||
CONF_PRECISION,
|
||||
CONF_SCALE,
|
||||
CONF_SLAVE_COUNT,
|
||||
CONF_STATE_OFF,
|
||||
CONF_STATE_ON,
|
||||
CONF_SWAP,
|
||||
CONF_SWAP_BYTE,
|
||||
CONF_SWAP_NONE,
|
||||
CONF_SWAP_WORD,
|
||||
CONF_SWAP_WORD_BYTE,
|
||||
CONF_VERIFY,
|
||||
@@ -155,15 +157,25 @@ class BaseStructPlatform(BasePlatform, RestoreEntity):
|
||||
"""Initialize the switch."""
|
||||
super().__init__(hub, config)
|
||||
self._swap = config[CONF_SWAP]
|
||||
if self._swap == CONF_SWAP_NONE:
|
||||
self._swap = None
|
||||
self._data_type = config[CONF_DATA_TYPE]
|
||||
self._structure: str = config[CONF_STRUCTURE]
|
||||
self._precision = config[CONF_PRECISION]
|
||||
self._scale = config[CONF_SCALE]
|
||||
self._offset = config[CONF_OFFSET]
|
||||
self._count = config[CONF_COUNT]
|
||||
self._slave_count = config.get(CONF_SLAVE_COUNT, 0)
|
||||
self._slave_size = self._count = config[CONF_COUNT]
|
||||
|
||||
def _swap_registers(self, registers: list[int]) -> list[int]:
|
||||
def _swap_registers(self, registers: list[int], slave_count: int) -> list[int]:
|
||||
"""Do swap as needed."""
|
||||
if slave_count:
|
||||
swapped = []
|
||||
for i in range(0, self._slave_count + 1):
|
||||
inx = i * self._slave_size
|
||||
inx2 = inx + self._slave_size
|
||||
swapped.extend(self._swap_registers(registers[inx:inx2], 0))
|
||||
return swapped
|
||||
if self._swap in (CONF_SWAP_BYTE, CONF_SWAP_WORD_BYTE):
|
||||
# convert [12][34] --> [21][43]
|
||||
for i, register in enumerate(registers):
|
||||
@@ -191,7 +203,8 @@ class BaseStructPlatform(BasePlatform, RestoreEntity):
|
||||
def unpack_structure_result(self, registers: list[int]) -> str | None:
|
||||
"""Convert registers to proper result."""
|
||||
|
||||
registers = self._swap_registers(registers)
|
||||
if self._swap:
|
||||
registers = self._swap_registers(registers, self._slave_count)
|
||||
byte_string = b"".join([x.to_bytes(2, byteorder="big") for x in registers])
|
||||
if self._data_type == DataType.STRING:
|
||||
return byte_string.decode()
|
||||
|
||||
@@ -206,7 +206,7 @@ class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity):
|
||||
int.from_bytes(as_bytes[i : i + 2], "big")
|
||||
for i in range(0, len(as_bytes), 2)
|
||||
]
|
||||
registers = self._swap_registers(raw_regs)
|
||||
registers = self._swap_registers(raw_regs, 0)
|
||||
|
||||
if self._data_type in (
|
||||
DataType.INT16,
|
||||
|
||||
@@ -49,7 +49,7 @@ async def async_setup_platform(
|
||||
hub = get_hub(hass, discovery_info[CONF_NAME])
|
||||
for entry in discovery_info[CONF_SENSORS]:
|
||||
slave_count = entry.get(CONF_SLAVE_COUNT, 0)
|
||||
sensor = ModbusRegisterSensor(hub, entry)
|
||||
sensor = ModbusRegisterSensor(hub, entry, slave_count)
|
||||
if slave_count > 0:
|
||||
sensors.extend(await sensor.async_setup_slaves(hass, slave_count, entry))
|
||||
sensors.append(sensor)
|
||||
@@ -63,9 +63,12 @@ class ModbusRegisterSensor(BaseStructPlatform, RestoreSensor, SensorEntity):
|
||||
self,
|
||||
hub: ModbusHub,
|
||||
entry: dict[str, Any],
|
||||
slave_count: int,
|
||||
) -> None:
|
||||
"""Initialize the modbus register sensor."""
|
||||
super().__init__(hub, entry)
|
||||
if slave_count:
|
||||
self._count = self._count * (slave_count + 1)
|
||||
self._coordinator: DataUpdateCoordinator[list[int] | None] | None = None
|
||||
self._attr_native_unit_of_measurement = entry.get(CONF_UNIT_OF_MEASUREMENT)
|
||||
self._attr_state_class = entry.get(CONF_STATE_CLASS)
|
||||
@@ -129,10 +132,7 @@ class ModbusRegisterSensor(BaseStructPlatform, RestoreSensor, SensorEntity):
|
||||
self._coordinator.async_set_updated_data(None)
|
||||
else:
|
||||
self._attr_native_value = result
|
||||
if self._attr_native_value is None:
|
||||
self._attr_available = False
|
||||
else:
|
||||
self._attr_available = True
|
||||
self._attr_available = self._attr_native_value is not None
|
||||
self._lazy_errors = self._lazy_error_count
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
@@ -65,25 +65,25 @@ def struct_validator(config: dict[str, Any]) -> dict[str, Any]:
|
||||
name = config[CONF_NAME]
|
||||
structure = config.get(CONF_STRUCTURE)
|
||||
slave_count = config.get(CONF_SLAVE_COUNT, 0) + 1
|
||||
swap_type = config.get(CONF_SWAP)
|
||||
slave = config.get(CONF_SLAVE, 0)
|
||||
swap_type = config.get(CONF_SWAP, CONF_SWAP_NONE)
|
||||
if (
|
||||
slave_count > 1
|
||||
and count > 1
|
||||
and data_type not in (DataType.CUSTOM, DataType.STRING)
|
||||
):
|
||||
error = f"{name} {CONF_COUNT} cannot be mixed with {data_type}"
|
||||
raise vol.Invalid(error)
|
||||
if config[CONF_DATA_TYPE] != DataType.CUSTOM:
|
||||
if structure:
|
||||
error = f"{name} structure: cannot be mixed with {data_type}"
|
||||
raise vol.Invalid(error)
|
||||
if data_type not in DEFAULT_STRUCT_FORMAT:
|
||||
error = f"Error in sensor {name}. data_type `{data_type}` not supported"
|
||||
raise vol.Invalid(error)
|
||||
|
||||
structure = f">{DEFAULT_STRUCT_FORMAT[data_type].struct_id}"
|
||||
if CONF_COUNT not in config:
|
||||
config[CONF_COUNT] = DEFAULT_STRUCT_FORMAT[data_type].register_count
|
||||
if slave_count > 1:
|
||||
structure = f">{slave_count}{DEFAULT_STRUCT_FORMAT[data_type].struct_id}"
|
||||
else:
|
||||
structure = f">{DEFAULT_STRUCT_FORMAT[data_type].struct_id}"
|
||||
else:
|
||||
if slave_count > 1:
|
||||
error = f"{name} structure: cannot be mixed with {CONF_SLAVE_COUNT}"
|
||||
if config[CONF_DATA_TYPE] == DataType.CUSTOM:
|
||||
if slave or slave_count > 1:
|
||||
error = f"{name}: `{CONF_STRUCTURE}` illegal with `{CONF_SLAVE_COUNT}` / `{CONF_SLAVE}`"
|
||||
raise vol.Invalid(error)
|
||||
if swap_type != CONF_SWAP_NONE:
|
||||
error = f"{name}: `{CONF_STRUCTURE}` illegal with `{CONF_SWAP}`"
|
||||
raise vol.Invalid(error)
|
||||
if not structure:
|
||||
error = (
|
||||
@@ -102,19 +102,37 @@ def struct_validator(config: dict[str, Any]) -> dict[str, Any]:
|
||||
f"Structure request {size} bytes, "
|
||||
f"but {count} registers have a size of {bytecount} bytes"
|
||||
)
|
||||
return {
|
||||
**config,
|
||||
CONF_STRUCTURE: structure,
|
||||
CONF_SWAP: swap_type,
|
||||
}
|
||||
if data_type not in DEFAULT_STRUCT_FORMAT:
|
||||
error = f"Error in sensor {name}. data_type `{data_type}` not supported"
|
||||
raise vol.Invalid(error)
|
||||
if slave_count > 1 and data_type == DataType.STRING:
|
||||
error = f"{name}: `{data_type}` illegal with `{CONF_SLAVE_COUNT}`"
|
||||
raise vol.Invalid(error)
|
||||
|
||||
if swap_type != CONF_SWAP_NONE:
|
||||
if swap_type == CONF_SWAP_BYTE:
|
||||
regs_needed = 1
|
||||
else: # CONF_SWAP_WORD_BYTE, CONF_SWAP_WORD
|
||||
regs_needed = 2
|
||||
if count < regs_needed or (count % regs_needed) != 0:
|
||||
raise vol.Invalid(
|
||||
f"Error in sensor {name} swap({swap_type}) "
|
||||
"not possible due to the registers "
|
||||
f"count: {count}, needed: {regs_needed}"
|
||||
)
|
||||
|
||||
if CONF_COUNT not in config:
|
||||
config[CONF_COUNT] = DEFAULT_STRUCT_FORMAT[data_type].register_count
|
||||
if swap_type != CONF_SWAP_NONE:
|
||||
if swap_type == CONF_SWAP_BYTE:
|
||||
regs_needed = 1
|
||||
else: # CONF_SWAP_WORD_BYTE, CONF_SWAP_WORD
|
||||
regs_needed = 2
|
||||
count = config[CONF_COUNT]
|
||||
if count < regs_needed or (count % regs_needed) != 0:
|
||||
raise vol.Invalid(
|
||||
f"Error in sensor {name} swap({swap_type}) "
|
||||
"not possible due to the registers "
|
||||
f"count: {count}, needed: {regs_needed}"
|
||||
)
|
||||
structure = f">{DEFAULT_STRUCT_FORMAT[data_type].struct_id}"
|
||||
if slave_count > 1:
|
||||
structure = f">{slave_count}{DEFAULT_STRUCT_FORMAT[data_type].struct_id}"
|
||||
else:
|
||||
structure = f">{DEFAULT_STRUCT_FORMAT[data_type].struct_id}"
|
||||
return {
|
||||
**config,
|
||||
CONF_STRUCTURE: structure,
|
||||
|
||||
@@ -36,6 +36,7 @@ from homeassistant.core import (
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.dispatcher import dispatcher_send
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import bind_hass
|
||||
from homeassistant.util import dt as dt_util
|
||||
@@ -64,6 +65,7 @@ from .const import (
|
||||
DEFAULT_WILL,
|
||||
DEFAULT_WS_HEADERS,
|
||||
DEFAULT_WS_PATH,
|
||||
DOMAIN,
|
||||
MQTT_CONNECTED,
|
||||
MQTT_DISCONNECTED,
|
||||
PROTOCOL_5,
|
||||
@@ -93,6 +95,10 @@ SUBSCRIBE_COOLDOWN = 0.1
|
||||
UNSUBSCRIBE_COOLDOWN = 0.1
|
||||
TIMEOUT_ACK = 10
|
||||
|
||||
MQTT_ENTRIES_NAMING_BLOG_URL = (
|
||||
"https://developers.home-assistant.io/blog/2023-057-21-change-naming-mqtt-entities/"
|
||||
)
|
||||
|
||||
SubscribePayloadType = str | bytes # Only bytes if encoding is None
|
||||
|
||||
|
||||
@@ -404,6 +410,7 @@ class MQTT:
|
||||
|
||||
@callback
|
||||
def ha_started(_: Event) -> None:
|
||||
self.register_naming_issues()
|
||||
self._ha_started.set()
|
||||
|
||||
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, ha_started)
|
||||
@@ -416,6 +423,25 @@ class MQTT:
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_stop_mqtt)
|
||||
)
|
||||
|
||||
def register_naming_issues(self) -> None:
|
||||
"""Register issues with MQTT entity naming."""
|
||||
mqtt_data = get_mqtt_data(self.hass)
|
||||
for issue_key, items in mqtt_data.issues.items():
|
||||
config_list = "\n".join([f"- {item}" for item in items])
|
||||
async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
issue_key,
|
||||
breaks_in_ha_version="2024.2.0",
|
||||
is_fixable=False,
|
||||
translation_key=issue_key,
|
||||
translation_placeholders={
|
||||
"config": config_list,
|
||||
},
|
||||
learn_more_url=MQTT_ENTRIES_NAMING_BLOG_URL,
|
||||
severity=IssueSeverity.WARNING,
|
||||
)
|
||||
|
||||
def start(
|
||||
self,
|
||||
mqtt_data: MqttData,
|
||||
|
||||
@@ -1014,6 +1014,7 @@ class MqttEntity(
|
||||
_attr_should_poll = False
|
||||
_default_name: str | None
|
||||
_entity_id_format: str
|
||||
_issue_key: str | None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -1027,6 +1028,7 @@ class MqttEntity(
|
||||
self._config: ConfigType = config
|
||||
self._attr_unique_id = config.get(CONF_UNIQUE_ID)
|
||||
self._sub_state: dict[str, EntitySubscription] = {}
|
||||
self._discovery = discovery_data is not None
|
||||
|
||||
# Load config
|
||||
self._setup_from_config(self._config)
|
||||
@@ -1050,6 +1052,7 @@ class MqttEntity(
|
||||
@final
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Subscribe to MQTT events."""
|
||||
self.collect_issues()
|
||||
await super().async_added_to_hass()
|
||||
self._prepare_subscribe_topics()
|
||||
await self._subscribe_topics()
|
||||
@@ -1122,6 +1125,7 @@ class MqttEntity(
|
||||
|
||||
def _set_entity_name(self, config: ConfigType) -> None:
|
||||
"""Help setting the entity name if needed."""
|
||||
self._issue_key = None
|
||||
entity_name: str | None | UndefinedType = config.get(CONF_NAME, UNDEFINED)
|
||||
# Only set _attr_name if it is needed
|
||||
if entity_name is not UNDEFINED:
|
||||
@@ -1130,6 +1134,7 @@ class MqttEntity(
|
||||
# Assign the default name
|
||||
self._attr_name = self._default_name
|
||||
if CONF_DEVICE in config:
|
||||
device_name: str
|
||||
if CONF_NAME not in config[CONF_DEVICE]:
|
||||
_LOGGER.info(
|
||||
"MQTT device information always needs to include a name, got %s, "
|
||||
@@ -1137,14 +1142,47 @@ class MqttEntity(
|
||||
"name must be included in each entity's device configuration",
|
||||
config,
|
||||
)
|
||||
elif config[CONF_DEVICE][CONF_NAME] == entity_name:
|
||||
elif (device_name := config[CONF_DEVICE][CONF_NAME]) == entity_name:
|
||||
self._attr_name = None
|
||||
self._issue_key = (
|
||||
"entity_name_is_device_name_discovery"
|
||||
if self._discovery
|
||||
else "entity_name_is_device_name_yaml"
|
||||
)
|
||||
_LOGGER.warning(
|
||||
"MQTT device name is equal to entity name in your config %s, "
|
||||
"this is not expected. Please correct your configuration. "
|
||||
"The entity name will be set to `null`",
|
||||
config,
|
||||
)
|
||||
self._attr_name = None
|
||||
elif isinstance(entity_name, str) and entity_name.startswith(device_name):
|
||||
self._attr_name = (
|
||||
new_entity_name := entity_name[len(device_name) :].lstrip()
|
||||
)
|
||||
if device_name[:1].isupper():
|
||||
# Ensure a capital if the device name first char is a capital
|
||||
new_entity_name = new_entity_name[:1].upper() + new_entity_name[1:]
|
||||
self._issue_key = (
|
||||
"entity_name_startswith_device_name_discovery"
|
||||
if self._discovery
|
||||
else "entity_name_startswith_device_name_yaml"
|
||||
)
|
||||
_LOGGER.warning(
|
||||
"MQTT entity name starts with the device name in your config %s, "
|
||||
"this is not expected. Please correct your configuration. "
|
||||
"The device name prefix will be stripped off the entity name "
|
||||
"and becomes '%s'",
|
||||
config,
|
||||
new_entity_name,
|
||||
)
|
||||
|
||||
def collect_issues(self) -> None:
|
||||
"""Process issues for MQTT entities."""
|
||||
if self._issue_key is None:
|
||||
return
|
||||
mqtt_data = get_mqtt_data(self.hass)
|
||||
issues = mqtt_data.issues.setdefault(self._issue_key, set())
|
||||
issues.add(self.entity_id)
|
||||
|
||||
def _setup_common_attributes_from_config(self, config: ConfigType) -> None:
|
||||
"""(Re)Setup the common attributes for the entity."""
|
||||
|
||||
@@ -305,6 +305,7 @@ class MqttData:
|
||||
)
|
||||
discovery_unsubscribe: list[CALLBACK_TYPE] = field(default_factory=list)
|
||||
integration_unsubscribe: dict[str, CALLBACK_TYPE] = field(default_factory=dict)
|
||||
issues: dict[str, set[str]] = field(default_factory=dict)
|
||||
last_discovery: float = 0.0
|
||||
reload_dispatchers: list[CALLBACK_TYPE] = field(default_factory=list)
|
||||
reload_handlers: dict[str, Callable[[], Coroutine[Any, Any, None]]] = field(
|
||||
|
||||
@@ -7,6 +7,22 @@
|
||||
"deprecation_mqtt_legacy_vacuum_discovery": {
|
||||
"title": "MQTT vacuum entities with legacy schema added through MQTT discovery",
|
||||
"description": "MQTT vacuum entities that use the legacy schema are deprecated, please adjust your devices to use the correct schema and restart Home Assistant to fix this issue."
|
||||
},
|
||||
"entity_name_is_device_name_yaml": {
|
||||
"title": "Manual configured MQTT entities with a name that is equal to the device name",
|
||||
"description": "Some MQTT entities have an entity name equal to the device name. This is not expected. The entity name is set to `null` as a work-a-round to avoid a duplicate name. Please update your configuration and restart Home Assistant to fix this issue.\n\nList of affected entities:\n\n{config}"
|
||||
},
|
||||
"entity_name_startswith_device_name_yaml": {
|
||||
"title": "Manual configured MQTT entities with a name that starts with the device name",
|
||||
"description": "Some MQTT entities have an entity name that starts with the device name. This is not expected. To avoid a duplicate name the device name prefix is stripped of the entity name as a work-a-round. Please update your configuration and restart Home Assistant to fix this issue. \n\nList of affected entities:\n\n{config}"
|
||||
},
|
||||
"entity_name_is_device_name_discovery": {
|
||||
"title": "Discovered MQTT entities with a name that is equal to the device name",
|
||||
"description": "Some MQTT entities have an entity name equal to the device name. This is not expected. The entity name is set to `null` as a work-a-round to avoid a duplicate name. Please inform the maintainer of the software application that supplies the affected entities to fix this issue.\n\nList of affected entities:\n\n{config}"
|
||||
},
|
||||
"entity_name_startswith_device_name_discovery": {
|
||||
"title": "Discovered entities with a name that starts with the device name",
|
||||
"description": "Some MQTT entities have an entity name that starts with the device name. This is not expected. To avoid a duplicate name the device name prefix is stripped of the entity name as a work-a-round. Please inform the maintainer of the software application that supplies the affected entities to fix this issue. \n\nList of affected entities:\n\n{config}"
|
||||
}
|
||||
},
|
||||
"config": {
|
||||
|
||||
@@ -3,7 +3,7 @@ from collections import namedtuple
import datetime
import logging

from nessclient import ArmingState, Client
from nessclient import ArmingMode, ArmingState, Client
import voluptuous as vol

from homeassistant.components.binary_sensor import (
@@ -136,9 +136,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
hass, SIGNAL_ZONE_CHANGED, ZoneChangedData(zone_id=zone_id, state=state)
)

def on_state_change(arming_state: ArmingState):
def on_state_change(arming_state: ArmingState, arming_mode: ArmingMode | None):
"""Receives and propagates arming state updates."""
async_dispatcher_send(hass, SIGNAL_ARMING_STATE_CHANGED, arming_state)
async_dispatcher_send(
hass, SIGNAL_ARMING_STATE_CHANGED, arming_state, arming_mode
)

client.on_zone_change(on_zone_change)
client.on_state_change(on_state_change)

@@ -3,12 +3,15 @@ from __future__ import annotations

import logging

from nessclient import ArmingState, Client
from nessclient import ArmingMode, ArmingState, Client

import homeassistant.components.alarm_control_panel as alarm
from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMED_VACATION,
STATE_ALARM_ARMING,
STATE_ALARM_DISARMED,
STATE_ALARM_PENDING,
@@ -23,6 +26,15 @@ from . import DATA_NESS, SIGNAL_ARMING_STATE_CHANGED

_LOGGER = logging.getLogger(__name__)

ARMING_MODE_TO_STATE = {
ArmingMode.ARMED_AWAY: STATE_ALARM_ARMED_AWAY,
ArmingMode.ARMED_HOME: STATE_ALARM_ARMED_HOME,
ArmingMode.ARMED_DAY: STATE_ALARM_ARMED_AWAY, # no applicable state, fallback to away
ArmingMode.ARMED_NIGHT: STATE_ALARM_ARMED_NIGHT,
ArmingMode.ARMED_VACATION: STATE_ALARM_ARMED_VACATION,
ArmingMode.ARMED_HIGHEST: STATE_ALARM_ARMED_AWAY, # no applicable state, fallback to away
}


async def async_setup_platform(
hass: HomeAssistant,
@@ -79,7 +91,9 @@ class NessAlarmPanel(alarm.AlarmControlPanelEntity):
await self._client.panic(code)

@callback
def _handle_arming_state_change(self, arming_state: ArmingState) -> None:
def _handle_arming_state_change(
self, arming_state: ArmingState, arming_mode: ArmingMode | None
) -> None:
"""Handle arming state update."""

if arming_state == ArmingState.UNKNOWN:
@@ -91,7 +105,9 @@ class NessAlarmPanel(alarm.AlarmControlPanelEntity):
elif arming_state == ArmingState.EXIT_DELAY:
self._attr_state = STATE_ALARM_ARMING
elif arming_state == ArmingState.ARMED:
self._attr_state = STATE_ALARM_ARMED_AWAY
self._attr_state = ARMING_MODE_TO_STATE.get(
arming_mode, STATE_ALARM_ARMED_AWAY
)
elif arming_state == ArmingState.ENTRY_DELAY:
self._attr_state = STATE_ALARM_PENDING
elif arming_state == ArmingState.TRIGGERED:

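With nessclient 1.0 the state callback now reports an ArmingMode alongside the ArmingState, and the panel maps that mode onto a Home Assistant alarm state, falling back to "armed away" for modes with no counterpart. A minimal standalone sketch of that lookup, using a stand-in enum instead of the real nessclient types:

from enum import Enum, auto

class ArmingMode(Enum):  # stand-in for nessclient.ArmingMode
    ARMED_AWAY = auto()
    ARMED_HOME = auto()
    ARMED_DAY = auto()

ARMING_MODE_TO_STATE = {
    ArmingMode.ARMED_AWAY: "armed_away",
    ArmingMode.ARMED_HOME: "armed_home",
    # ARMED_DAY intentionally unmapped: it has no Home Assistant equivalent.
}

def state_for(mode: ArmingMode | None) -> str:
    # dict.get() covers both an unmapped mode and mode=None with one fallback.
    return ARMING_MODE_TO_STATE.get(mode, "armed_away")

assert state_for(ArmingMode.ARMED_HOME) == "armed_home"
assert state_for(ArmingMode.ARMED_DAY) == "armed_away"
assert state_for(None) == "armed_away"
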
@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/ness_alarm",
"iot_class": "local_push",
"loggers": ["nessclient"],
"requirements": ["nessclient==0.10.0"]
"requirements": ["nessclient==1.0.0"]
}

@@ -62,6 +62,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

entry.async_on_unload(entry.add_update_listener(update_listener))

configuration_url = None
if host := entry.data[CONF_HOST]:
configuration_url = f"http://{host}/"

assert entry.unique_id
device_registry = dr.async_get(hass)
device_registry.async_get_or_create(
@@ -72,7 +76,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
model=router.model,
sw_version=router.firmware_version,
hw_version=router.hardware_version,
configuration_url=f"http://{entry.data[CONF_HOST]}/",
configuration_url=configuration_url,
)

async def async_update_devices() -> bool:

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/nina",
"iot_class": "cloud_polling",
"loggers": ["pynina"],
"requirements": ["PyNINA==0.3.1"]
"requirements": ["PyNINA==0.3.2"]
}

@@ -26,7 +26,6 @@ CONF_STATION = "station"
ATTRIBUTION = "Data from National Weather Service/NOAA"

ATTR_FORECAST_DETAILED_DESCRIPTION = "detailed_description"
ATTR_FORECAST_DAYTIME = "daytime"

CONDITION_CLASSES: dict[str, list[str]] = {
ATTR_CONDITION_EXCEPTIONAL: [

@@ -7,5 +7,5 @@
"iot_class": "cloud_polling",
"loggers": ["metar", "pynws"],
"quality_scale": "platinum",
"requirements": ["pynws==1.5.0"]
"requirements": ["pynws==1.5.1"]
}

@@ -9,6 +9,7 @@ from homeassistant.components.weather import (
ATTR_CONDITION_SUNNY,
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_HUMIDITY,
ATTR_FORECAST_IS_DAYTIME,
ATTR_FORECAST_NATIVE_DEW_POINT,
ATTR_FORECAST_NATIVE_TEMP,
ATTR_FORECAST_NATIVE_WIND_SPEED,
@@ -36,7 +37,6 @@ from homeassistant.util.unit_system import UnitSystem

from . import base_unique_id, device_info
from .const import (
ATTR_FORECAST_DAYTIME,
ATTR_FORECAST_DETAILED_DESCRIPTION,
ATTRIBUTION,
CONDITION_CLASSES,
@@ -101,7 +101,6 @@ if TYPE_CHECKING:
"""Forecast with extra fields needed for NWS."""

detailed_description: str | None
daytime: bool | None


class NWSWeather(WeatherEntity):
@@ -268,7 +267,7 @@ class NWSWeather(WeatherEntity):
data[ATTR_FORECAST_HUMIDITY] = forecast_entry.get("relativeHumidity")

if self.mode == DAYNIGHT:
data[ATTR_FORECAST_DAYTIME] = forecast_entry.get("isDaytime")
data[ATTR_FORECAST_IS_DAYTIME] = forecast_entry.get("isDaytime")

time = forecast_entry.get("iconTime")
weather = forecast_entry.get("iconWeather")

@@ -12,6 +12,7 @@ from opower import (
InvalidAuth,
MeterType,
Opower,
ReadResolution,
)

from homeassistant.components.recorder import get_instance
@@ -69,12 +70,12 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
raise ConfigEntryAuthFailed from err
forecasts: list[Forecast] = await self.api.async_get_forecast()
_LOGGER.debug("Updating sensor data with: %s", forecasts)
await self._insert_statistics([forecast.account for forecast in forecasts])
await self._insert_statistics()
return {forecast.account.utility_account_id: forecast for forecast in forecasts}

async def _insert_statistics(self, accounts: list[Account]) -> None:
async def _insert_statistics(self) -> None:
"""Insert Opower statistics."""
for account in accounts:
for account in await self.api.async_get_accounts():
id_prefix = "_".join(
(
self.api.utility.subdomain(),
@@ -177,44 +178,55 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
"""Get all cost reads since account activation but at different resolutions depending on age.

- month resolution for all years (since account activation)
- day resolution for past 3 years
- hour resolution for past 2 months, only for electricity, not gas
- day resolution for past 3 years (if account's read resolution supports it)
- hour resolution for past 2 months (if account's read resolution supports it)
"""
cost_reads = []

start = None
end = datetime.now() - timedelta(days=3 * 365)
end = datetime.now()
if account.read_resolution != ReadResolution.BILLING:
end -= timedelta(days=3 * 365)
cost_reads += await self.api.async_get_cost_reads(
account, AggregateType.BILL, start, end
)
if account.read_resolution == ReadResolution.BILLING:
return cost_reads

start = end if not cost_reads else cost_reads[-1].end_time
end = (
datetime.now() - timedelta(days=2 * 30)
if account.meter_type == MeterType.ELEC
else datetime.now()
)
end = datetime.now()
if account.read_resolution != ReadResolution.DAY:
end -= timedelta(days=2 * 30)
cost_reads += await self.api.async_get_cost_reads(
account, AggregateType.DAY, start, end
)
if account.meter_type == MeterType.ELEC:
start = end if not cost_reads else cost_reads[-1].end_time
end = datetime.now()
cost_reads += await self.api.async_get_cost_reads(
account, AggregateType.HOUR, start, end
)
if account.read_resolution == ReadResolution.DAY:
return cost_reads

start = end if not cost_reads else cost_reads[-1].end_time
end = datetime.now()
cost_reads += await self.api.async_get_cost_reads(
account, AggregateType.HOUR, start, end
)
return cost_reads

async def _async_get_recent_cost_reads(
self, account: Account, last_stat_time: float
) -> list[CostRead]:
"""Get cost reads within the past 30 days to allow corrections in data from utilities.

Hourly for electricity, daily for gas.
"""
"""Get cost reads within the past 30 days to allow corrections in data from utilities."""
if account.read_resolution in [
ReadResolution.HOUR,
ReadResolution.HALF_HOUR,
ReadResolution.QUARTER_HOUR,
]:
aggregate_type = AggregateType.HOUR
elif account.read_resolution == ReadResolution.DAY:
aggregate_type = AggregateType.DAY
else:
aggregate_type = AggregateType.BILL
return await self.api.async_get_cost_reads(
account,
AggregateType.HOUR
if account.meter_type == MeterType.ELEC
else AggregateType.DAY,
aggregate_type,
datetime.fromtimestamp(last_stat_time) - timedelta(days=30),
datetime.now(),
)

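The opower change above keys the fetch windows off the account's ReadResolution instead of its meter type: monthly bills cover the full history, then daily and hourly chunks are only requested when the resolution supports them. A minimal standalone sketch of that windowing logic, using stand-in enum members rather than the real opower types, and simplifying each chunk to start where the previous window ended (the real code starts from the last returned read's end_time):

from datetime import datetime, timedelta
from enum import Enum, auto

class ReadResolution(Enum):  # stand-in for opower.ReadResolution
    BILLING = auto()
    DAY = auto()
    HOUR = auto()

def cost_read_windows(read_resolution: ReadResolution, now: datetime | None = None):
    """Return (aggregate, start, end) tuples mirroring the hunk above."""
    now = now or datetime.now()
    windows = []
    # Monthly bills: the whole history, or only the part older than ~3 years
    # when finer-grained reads are available for the recent period.
    bill_end = now if read_resolution == ReadResolution.BILLING else now - timedelta(days=3 * 365)
    windows.append(("BILL", None, bill_end))
    if read_resolution == ReadResolution.BILLING:
        return windows
    # Daily reads for the last ~3 years, stopping ~2 months short when
    # hourly reads can cover the most recent period.
    day_end = now if read_resolution == ReadResolution.DAY else now - timedelta(days=2 * 30)
    windows.append(("DAY", bill_end, day_end))
    if read_resolution == ReadResolution.DAY:
        return windows
    windows.append(("HOUR", day_end, now))
    return windows

for window in cost_read_windows(ReadResolution.HOUR):
    print(window)
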
@@ -6,5 +6,5 @@
"dependencies": ["recorder"],
"documentation": "https://www.home-assistant.io/integrations/opower",
"iot_class": "cloud_polling",
"requirements": ["opower==0.0.16"]
"requirements": ["opower==0.0.29"]
}

@@ -188,7 +188,7 @@ async def async_setup_entry(
sensors = ELEC_SENSORS
elif (
forecast.account.meter_type == MeterType.GAS
and forecast.unit_of_measure == UnitOfMeasure.THERM
and forecast.unit_of_measure in [UnitOfMeasure.THERM, UnitOfMeasure.CCF]
):
sensors = GAS_SENSORS
for sensor in sensors:

@@ -67,7 +67,7 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
entity_category=EntityCategory.DIAGNOSTIC,
icon="mdi:battery",
device_class=SensorDeviceClass.ENUM,
options=["full", "normal", "low", "verylow"],
options=["full", "normal", "medium", "low", "verylow"],
translation_key="battery",
),
OverkizSensorDescription(

@@ -77,6 +77,7 @@
"full": "Full",
"low": "Low",
"normal": "Normal",
"medium": "Medium",
"verylow": "Very low"
}
},

@@ -7,7 +7,12 @@ from datetime import timedelta
import logging
from typing import Any

from haphilipsjs import AutenticationFailure, ConnectionFailure, PhilipsTV
from haphilipsjs import (
AutenticationFailure,
ConnectionFailure,
GeneralFailure,
PhilipsTV,
)
from haphilipsjs.typing import SystemType

from homeassistant.config_entries import ConfigEntry
@@ -22,7 +27,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import CONF_ALLOW_NOTIFY, CONF_SYSTEM, DOMAIN

@@ -187,3 +192,5 @@ class PhilipsTVDataUpdateCoordinator(DataUpdateCoordinator[None]):
pass
except AutenticationFailure as exception:
raise ConfigEntryAuthFailed(str(exception)) from exception
except GeneralFailure as exception:
raise UpdateFailed(str(exception)) from exception

@@ -3,7 +3,7 @@ from __future__ import annotations

from abc import ABC, abstractmethod
from datetime import timedelta
from typing import Generic, TypeVar, cast
from typing import Generic, TypeVar

from aiopyarr import Health, RadarrMovie, RootFolder, SystemStatus, exceptions
from aiopyarr.models.host_configuration import PyArrHostConfiguration
@@ -71,7 +71,10 @@ class DiskSpaceDataUpdateCoordinator(RadarrDataUpdateCoordinator[list[RootFolder

async def _fetch_data(self) -> list[RootFolder]:
"""Fetch the data."""
return cast(list[RootFolder], await self.api_client.async_get_root_folders())
root_folders = await self.api_client.async_get_root_folders()
if isinstance(root_folders, RootFolder):
root_folders = [root_folders]
return root_folders


class HealthDataUpdateCoordinator(RadarrDataUpdateCoordinator[list[Health]]):
@@ -87,4 +90,7 @@ class MoviesDataUpdateCoordinator(RadarrDataUpdateCoordinator[int]):

async def _fetch_data(self) -> int:
"""Fetch the movies data."""
return len(cast(list[RadarrMovie], await self.api_client.async_get_movies()))
movies = await self.api_client.async_get_movies()
if isinstance(movies, RadarrMovie):
return 1
return len(movies)

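The Radarr hunks above replace blind casts with an isinstance check because the aiopyarr calls can return either a single object or a list of them. A minimal standalone sketch of that normalization pattern, with hypothetical values standing in for the aiopyarr models:

from __future__ import annotations

def as_list(result):
    """Normalize an API result that may be a single item or a list of items.
    Mirrors the pattern above for RootFolder/RadarrMovie responses."""
    return result if isinstance(result, list) else [result]

# Usage with stand-in values:
assert as_list([1, 2, 3]) == [1, 2, 3]   # list passes through unchanged
assert as_list(42) == [42]               # lone item is wrapped
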
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/rainbird",
"iot_class": "local_polling",
"loggers": ["pyrainbird"],
"requirements": ["pyrainbird==3.0.0"]
"requirements": ["pyrainbird==4.0.0"]
}

@@ -26,7 +26,10 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
raise ConfigEntryAuthFailed()

hass.data.setdefault(DOMAIN, {})
await renault_hub.async_initialise(config_entry)
try:
await renault_hub.async_initialise(config_entry)
except aiohttp.ClientResponseError as exc:
raise ConfigEntryNotReady() from exc

hass.data[DOMAIN][config_entry.entry_id] = renault_hub


@@ -9,6 +9,7 @@ import logging
from typing import Literal

import async_timeout
from reolink_aio.api import RETRY_ATTEMPTS
from reolink_aio.exceptions import CredentialsInvalidError, ReolinkError

from homeassistant.config_entries import ConfigEntry
@@ -77,15 +78,13 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b

async def async_device_config_update() -> None:
"""Update the host state cache and renew the ONVIF-subscription."""
async with async_timeout.timeout(host.api.timeout):
async with async_timeout.timeout(host.api.timeout * (RETRY_ATTEMPTS + 2)):
try:
await host.update_states()
except ReolinkError as err:
raise UpdateFailed(
f"Error updating Reolink {host.api.nvr_name}"
) from err
raise UpdateFailed(str(err)) from err

async with async_timeout.timeout(host.api.timeout):
async with async_timeout.timeout(host.api.timeout * (RETRY_ATTEMPTS + 2)):
await host.renew()

async def async_check_firmware_update() -> str | Literal[False]:
@@ -93,7 +92,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
if not host.api.supported(None, "update"):
return False

async with async_timeout.timeout(host.api.timeout):
async with async_timeout.timeout(host.api.timeout * (RETRY_ATTEMPTS + 2)):
try:
return await host.api.check_new_firmware()
except (ReolinkError, asyncio.exceptions.CancelledError) as err:

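The Reolink hunks above lower the per-request DEFAULT_TIMEOUT to 30 seconds but scale the coordinator's overall wait by (RETRY_ATTEMPTS + 2), so the budget covers every internal retry plus some slack. A minimal sketch of that budget, with a stand-in value of 3 for RETRY_ATTEMPTS (the real constant lives in reolink_aio.api and is not shown in this diff):

import async_timeout

RETRY_ATTEMPTS = 3   # stand-in; see reolink_aio.api for the real value
API_TIMEOUT = 30     # seconds per request, matching the new DEFAULT_TIMEOUT

async def update_with_budget(update_coro):
    # With the stand-in numbers: 30 s * (3 + 2) = 150 s before giving up,
    # enough for the library to exhaust its own retries first.
    async with async_timeout.timeout(API_TIMEOUT * (RETRY_ATTEMPTS + 2)):
        return await update_coro

# e.g. asyncio.run(update_with_budget(asyncio.sleep(0)))
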
@@ -5,6 +5,7 @@ from collections.abc import Callable
from dataclasses import dataclass

from reolink_aio.api import (
DUAL_LENS_DUAL_MOTION_MODELS,
FACE_DETECTION_TYPE,
PERSON_DETECTION_TYPE,
PET_DETECTION_TYPE,
@@ -128,6 +129,9 @@ class ReolinkBinarySensorEntity(ReolinkChannelCoordinatorEntity, BinarySensorEnt
super().__init__(reolink_data, channel)
self.entity_description = entity_description

if self._host.api.model in DUAL_LENS_DUAL_MOTION_MODELS:
self._attr_name = f"{entity_description.name} lens {self._channel}"

self._attr_unique_id = (
f"{self._host.unique_id}_{self._channel}_{entity_description.key}"
)

@@ -24,8 +24,9 @@ from homeassistant.helpers.network import NoURLAvailableError, get_url
from .const import CONF_PROTOCOL, CONF_USE_HTTPS, DOMAIN
from .exceptions import ReolinkSetupException, ReolinkWebhookException, UserNotAdmin

DEFAULT_TIMEOUT = 60
DEFAULT_TIMEOUT = 30
FIRST_ONVIF_TIMEOUT = 10
FIRST_ONVIF_LONG_POLL_TIMEOUT = 90
SUBSCRIPTION_RENEW_THRESHOLD = 300
POLL_INTERVAL_NO_PUSH = 5
LONG_POLL_COOLDOWN = 0.75
@@ -162,7 +163,7 @@ class ReolinkHost:
else:
_LOGGER.debug(
"Camera model %s most likely does not push its initial state"
"upon ONVIF subscription, do not check",
" upon ONVIF subscription, do not check",
self._api.model,
)
self._cancel_onvif_check = async_call_later(
@@ -205,7 +206,7 @@ class ReolinkHost:
# ONVIF push is not received, start long polling and schedule check
await self._async_start_long_polling()
self._cancel_long_poll_check = async_call_later(
self._hass, FIRST_ONVIF_TIMEOUT, self._async_check_onvif_long_poll
self._hass, FIRST_ONVIF_LONG_POLL_TIMEOUT, self._async_check_onvif_long_poll
)

self._cancel_onvif_check = None
@@ -215,7 +216,7 @@ class ReolinkHost:
if not self._long_poll_received:
_LOGGER.debug(
"Did not receive state through ONVIF long polling after %i seconds",
FIRST_ONVIF_TIMEOUT,
FIRST_ONVIF_LONG_POLL_TIMEOUT,
)
ir.async_create_issue(
self._hass,
@@ -230,8 +231,24 @@ class ReolinkHost:
"network_link": "https://my.home-assistant.io/redirect/network/",
},
)
if self._base_url.startswith("https"):
ir.async_create_issue(
self._hass,
DOMAIN,
"https_webhook",
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
translation_key="https_webhook",
translation_placeholders={
"base_url": self._base_url,
"network_link": "https://my.home-assistant.io/redirect/network/",
},
)
else:
ir.async_delete_issue(self._hass, DOMAIN, "https_webhook")
else:
ir.async_delete_issue(self._hass, DOMAIN, "webhook_url")
ir.async_delete_issue(self._hass, DOMAIN, "https_webhook")

# If no ONVIF push or long polling state is received, start fast polling
await self._async_poll_all_motion()
@@ -426,22 +443,6 @@ class ReolinkHost:
webhook_path = webhook.async_generate_path(event_id)
self._webhook_url = f"{self._base_url}{webhook_path}"

if self._base_url.startswith("https"):
ir.async_create_issue(
self._hass,
DOMAIN,
"https_webhook",
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
translation_key="https_webhook",
translation_placeholders={
"base_url": self._base_url,
"network_link": "https://my.home-assistant.io/redirect/network/",
},
)
else:
ir.async_delete_issue(self._hass, DOMAIN, "https_webhook")

_LOGGER.debug("Registered webhook: %s", event_id)

def unregister_webhook(self):
@@ -469,7 +470,9 @@ class ReolinkHost:
await asyncio.sleep(LONG_POLL_ERROR_COOLDOWN)
continue
except Exception as ex:
_LOGGER.exception("Error while requesting ONVIF pull point: %s", ex)
_LOGGER.exception(
"Unexpected exception while requesting ONVIF pull point: %s", ex
)
await self._api.unsubscribe(sub_type=SubType.long_poll)
raise ex


@@ -18,5 +18,5 @@
"documentation": "https://www.home-assistant.io/integrations/reolink",
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"requirements": ["reolink-aio==0.7.5"]
"requirements": ["reolink-aio==0.7.7"]
}

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/roborock",
"iot_class": "local_polling",
"loggers": ["roborock"],
"requirements": ["python-roborock==0.30.2"]
"requirements": ["python-roborock==0.32.3"]
}

@@ -11,7 +11,7 @@
"iot_class": "local_polling",
"loggers": ["rokuecp"],
"quality_scale": "silver",
"requirements": ["rokuecp==0.18.0"],
"requirements": ["rokuecp==0.18.1"],
"ssdp": [
{
"st": "roku:ecp",

@@ -20,7 +20,7 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import Throttle
from homeassistant.util.dt import get_time_zone, now
from homeassistant.util.dt import get_time_zone

# Config for rova requests.
CONF_ZIP_CODE = "zip_code"
@@ -150,8 +150,7 @@ class RovaData:
tzinfo=get_time_zone("Europe/Amsterdam")
)
code = item["GarbageTypeCode"].lower()

if code not in self.data and date > now():
if code not in self.data:
self.data[code] = date

_LOGGER.debug("Updated Rova calendar: %s", self.data)

@@ -15,5 +15,5 @@
"iot_class": "cloud_polling",
"loggers": ["pysensibo"],
"quality_scale": "platinum",
"requirements": ["pysensibo==1.0.32"]
"requirements": ["pysensibo==1.0.33"]
}

@@ -533,7 +533,8 @@ RPC_SENSORS: Final = {
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
device_class=SensorDeviceClass.CURRENT,
state_class=SensorStateClass.MEASUREMENT,
available=lambda status: status["n_current"] is not None,
available=lambda status: (status and status["n_current"]) is not None,
removal_condition=lambda _config, status, _key: "n_current" not in status,
entity_registry_enabled_default=False,
),
"total_current": RpcSensorDescription(

@@ -218,6 +218,8 @@ async def async_setup_entry(
class SolarlogSensor(CoordinatorEntity[SolarlogData], SensorEntity):
"""Representation of a Sensor."""

_attr_has_entity_name = True

entity_description: SolarLogSensorEntityDescription

def __init__(
@@ -228,7 +230,6 @@ class SolarlogSensor(CoordinatorEntity[SolarlogData], SensorEntity):
"""Initialize the sensor."""
super().__init__(coordinator)
self.entity_description = description
self._attr_name = f"{coordinator.name} {description.name}"
self._attr_unique_id = f"{coordinator.unique_id}_{description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, coordinator.unique_id)},

@@ -130,6 +130,6 @@ SENSORS: tuple[StarlinkSensorEntityDescription, ...] = (
translation_key="ping_drop_rate",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=PERCENTAGE,
value_fn=lambda data: data.status["pop_ping_drop_rate"],
value_fn=lambda data: data.status["pop_ping_drop_rate"] * 100,
),
)

@@ -16,7 +16,7 @@
},
"entity": {
"binary_sensor": {
"roaming_mode": {
"roaming": {
"name": "Roaming mode"
},
"currently_obstructed": {

@@ -67,7 +67,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
config_entry_id=entry.entry_id,
configuration_url=printer.url,
connections=device_connections(printer),
default_manufacturer="Samsung",
manufacturer="Samsung",
identifiers=device_identifiers(printer),
model=printer.model(),
name=printer.hostname(),

@@ -57,25 +57,29 @@ def async_discovery_data_from_service(
except UnicodeDecodeError:
return None

ext_addr = service.properties.get(b"xa")
ext_pan_id = service.properties.get(b"xp")
network_name = try_decode(service.properties.get(b"nn"))
model_name = try_decode(service.properties.get(b"mn"))
# Service properties are always bytes if they are set from the network.
# For legacy backwards compatibility zeroconf allows properties to be set
# as strings but we never do that so we can safely cast here.
service_properties = cast(dict[bytes, bytes | None], service.properties)
ext_addr = service_properties.get(b"xa")
ext_pan_id = service_properties.get(b"xp")
network_name = try_decode(service_properties.get(b"nn"))
model_name = try_decode(service_properties.get(b"mn"))
server = service.server
vendor_name = try_decode(service.properties.get(b"vn"))
thread_version = try_decode(service.properties.get(b"tv"))
vendor_name = try_decode(service_properties.get(b"vn"))
thread_version = try_decode(service_properties.get(b"tv"))
unconfigured = None
brand = KNOWN_BRANDS.get(vendor_name)
if brand == "homeassistant":
# Attempt to detect incomplete configuration
if (state_bitmap_b := service.properties.get(b"sb")) is not None:
if (state_bitmap_b := service_properties.get(b"sb")) is not None:
try:
state_bitmap = StateBitmap.from_bytes(state_bitmap_b)
if not state_bitmap.is_active:
unconfigured = True
except ValueError:
_LOGGER.debug("Failed to decode state bitmap in service %s", service)
if service.properties.get(b"at") is None:
if service_properties.get(b"at") is None:
unconfigured = True

return ThreadRouterDiscoveryData(
@@ -168,10 +172,19 @@ class ThreadRouterDiscovery:
return

_LOGGER.debug("_add_update_service %s %s", name, service)
# Service properties are always bytes if they are set from the network.
# For legacy backwards compatibility zeroconf allows properties to be set
# as strings but we never do that so we can safely cast here.
service_properties = cast(dict[bytes, bytes | None], service.properties)

if not (xa := service_properties.get(b"xa")):
_LOGGER.debug("_add_update_service failed to find xa in %s", service)
return

# We use the extended mac address as key, bail out if it's missing
try:
extended_mac_address = service.properties[b"xa"].hex()
except (KeyError, UnicodeDecodeError) as err:
extended_mac_address = xa.hex()
except UnicodeDecodeError as err:
_LOGGER.debug("_add_update_service failed to parse service %s", err)
return


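The Thread discovery change above treats the zeroconf TXT properties as a bytes-keyed mapping and checks for the extended MAC address ("xa") up front instead of catching a KeyError later. A minimal standalone sketch of that decoding flow, using a hypothetical TXT record in place of a real zeroconf ServiceInfo:

def try_decode(value: bytes | None) -> str | None:
    """Decode a TXT value, returning None when it is absent or not UTF-8.
    Mirrors the helper used in the discovery code above."""
    if value is None:
        return None
    try:
        return value.decode()
    except UnicodeDecodeError:
        return None

# Hypothetical TXT properties as zeroconf delivers them: bytes keys and values.
props: dict[bytes, bytes | None] = {
    b"xa": bytes.fromhex("f4ce36000011"),
    b"nn": b"MyThreadNet",
}

xa = props.get(b"xa")
print(xa.hex() if xa else None)      # extended MAC address, used as the discovery key
print(try_decode(props.get(b"nn")))  # "MyThreadNet"
print(try_decode(props.get(b"vn")))  # None: vendor name not present
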
@@ -102,7 +102,7 @@ class TomorrowioOptionsConfigFlow(config_entries.OptionsFlow):
vol.Required(
CONF_TIMESTEP,
default=self._config_entry.options[CONF_TIMESTEP],
): vol.In([1, 5, 15, 30]),
): vol.In([1, 5, 15, 30, 60]),
}

return self.async_show_form(

@@ -25,7 +25,7 @@ LOGGER = logging.getLogger(__package__)
CONF_TIMESTEP = "timestep"
FORECAST_TYPES = [DAILY, HOURLY, NOWCAST]

DEFAULT_TIMESTEP = 15
DEFAULT_TIMESTEP = 60
DEFAULT_FORECAST_TYPE = DAILY
DOMAIN = "tomorrowio"
INTEGRATION_NAME = "Tomorrow.io"

@@ -154,7 +154,7 @@
},
{
"hostname": "k[lps]*",
"macaddress": "788C5B*"
"macaddress": "788CB5*"
}
],
"documentation": "https://www.home-assistant.io/integrations/tplink",

@@ -50,7 +50,6 @@ ENERGY_SENSORS: tuple[TPLinkSensorEntityDescription, ...] = (
native_unit_of_measurement=UnitOfPower.WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
name="Current Consumption",
emeter_attr="power",
precision=1,
),
@@ -60,7 +59,6 @@ ENERGY_SENSORS: tuple[TPLinkSensorEntityDescription, ...] = (
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
name="Total Consumption",
emeter_attr="total",
precision=3,
),
@@ -70,7 +68,6 @@ ENERGY_SENSORS: tuple[TPLinkSensorEntityDescription, ...] = (
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
name="Today's Consumption",
precision=3,
),
TPLinkSensorEntityDescription(

@@ -116,7 +116,7 @@ class SmartPlugSwitchChild(SmartPlugSwitch):
coordinator: TPLinkDataUpdateCoordinator,
plug: SmartDevice,
) -> None:
"""Initialize the switch."""
"""Initialize the child switch."""
super().__init__(device, coordinator)
self._plug = plug
self._attr_unique_id = legacy_device_id(plug)
@@ -124,10 +124,15 @@ class SmartPlugSwitchChild(SmartPlugSwitch):

@async_refresh_after
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the switch on."""
"""Turn the child switch on."""
await self._plug.turn_on()

@async_refresh_after
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the switch off."""
"""Turn the child switch off."""
await self._plug.turn_off()

@property
def is_on(self) -> bool:
"""Return true if child switch is on."""
return bool(self._plug.is_on)

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/tplink_omada",
"integration_type": "hub",
"iot_class": "local_polling",
"requirements": ["tplink_omada_client==1.2.4"]
"requirements": ["tplink_omada_client==1.3.2"]
}

@@ -43,7 +43,7 @@ class TractiveDeviceTracker(TractiveEntity, TrackerEntity):
"""Initialize tracker entity."""
super().__init__(user_id, item.trackable, item.tracker_details)

self._battery_level: int = item.hw_info["battery_level"]
self._battery_level: int | None = item.hw_info.get("battery_level")
self._latitude: float = item.pos_report["latlong"][0]
self._longitude: float = item.pos_report["latlong"][1]
self._accuracy: int = item.pos_report["pos_uncertainty"]
@@ -75,7 +75,7 @@ class TractiveDeviceTracker(TractiveEntity, TrackerEntity):
return self._accuracy

@property
def battery_level(self) -> int:
def battery_level(self) -> int | None:
"""Return the battery level of the device."""
return self._battery_level


@@ -20,16 +20,19 @@ ENTITY_LEGACY_PROVIDER_GROUP = "entity_or_legacy_provider"

_LOGGER = logging.getLogger(__name__)

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_NAME): cv.string,
vol.Exclusive(CONF_TTS_SERVICE, ENTITY_LEGACY_PROVIDER_GROUP): cv.entity_id,
vol.Exclusive(CONF_ENTITY_ID, ENTITY_LEGACY_PROVIDER_GROUP): cv.entities_domain(
DOMAIN
),
vol.Required(CONF_MEDIA_PLAYER): cv.entity_id,
vol.Optional(ATTR_LANGUAGE): cv.string,
}
PLATFORM_SCHEMA = vol.All(
cv.has_at_least_one_key(CONF_TTS_SERVICE, CONF_ENTITY_ID),
PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_NAME): cv.string,
vol.Exclusive(CONF_TTS_SERVICE, ENTITY_LEGACY_PROVIDER_GROUP): cv.entity_id,
vol.Exclusive(
CONF_ENTITY_ID, ENTITY_LEGACY_PROVIDER_GROUP
): cv.entities_domain(DOMAIN),
vol.Required(CONF_MEDIA_PLAYER): cv.entity_id,
vol.Optional(ATTR_LANGUAGE): cv.string,
}
),
)


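The schema change above keeps the two speech sources mutually exclusive (vol.Exclusive) while vol.All layers an "at least one of them must be present" check on top, so a config with neither key now fails validation instead of silently doing nothing. A reduced, self-contained sketch of that shape; the literal keys are stand-ins and cv.has_at_least_one_key is reimplemented locally so the snippet only depends on voluptuous:

import voluptuous as vol

def has_at_least_one_key(*keys):
    """Local stand-in for homeassistant's cv.has_at_least_one_key."""
    def validate(obj):
        if not any(key in obj for key in keys):
            raise vol.Invalid(f"must contain at least one of {', '.join(keys)}")
        return obj
    return validate

SCHEMA = vol.All(
    has_at_least_one_key("tts_service", "entity_id"),
    vol.Schema(
        {
            # Exclusive: supplying both keys from the same group is rejected.
            vol.Exclusive("tts_service", "entity_or_legacy_provider"): str,
            vol.Exclusive("entity_id", "entity_or_legacy_provider"): str,
            vol.Required("media_player"): str,
        }
    ),
)

SCHEMA({"tts_service": "tts.google_say", "media_player": "media_player.kitchen"})  # passes
# SCHEMA({"media_player": "media_player.kitchen"})  # raises: neither source given
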
@@ -44,6 +44,7 @@ COVERS: dict[str, tuple[TuyaCoverEntityDescription, ...]] = {
"cl": (
TuyaCoverEntityDescription(
key=DPCode.CONTROL,
translation_key="curtain",
current_state=DPCode.SITUATION_SET,
current_position=(DPCode.PERCENT_CONTROL, DPCode.PERCENT_STATE),
set_position=DPCode.PERCENT_CONTROL,
@@ -65,6 +66,7 @@ COVERS: dict[str, tuple[TuyaCoverEntityDescription, ...]] = {
),
TuyaCoverEntityDescription(
key=DPCode.MACH_OPERATE,
translation_key="curtain",
current_position=DPCode.POSITION,
set_position=DPCode.POSITION,
device_class=CoverDeviceClass.CURTAIN,
@@ -76,6 +78,7 @@ COVERS: dict[str, tuple[TuyaCoverEntityDescription, ...]] = {
# It is used by the Kogan Smart Blinds Driver
TuyaCoverEntityDescription(
key=DPCode.SWITCH_1,
translation_key="blind",
current_position=DPCode.PERCENT_CONTROL,
set_position=DPCode.PERCENT_CONTROL,
device_class=CoverDeviceClass.BLIND,
@@ -111,6 +114,7 @@ COVERS: dict[str, tuple[TuyaCoverEntityDescription, ...]] = {
"clkg": (
TuyaCoverEntityDescription(
key=DPCode.CONTROL,
translation_key="curtain",
current_position=DPCode.PERCENT_CONTROL,
set_position=DPCode.PERCENT_CONTROL,
device_class=CoverDeviceClass.CURTAIN,
@@ -128,6 +132,7 @@ COVERS: dict[str, tuple[TuyaCoverEntityDescription, ...]] = {
"jdcljqr": (
TuyaCoverEntityDescription(
key=DPCode.CONTROL,
translation_key="curtain",
current_position=DPCode.PERCENT_STATE,
set_position=DPCode.PERCENT_CONTROL,
device_class=CoverDeviceClass.CURTAIN,

@@ -71,6 +71,12 @@
}
},
"cover": {
"blind": {
"name": "[%key:component::cover::entity_component::blind::name%]"
},
"curtain": {
"name": "[%key:component::cover::entity_component::curtain::name%]"
},
"curtain_2": {
"name": "Curtain 2"
},

@@ -105,11 +105,11 @@ SWITCHES: dict[str, tuple[SwitchEntityDescription, ...]] = {
translation_key="plug",
),
),
# Cirquit Breaker
# Circuit Breaker
"dlq": (
SwitchEntityDescription(
key=DPCode.CHILD_LOCK,
translation_key="asd",
translation_key="child_lock",
icon="mdi:account-lock",
entity_category=EntityCategory.CONFIG,
),

@@ -41,7 +41,7 @@ class UnifiImageEntityDescriptionMixin(Generic[HandlerT, ApiItemT]):
"""Validate and load entities from different UniFi handlers."""

image_fn: Callable[[UniFiController, ApiItemT], bytes]
value_fn: Callable[[ApiItemT], str]
value_fn: Callable[[ApiItemT], str | None]


@dataclass
@@ -83,6 +83,10 @@ async def async_setup_entry(
) -> None:
"""Set up image platform for UniFi Network integration."""
controller: UniFiController = hass.data[UNIFI_DOMAIN][config_entry.entry_id]

if controller.site_role != "admin":
return

controller.register_platform_add_entities(
UnifiImageEntity, ENTITY_DESCRIPTIONS, async_add_entities
)
@@ -95,7 +99,7 @@ class UnifiImageEntity(UnifiEntity[HandlerT, ApiItemT], ImageEntity):
_attr_content_type = "image/png"

current_image: bytes | None = None
previous_value = ""
previous_value: str | None = None

def __init__(
self,

@@ -8,7 +8,7 @@
"iot_class": "local_push",
"loggers": ["aiounifi"],
"quality_scale": "platinum",
"requirements": ["aiounifi==50"],
"requirements": ["aiounifi==52"],
"ssdp": [
{
"manufacturer": "Ubiquiti Networks",

@@ -79,6 +79,8 @@ def async_wlan_client_value_fn(controller: UniFiController, wlan: Wlan) -> int:
client.mac
for client in controller.api.clients.values()
if client.essid == wlan.name
and dt_util.utcnow() - dt_util.utc_from_timestamp(client.last_seen or 0)
< controller.option_detection_time
]
)


@@ -83,13 +83,16 @@ class VerisureDataUpdateCoordinator(DataUpdateCoordinator):
raise UpdateFailed("Could not read overview") from err

def unpack(overview: list, value: str) -> dict | list:
return next(
(
item["data"]["installation"][value]
for item in overview
if value in item.get("data", {}).get("installation", {})
),
[],
return (
next(
(
item["data"]["installation"][value]
for item in overview
if value in item.get("data", {}).get("installation", {})
),
[],
)
or []
)

# Store data in a way Home Assistant can easily consume it

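The Verisure change above wraps the next() lookup in "or []" so that an installation section that is present but null no longer propagates None to the sensor code. A standalone copy of the helper with a small, hypothetical overview payload to show both fallbacks:

def unpack(overview: list, value: str) -> dict | list:
    """Pull one installation section out of the overview responses, falling
    back to an empty list when the key is missing or its value is None."""
    return (
        next(
            (
                item["data"]["installation"][value]
                for item in overview
                if value in item.get("data", {}).get("installation", {})
            ),
            [],
        )
        or []
    )

# The trailing "or []" is the actual fix: a present-but-null section would
# otherwise be returned as None and break iteration downstream.
overview = [{"data": {"installation": {"climate": None}}}]
assert unpack(overview, "climate") == []      # key present, value None
assert unpack(overview, "doorWindow") == []   # key missing entirely
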
Some files were not shown because too many files have changed in this diff.