forked from home-assistant/core
Compare commits
132 Commits
2024.4.0b3...2024.4.3
| SHA1 |
|---|
| efe91815fb |
| 62eee52aed |
| 7f6514b03c |
| 2ed1cfd68d |
| a455e142ac |
| 5fa06e5a9c |
| 4aca39b49e |
| d055f98736 |
| 98bc7c0ed2 |
| 0d62e2e92a |
| db5343164f |
| 5c2e9142fa |
| f941e5d5bb |
| 150145c9b1 |
| db2005d4ec |
| 08bd269696 |
| 5723ed28d3 |
| 14da34cd4d |
| f284273ef6 |
| fc60426213 |
| 922cc81a62 |
| 4c6fad8dc3 |
| 733e2ec57a |
| 04072cb3c1 |
| 05082fcceb |
| 8c0b44d6d5 |
| 9a342f87c0 |
| d062ef357b |
| 265d04c593 |
| fc9653581a |
| 7bea6eface |
| 19f3ef763d |
| fa88975055 |
| 1322f38911 |
| 75127105b9 |
| d6793a756f |
| 20e88255df |
| 2f9f1008a5 |
| 90bc21b7f6 |
| b29eb317bd |
| dcd9d987a7 |
| e26ea40570 |
| 0a2d79f63e |
| d1b1d6388f |
| 93569e3827 |
| 61a359e4d2 |
| b1fb77cb4d |
| 95606135a6 |
| 47d9879c0c |
| e3c111b1dd |
| 9937743863 |
| ed3daed869 |
| 5d5dc24b33 |
| c39d6f0730 |
| 87ffd5ac56 |
| 71877fdeda |
| 2434a22e4e |
| 618fa08ab2 |
| 96003e3562 |
| 411e55d059 |
| 58533f02af |
| aa14793479 |
| 0191d3e41b |
| 319f76cdc8 |
| 530725bbfa |
| d8ae7d6955 |
| 3d0bafbdc9 |
| ef8e54877f |
| a39e1a6428 |
| 450be67406 |
| 25289e0ca1 |
| d983fa6da7 |
| b61397656c |
| 590546a9a5 |
| 9ba4d26abd |
| aa33da546d |
| 3845523a27 |
| 6a7fad0228 |
| 33f07ce035 |
| 4302c5c273 |
| b2df1b1c03 |
| 0aa134459b |
| 0ca3700c16 |
| 35ff633d99 |
| 7a2f6ce430 |
| 7cb603a226 |
| 43562289e4 |
| 79fa7caa41 |
| 8bdb27c88b |
| f676448f27 |
| 639c4a843b |
| 02dee34338 |
| 4e0290ce0e |
| fa2f49693c |
| 2ce784105d |
| 85fb4a27a3 |
| 8cbedbe26b |
| 5bd52da13a |
| d53848aae4 |
| 4e0d6f287e |
| 5af5f3694e |
| b539b25682 |
| ca31479d29 |
| 92dfec3c98 |
| 230c29edbe |
| 559fe65471 |
| 384d10a51d |
| e5a620545c |
| 7b84e86f89 |
| 18b6de567d |
| a6076a0d33 |
| 7164993562 |
| bc21836e7e |
| 52612b10fd |
| 623d85ecaa |
| 43631d5944 |
| 112aab47fb |
| ea13f102e0 |
| bb33725e7f |
| bd6890ab83 |
| 25c611ffc4 |
| fc24b61859 |
| 71588b5c22 |
| 14dfb6a255 |
| ef97255d9c |
| e8afdd67d0 |
| 008e4413b5 |
| c373d40e34 |
| bdf51553ef |
| f2edc15687 |
| 286a09d737 |
| e8ee2fd25c |
@@ -1249,6 +1249,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/sms/ @ocalvo
+/homeassistant/components/snapcast/ @luar123
+/tests/components/snapcast/ @luar123
 /homeassistant/components/snmp/ @nmaggioni
 /tests/components/snmp/ @nmaggioni
 /homeassistant/components/snooz/ @AustinBrunkhorst
 /tests/components/snooz/ @AustinBrunkhorst
 /homeassistant/components/solaredge/ @frenck

@@ -93,6 +93,11 @@ from .util.async_ import create_eager_task
 from .util.logging import async_activate_log_queue_handler
 from .util.package import async_get_user_site, is_virtual_env
 
+with contextlib.suppress(ImportError):
+    # Ensure anyio backend is imported to avoid it being imported in the event loop
+    from anyio._backends import _asyncio  # noqa: F401
+
+
 if TYPE_CHECKING:
     from .runner import RuntimeConfig

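The hunk above pre-imports the anyio asyncio backend during bootstrap so the blocking module load happens once at startup instead of later inside the running event loop, with `contextlib.suppress(ImportError)` keeping the dependency optional. A minimal standalone sketch of the same pattern; the `preload_optional` helper is illustrative, not part of Home Assistant:

```python
import contextlib
import importlib

def preload_optional(*modules: str) -> None:
    """Eagerly import optional modules, silently skipping absent ones."""
    for name in modules:
        # Importing up front keeps the blocking import machinery out of
        # the event loop, where a slow disk read would stall every task.
        with contextlib.suppress(ImportError):
            importlib.import_module(name)

preload_optional("anyio._backends._asyncio")
```
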
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
   "iot_class": "cloud_push",
   "loggers": ["aioairzone_cloud"],
-  "requirements": ["aioairzone-cloud==0.4.6"]
+  "requirements": ["aioairzone-cloud==0.4.7"]
 }

@@ -2,10 +2,10 @@
 
 from __future__ import annotations
 
-import logging
 from typing import Any
 
 from aranet4.client import Aranet4Advertisement, Version as AranetVersion
+from bluetooth_data_tools import human_readable_name
 import voluptuous as vol
 
 from homeassistant.components.bluetooth import (
@@ -18,11 +18,15 @@ from homeassistant.data_entry_flow import AbortFlow
 
 from .const import DOMAIN
 
-_LOGGER = logging.getLogger(__name__)
-
 MIN_VERSION = AranetVersion(1, 2, 0)
 
 
+def _title(discovery_info: BluetoothServiceInfoBleak) -> str:
+    return discovery_info.device.name or human_readable_name(
+        None, "Aranet", discovery_info.address
+    )
+
+
 class AranetConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Aranet."""
@@ -61,11 +65,8 @@ class AranetConfigFlow(ConfigFlow, domain=DOMAIN):
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Confirm discovery."""
-        assert self._discovered_device is not None
-        adv = self._discovered_device
         assert self._discovery_info is not None
-        discovery_info = self._discovery_info
-        title = adv.readings.name if adv.readings else discovery_info.name
+        title = _title(self._discovery_info)
         if user_input is not None:
             return self.async_create_entry(title=title, data={})
@@ -101,10 +102,7 @@ class AranetConfigFlow(ConfigFlow, domain=DOMAIN):
                 discovery_info.device, discovery_info.advertisement
             )
             if adv.manufacturer_data:
-                self._discovered_devices[address] = (
-                    adv.readings.name if adv.readings else discovery_info.name,
-                    adv,
-                )
+                self._discovered_devices[address] = (_title(discovery_info), adv)
 
         if not self._discovered_devices:
             return self.async_abort(reason="no_devices_found")

@@ -19,5 +19,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aranet",
   "integration_type": "device",
   "iot_class": "local_push",
-  "requirements": ["aranet4==2.2.2"]
+  "requirements": ["aranet4==2.3.3"]
 }

@@ -5,6 +5,7 @@ from __future__ import annotations
 from datetime import datetime
 from functools import partial
 import logging
+from time import monotonic
 
 from aiohttp import ClientError
 from yalexs.activity import Activity, ActivityType
@@ -26,9 +27,11 @@ _LOGGER = logging.getLogger(__name__)
 ACTIVITY_STREAM_FETCH_LIMIT = 10
 ACTIVITY_CATCH_UP_FETCH_LIMIT = 2500
 
+INITIAL_LOCK_RESYNC_TIME = 60
+
 # If there is a storm of activity (ie lock, unlock, door open, door close, etc)
 # we want to debounce the updates so we don't hammer the activity api too much.
-ACTIVITY_DEBOUNCE_COOLDOWN = 3
+ACTIVITY_DEBOUNCE_COOLDOWN = 4
 
 
 @callback
@@ -62,6 +65,7 @@ class ActivityStream(AugustSubscriberMixin):
         self.pubnub = pubnub
         self._update_debounce: dict[str, Debouncer] = {}
         self._update_debounce_jobs: dict[str, HassJob] = {}
+        self._start_time: float | None = None
 
     @callback
     def _async_update_house_id_later(self, debouncer: Debouncer, _: datetime) -> None:
@@ -70,6 +74,7 @@ class ActivityStream(AugustSubscriberMixin):
 
     async def async_setup(self) -> None:
         """Token refresh check and catch up the activity stream."""
+        self._start_time = monotonic()
         update_debounce = self._update_debounce
         update_debounce_jobs = self._update_debounce_jobs
         for house_id in self._house_ids:
@@ -140,11 +145,25 @@ class ActivityStream(AugustSubscriberMixin):
 
         debouncer = self._update_debounce[house_id]
         debouncer.async_schedule_call()
 
         # Schedule two updates past the debounce time
         # to ensure we catch the case where the activity
         # api does not update right away and we need to poll
         # it again. Sometimes the lock operator or a doorbell
         # will not show up in the activity stream right away.
+        # Only do additional polls if we are past
+        # the initial lock resync time to avoid a storm
+        # of activity at setup.
+        if (
+            not self._start_time
+            or monotonic() - self._start_time < INITIAL_LOCK_RESYNC_TIME
+        ):
+            _LOGGER.debug(
+                "Skipping additional updates due to ongoing initial lock resync time"
+            )
+            return
+
+        _LOGGER.debug("Scheduling additional updates for house id %s", house_id)
         job = self._update_debounce_jobs[house_id]
         for step in (1, 2):
             future_updates.append(

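The activity-stream hunks above pair a debouncer with a monotonic timestamp taken in `async_setup`, so the extra catch-up polls are skipped during the first `INITIAL_LOCK_RESYNC_TIME` seconds after startup. A self-contained sketch of that startup-window check; the class name and output are illustrative, not the integration's API:

```python
from time import monotonic

INITIAL_LOCK_RESYNC_TIME = 60  # seconds, as in the hunk above

class StartupWindow:
    """Track whether we are still inside the initial resync window."""

    def __init__(self) -> None:
        self._start_time: float | None = None

    def start(self) -> None:
        # monotonic() is immune to wall-clock adjustments, so it is the
        # right clock for measuring "time since setup".
        self._start_time = monotonic()

    @property
    def active(self) -> bool:
        return (
            not self._start_time
            or monotonic() - self._start_time < INITIAL_LOCK_RESYNC_TIME
        )

window = StartupWindow()
window.start()
if window.active:
    print("Skipping additional updates: initial resync still in progress")
```
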
@@ -40,7 +40,7 @@ ATTR_OPERATION_TAG = "tag"
 # Limit battery, online, and hardware updates to hourly
 # in order to reduce the number of api requests and
 # avoid hitting rate limits
-MIN_TIME_BETWEEN_DETAIL_UPDATES = timedelta(hours=1)
+MIN_TIME_BETWEEN_DETAIL_UPDATES = timedelta(hours=24)
 
 # Activity needs to be checked more frequently as the
 # doorbell motion and rings are included here

@@ -49,9 +49,17 @@ class AugustSubscriberMixin:
         """Call the refresh method."""
         self._hass.async_create_task(self._async_refresh(now), eager_start=True)
 
+    @callback
+    def _async_cancel_update_interval(self, _: Event | None = None) -> None:
+        """Cancel the scheduled update."""
+        if self._unsub_interval:
+            self._unsub_interval()
+            self._unsub_interval = None
+
     @callback
     def _async_setup_listeners(self) -> None:
         """Create interval and stop listeners."""
+        self._async_cancel_update_interval()
         self._unsub_interval = async_track_time_interval(
             self._hass,
             self._async_scheduled_refresh,
@@ -59,17 +67,12 @@ class AugustSubscriberMixin:
             name="august refresh",
         )
 
-        @callback
-        def _async_cancel_update_interval(_: Event) -> None:
-            self._stop_interval = None
-            if self._unsub_interval:
-                self._unsub_interval()
-
-        self._stop_interval = self._hass.bus.async_listen(
-            EVENT_HOMEASSISTANT_STOP,
-            _async_cancel_update_interval,
-            run_immediately=True,
-        )
+        if not self._stop_interval:
+            self._stop_interval = self._hass.bus.async_listen(
+                EVENT_HOMEASSISTANT_STOP,
+                self._async_cancel_update_interval,
+                run_immediately=True,
+            )
 
     @callback
     def async_unsubscribe_device_id(
@@ -82,13 +85,7 @@ class AugustSubscriberMixin:
 
         if self._subscriptions:
             return
 
-        if self._unsub_interval:
-            self._unsub_interval()
-            self._unsub_interval = None
-        if self._stop_interval:
-            self._stop_interval()
-            self._stop_interval = None
+        self._async_cancel_update_interval()
 
     @callback
     def async_signal_device_id_update(self, device_id: str) -> None:

@@ -812,6 +812,22 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
         """Log helper callback."""
         self._logger.log(level, "%s %s", msg, self.name, **kwargs)
 
+    async def _async_trigger_if_enabled(
+        self,
+        run_variables: dict[str, Any],
+        context: Context | None = None,
+        skip_condition: bool = False,
+    ) -> ScriptRunResult | None:
+        """Trigger automation if enabled.
+
+        If the trigger starts but has a delay, the automation will be triggered
+        when the delay has passed so we need to make sure its still enabled before
+        executing the action.
+        """
+        if not self._is_enabled:
+            return None
+        return await self.async_trigger(run_variables, context, skip_condition)
+
     async def _async_attach_triggers(
         self, home_assistant_start: bool
     ) -> Callable[[], None] | None:
@@ -835,7 +851,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
         return await async_initialize_triggers(
             self.hass,
             self._trigger_config,
-            self.async_trigger,
+            self._async_trigger_if_enabled,
             DOMAIN,
             str(self.name),
             self._log_callback,

@@ -56,6 +56,7 @@ class AxisCamera(AxisEntity, MjpegCamera):
             mjpeg_url=self.mjpeg_source,
             still_image_url=self.image_source,
             authentication=HTTP_DIGEST_AUTHENTICATION,
+            verify_ssl=False,
             unique_id=f"{hub.unique_id}-camera",
         )
@@ -74,16 +75,18 @@ class AxisCamera(AxisEntity, MjpegCamera):
 
         Additionally used when device change IP address.
         """
+        proto = self.hub.config.protocol
+        host = self.hub.config.host
+        port = self.hub.config.port
+
         image_options = self.generate_options(skip_stream_profile=True)
         self._still_image_url = (
-            f"http://{self.hub.config.host}:{self.hub.config.port}/axis-cgi"
-            f"/jpg/image.cgi{image_options}"
+            f"{proto}://{host}:{port}/axis-cgi/jpg/image.cgi{image_options}"
         )
 
         mjpeg_options = self.generate_options()
         self._mjpeg_url = (
-            f"http://{self.hub.config.host}:{self.hub.config.port}/axis-cgi"
-            f"/mjpg/video.cgi{mjpeg_options}"
+            f"{proto}://{host}:{port}/axis-cgi/mjpg/video.cgi{mjpeg_options}"
         )
 
         stream_options = self.generate_options(add_video_codec_h264=True)
@@ -95,10 +98,7 @@ class AxisCamera(AxisEntity, MjpegCamera):
         self.hub.additional_diagnostics["camera_sources"] = {
             "Image": self._still_image_url,
             "MJPEG": self._mjpeg_url,
-            "Stream": (
-                f"rtsp://user:pass@{self.hub.config.host}/axis-media"
-                f"/media.amp{stream_options}"
-            ),
+            "Stream": (f"rtsp://user:pass@{host}/axis-media/media.amp{stream_options}"),
         }
 
     @property

@@ -168,16 +168,13 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
         self, entry_data: Mapping[str, Any], keep_password: bool
     ) -> ConfigFlowResult:
         """Re-run configuration step."""
+        protocol = entry_data.get(CONF_PROTOCOL, "http")
+        password = entry_data[CONF_PASSWORD] if keep_password else ""
         self.discovery_schema = {
-            vol.Required(
-                CONF_PROTOCOL, default=entry_data.get(CONF_PROTOCOL, "http")
-            ): str,
+            vol.Required(CONF_PROTOCOL, default=protocol): vol.In(PROTOCOL_CHOICES),
             vol.Required(CONF_HOST, default=entry_data[CONF_HOST]): str,
             vol.Required(CONF_USERNAME, default=entry_data[CONF_USERNAME]): str,
-            vol.Required(
-                CONF_PASSWORD,
-                default=entry_data[CONF_PASSWORD] if keep_password else "",
-            ): str,
+            vol.Required(CONF_PASSWORD, default=password): str,
             vol.Required(CONF_PORT, default=entry_data[CONF_PORT]): int,
         }

@@ -12,6 +12,7 @@ from homeassistant.const import (
     CONF_NAME,
     CONF_PASSWORD,
     CONF_PORT,
+    CONF_PROTOCOL,
     CONF_TRIGGER_TIME,
     CONF_USERNAME,
 )
@@ -31,6 +32,7 @@ class AxisConfig:
 
     entry: ConfigEntry
 
+    protocol: str
     host: str
     port: int
     username: str
@@ -54,6 +56,7 @@ class AxisConfig:
         options = config_entry.options
         return cls(
             entry=config_entry,
+            protocol=config.get(CONF_PROTOCOL, "http"),
             host=config[CONF_HOST],
             username=config[CONF_USERNAME],
             password=config[CONF_PASSWORD],

@@ -116,7 +116,7 @@ class AxisHub:
         if status.status.state == ClientState.ACTIVE:
             self.config.entry.async_on_unload(
                 await mqtt.async_subscribe(
-                    hass, f"{self.api.vapix.serial_number}/#", self.mqtt_message
+                    hass, f"{status.config.device_topic_prefix}/#", self.mqtt_message
                 )
             )
@@ -124,7 +124,8 @@ class AxisHub:
     def mqtt_message(self, message: ReceiveMessage) -> None:
         """Receive Axis MQTT message."""
         self.disconnect_from_stream()
-
+        if message.topic.endswith("event/connection"):
+            return
         event = mqtt_json_to_event(message.payload)
         self.api.event.handler(event)

@@ -26,7 +26,7 @@
   "iot_class": "local_push",
   "loggers": ["axis"],
   "quality_scale": "platinum",
-  "requirements": ["axis==59"],
+  "requirements": ["axis==61"],
   "ssdp": [
     {
       "manufacturer": "AXIS"

@@ -15,7 +15,7 @@
   "quality_scale": "internal",
   "requirements": [
     "bleak==0.21.1",
-    "bleak-retry-connector==3.4.0",
+    "bleak-retry-connector==3.5.0",
     "bluetooth-adapters==0.18.0",
     "bluetooth-auto-recovery==1.4.0",
     "bluetooth-data-tools==1.19.0",

@@ -8,7 +8,7 @@
   "iot_class": "local_polling",
   "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
   "quality_scale": "platinum",
-  "requirements": ["brother==4.0.2"],
+  "requirements": ["brother==4.1.0"],
   "zeroconf": [
     {
       "type": "_printer._tcp.local.",

@@ -34,6 +34,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         username=entry.data[CONF_USERNAME],
         password=entry.data[CONF_PASSWORD],
         ssl_verify_cert=entry.data[CONF_VERIFY_SSL],
+        timeout=10,
     )
     try:
         await hass.async_add_executor_job(client.principal)

@@ -58,6 +58,7 @@ class GetTemperatureIntent(intent.IntentHandler):
                 raise intent.NoStatesMatchedError(
                     name=entity_text or entity_name,
                     area=area_name or area_id,
+                    floor=None,
                     domains={DOMAIN},
                     device_classes=None,
                 )
@@ -75,6 +76,7 @@ class GetTemperatureIntent(intent.IntentHandler):
             raise intent.NoStatesMatchedError(
                 name=entity_name,
                 area=None,
+                floor=None,
                 domains={DOMAIN},
                 device_classes=None,
             )

@@ -8,5 +8,5 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["hass_nabucasa"],
-  "requirements": ["hass-nabucasa==0.79.0"]
+  "requirements": ["hass-nabucasa==0.78.0"]
 }

@@ -34,6 +34,7 @@ from homeassistant.helpers import (
     area_registry as ar,
     device_registry as dr,
     entity_registry as er,
+    floor_registry as fr,
     intent,
     start,
     template,
@@ -163,7 +164,12 @@ class DefaultAgent(AbstractConversationAgent):
 
         self.hass.bus.async_listen(
             ar.EVENT_AREA_REGISTRY_UPDATED,
-            self._async_handle_area_registry_changed,
+            self._async_handle_area_floor_registry_changed,
             run_immediately=True,
         )
+        self.hass.bus.async_listen(
+            fr.EVENT_FLOOR_REGISTRY_UPDATED,
+            self._async_handle_area_floor_registry_changed,
+            run_immediately=True,
+        )
         self.hass.bus.async_listen(
@@ -696,10 +702,13 @@ class DefaultAgent(AbstractConversationAgent):
         return lang_intents
 
     @core.callback
-    def _async_handle_area_registry_changed(
-        self, event: core.Event[ar.EventAreaRegistryUpdatedData]
+    def _async_handle_area_floor_registry_changed(
+        self,
+        event: core.Event[
+            ar.EventAreaRegistryUpdatedData | fr.EventFloorRegistryUpdatedData
+        ],
     ) -> None:
-        """Clear area area cache when the area registry has changed."""
+        """Clear area/floor list cache when the area registry has changed."""
         self._slot_lists = None
 
     @core.callback
@@ -773,6 +782,8 @@ class DefaultAgent(AbstractConversationAgent):
             # Default name
             entity_names.append((state.name, state.name, context))
 
+        _LOGGER.debug("Exposed entities: %s", entity_names)
+
         # Expose all areas.
         #
         # We pass in area id here with the expectation that no two areas will
@@ -788,11 +799,25 @@ class DefaultAgent(AbstractConversationAgent):
 
                 area_names.append((alias, area.id))
 
-        _LOGGER.debug("Exposed entities: %s", entity_names)
+        # Expose all floors.
+        #
+        # We pass in floor id here with the expectation that no two floors will
+        # share the same name or alias.
+        floors = fr.async_get(self.hass)
+        floor_names = []
+        for floor in floors.async_list_floors():
+            floor_names.append((floor.name, floor.floor_id))
+            if floor.aliases:
+                for alias in floor.aliases:
+                    if not alias.strip():
+                        continue
+
+                    floor_names.append((alias, floor.floor_id))
+
         self._slot_lists = {
             "area": TextSlotList.from_tuples(area_names, allow_template=False),
             "name": TextSlotList.from_tuples(entity_names, allow_template=False),
+            "floor": TextSlotList.from_tuples(floor_names, allow_template=False),
         }
 
         return self._slot_lists
@@ -953,6 +978,10 @@ def _get_unmatched_response(result: RecognizeResult) -> tuple[ErrorKey, dict[str, Any]]:
         # area only
         return ErrorKey.NO_AREA, {"area": unmatched_area}
 
+    if unmatched_floor := unmatched_text.get("floor"):
+        # floor only
+        return ErrorKey.NO_FLOOR, {"floor": unmatched_floor}
+
     # Area may still have matched
     matched_area: str | None = None
     if matched_area_entity := result.entities.get("area"):
@@ -1000,6 +1029,13 @@ def _get_no_states_matched_response(
             "area": no_states_error.area,
         }
 
+    if no_states_error.floor:
+        # domain in floor
+        return ErrorKey.NO_DOMAIN_IN_FLOOR, {
+            "domain": domain,
+            "floor": no_states_error.floor,
+        }
+
     # domain only
     return ErrorKey.NO_DOMAIN, {"domain": domain}

@@ -7,5 +7,5 @@
   "integration_type": "system",
   "iot_class": "local_push",
   "quality_scale": "internal",
-  "requirements": ["hassil==1.6.1", "home-assistant-intents==2024.3.27"]
+  "requirements": ["hassil==1.6.1", "home-assistant-intents==2024.4.3"]
 }

@@ -11,7 +11,11 @@ import requests
 import voluptuous as vol
 
 from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
-from homeassistant.core import HomeAssistant, ServiceCall
+from homeassistant.core import (
+    DOMAIN as HOMEASSISTANT_DOMAIN,
+    HomeAssistant,
+    ServiceCall,
+)
 from homeassistant.data_entry_flow import FlowResultType
 import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
@@ -43,6 +47,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     if DOMAIN not in config:
         return True
 
+    hass.async_create_task(_async_import_config(hass, config))
+    return True
+
+
+async def _async_import_config(hass: HomeAssistant, config: ConfigType) -> None:
+    """Import the Downloader component from the YAML file."""
+
     import_result = await hass.config_entries.flow.async_init(
         DOMAIN,
         context={"source": SOURCE_IMPORT},
@@ -51,28 +62,40 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         },
     )
 
-    translation_key = "deprecated_yaml"
     if (
         import_result["type"] == FlowResultType.ABORT
-        and import_result["reason"] == "import_failed"
+        and import_result["reason"] != "single_instance_allowed"
     ):
-        translation_key = "import_failed"
-
-    async_create_issue(
-        hass,
-        DOMAIN,
-        f"deprecated_yaml_{DOMAIN}",
-        breaks_in_ha_version="2024.9.0",
-        is_fixable=False,
-        issue_domain=DOMAIN,
-        severity=IssueSeverity.WARNING,
-        translation_key=translation_key,
-        translation_placeholders={
-            "domain": DOMAIN,
-            "integration_title": "Downloader",
-        },
-    )
-    return True
+        async_create_issue(
+            hass,
+            DOMAIN,
+            f"deprecated_yaml_{DOMAIN}",
+            breaks_in_ha_version="2024.10.0",
+            is_fixable=False,
+            issue_domain=DOMAIN,
+            severity=IssueSeverity.WARNING,
+            translation_key="directory_does_not_exist",
+            translation_placeholders={
+                "domain": DOMAIN,
+                "integration_title": "Downloader",
+                "url": "/config/integrations/dashboard/add?domain=downloader",
+            },
+        )
+    else:
+        async_create_issue(
+            hass,
+            HOMEASSISTANT_DOMAIN,
+            f"deprecated_yaml_{DOMAIN}",
+            breaks_in_ha_version="2024.10.0",
+            is_fixable=False,
+            issue_domain=DOMAIN,
+            severity=IssueSeverity.WARNING,
+            translation_key="deprecated_yaml",
+            translation_placeholders={
+                "domain": DOMAIN,
+                "integration_title": "Downloader",
+            },
+        )
 
 
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@@ -83,7 +106,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     if not os.path.isabs(download_path):
         download_path = hass.config.path(download_path)
 
-    if not os.path.isdir(download_path):
+    if not await hass.async_add_executor_job(os.path.isdir, download_path):
         _LOGGER.error(
             "Download path %s does not exist. File Downloader not active", download_path
         )

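`os.path.isdir` touches the filesystem, so the hunk above moves it off the event loop with `hass.async_add_executor_job`. Outside Home Assistant the same idea is expressed with `asyncio.to_thread`; a small sketch under that assumption:

```python
import asyncio
import os

async def download_dir_exists(path: str) -> bool:
    # Run the blocking stat() call in a worker thread rather than
    # directly in the event loop.
    return await asyncio.to_thread(os.path.isdir, path)

async def main() -> None:
    if not await download_dir_exists("/tmp"):
        print("Download path does not exist. File Downloader not active")

asyncio.run(main())
```
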
@@ -46,19 +46,24 @@ class DownloaderConfigFlow(ConfigFlow, domain=DOMAIN):
             errors=errors,
         )
 
-    async def async_step_import(
-        self, user_input: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
+    async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult:
         """Handle a flow initiated by configuration file."""
         if self._async_current_entries():
             return self.async_abort(reason="single_instance_allowed")
 
-        return await self.async_step_user(user_input)
+        try:
+            await self._validate_input(user_input)
+        except DirectoryDoesNotExist:
+            return self.async_abort(reason="directory_does_not_exist")
+        return self.async_create_entry(title=DEFAULT_NAME, data=user_input)
 
     async def _validate_input(self, user_input: dict[str, Any]) -> None:
         """Validate the user input if the directory exists."""
-        if not os.path.isabs(user_input[CONF_DOWNLOAD_DIR]):
-            download_path = self.hass.config.path(user_input[CONF_DOWNLOAD_DIR])
+        download_path = user_input[CONF_DOWNLOAD_DIR]
+        if not os.path.isabs(download_path):
+            download_path = self.hass.config.path(download_path)
 
-        if not os.path.isdir(download_path):
+        if not await self.hass.async_add_executor_job(os.path.isdir, download_path):
             _LOGGER.error(
                 "Download path %s does not exist. File Downloader not active",
                 download_path,

@@ -37,13 +37,9 @@
     }
   },
   "issues": {
-    "deprecated_yaml": {
-      "title": "The {integration_title} YAML configuration is being removed",
-      "description": "Configuring {integration_title} using YAML is being removed.\n\nYour configuration is already imported.\n\nRemove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue."
-    },
-    "import_failed": {
+    "directory_does_not_exist": {
       "title": "The {integration_title} failed to import",
-      "description": "The {integration_title} integration failed to import.\n\nPlease check the logs for more details."
+      "description": "The {integration_title} integration failed to import because the configured directory does not exist.\n\nEnsure the directory exists and restart Home Assistant to try again or remove the {integration_title} configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
     }
   }
 }

@@ -1,7 +1,7 @@
 {
   "services": {
     "restart": "mdi:restart",
-    "start": "mdi:start",
+    "start": "mdi:play",
     "stop": "mdi:stop"
   }
 }

@@ -121,6 +121,7 @@ async def async_setup_entry(
             Platform.COVER,
             Platform.LIGHT,
+            Platform.LOCK,
             Platform.SENSOR,
             Platform.SWITCH,
         )
         for device in controller.fibaro_devices[platform]

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/flexit_bacnet",
   "integration_type": "device",
   "iot_class": "local_polling",
-  "requirements": ["flexit_bacnet==2.1.0"]
+  "requirements": ["flexit_bacnet==2.2.1"]
 }

@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "quality_scale": "platinum",
-  "requirements": ["forecast-solar==3.0.0"]
+  "requirements": ["forecast-solar==3.1.0"]
 }

@@ -311,6 +311,17 @@ class FritzBoxTools(
         )
         return unregister_entity_updates
 
+    def _entity_states_update(self) -> dict:
+        """Run registered entity update calls."""
+        entity_states = {}
+        for key in list(self._entity_update_functions):
+            if (update_fn := self._entity_update_functions.get(key)) is not None:
+                _LOGGER.debug("update entity %s", key)
+                entity_states[key] = update_fn(
+                    self.fritz_status, self.data["entity_states"].get(key)
+                )
+        return entity_states
+
     async def _async_update_data(self) -> UpdateCoordinatorDataType:
         """Update FritzboxTools data."""
         entity_data: UpdateCoordinatorDataType = {
@@ -319,15 +330,9 @@ class FritzBoxTools(
         }
         try:
             await self.async_scan_devices()
-            for key in list(self._entity_update_functions):
-                _LOGGER.debug("update entity %s", key)
-                entity_data["entity_states"][
-                    key
-                ] = await self.hass.async_add_executor_job(
-                    self._entity_update_functions[key],
-                    self.fritz_status,
-                    self.data["entity_states"].get(key),
-                )
+            entity_data["entity_states"] = await self.hass.async_add_executor_job(
+                self._entity_states_update
+            )
             if self.has_call_deflections:
                 entity_data[
                     "call_deflections"

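The FritzBox hunks above replace one executor job per registered update function with a single `_entity_states_update` job that runs them all, paying the thread-pool handoff once per refresh instead of once per entity. A rough standalone sketch of the shape of that change; the callables are stand-ins:

```python
import asyncio
from collections.abc import Callable

update_functions: dict[str, Callable[[], int]] = {
    "sensor_a": lambda: 1,
    "sensor_b": lambda: 2,
}

def run_all_updates() -> dict[str, int]:
    # One synchronous pass over every update callable, executed as a
    # single executor job instead of one job per key.
    return {key: fn() for key, fn in update_functions.items()}

async def refresh() -> dict[str, int]:
    return await asyncio.to_thread(run_all_updates)

print(asyncio.run(refresh()))  # {'sensor_a': 1, 'sensor_b': 2}
```
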
@@ -141,7 +141,7 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN):
             return self.async_abort(reason="already_in_progress")
 
         # update old and user-configured config entries
-        for entry in self._async_current_entries():
+        for entry in self._async_current_entries(include_ignore=False):
             if entry.data[CONF_HOST] == host:
                 if uuid and not entry.unique_id:
                     self.hass.config_entries.async_update_entry(entry, unique_id=uuid)

@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20240329.1"]
+  "requirements": ["home-assistant-frontend==20240404.2"]
 }

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/fyta",
   "integration_type": "hub",
   "iot_class": "cloud_polling",
-  "requirements": ["fyta_cli==0.3.3"]
+  "requirements": ["fyta_cli==0.3.5"]
 }

@@ -46,35 +46,35 @@ SENSORS: Final[list[FytaSensorEntityDescription]] = [
         translation_key="plant_status",
         device_class=SensorDeviceClass.ENUM,
         options=PLANT_STATUS_LIST,
-        value_fn=lambda value: PLANT_STATUS[value],
+        value_fn=PLANT_STATUS.get,
     ),
     FytaSensorEntityDescription(
         key="temperature_status",
         translation_key="temperature_status",
         device_class=SensorDeviceClass.ENUM,
         options=PLANT_STATUS_LIST,
-        value_fn=lambda value: PLANT_STATUS[value],
+        value_fn=PLANT_STATUS.get,
     ),
     FytaSensorEntityDescription(
         key="light_status",
         translation_key="light_status",
         device_class=SensorDeviceClass.ENUM,
         options=PLANT_STATUS_LIST,
-        value_fn=lambda value: PLANT_STATUS[value],
+        value_fn=PLANT_STATUS.get,
     ),
     FytaSensorEntityDescription(
         key="moisture_status",
         translation_key="moisture_status",
         device_class=SensorDeviceClass.ENUM,
         options=PLANT_STATUS_LIST,
-        value_fn=lambda value: PLANT_STATUS[value],
+        value_fn=PLANT_STATUS.get,
    ),
     FytaSensorEntityDescription(
         key="salinity_status",
         translation_key="salinity_status",
         device_class=SensorDeviceClass.ENUM,
         options=PLANT_STATUS_LIST,
-        value_fn=lambda value: PLANT_STATUS[value],
+        value_fn=PLANT_STATUS.get,
     ),
     FytaSensorEntityDescription(
         key="temperature",

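Swapping `lambda value: PLANT_STATUS[value]` for the bound method `PLANT_STATUS.get` makes an unknown status yield `None` (an unknown state) instead of raising `KeyError`. Illustrated below with assumed mapping contents:

```python
PLANT_STATUS = {1: "doing_great", 2: "need_attention", 3: "no_sensor"}  # assumed values

strict = lambda value: PLANT_STATUS[value]  # old style: raises on unexpected keys
lenient = PLANT_STATUS.get                  # new style: returns None instead

print(lenient(2))   # need_attention
print(lenient(99))  # None -> entity reports unknown rather than erroring
# strict(99) would raise KeyError: 99
```
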
@@ -112,8 +112,9 @@ class AsyncConfigEntryAuth:
                 raise GoogleTasksApiError(
                     f"Google Tasks API responded with error ({exception.status_code})"
                 ) from exception
-            data = json.loads(response)
-            _raise_if_error(data)
+            if response:
+                data = json.loads(response)
+                _raise_if_error(data)
 
         for task_id in task_ids:
             batch.add(

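Batched responses can come back with an empty body, and `json.loads("")` raises `json.JSONDecodeError`; the `if response:` guard above only parses when there is something to parse. A tiny sketch of the guard in isolation:

```python
import json

def parse_batch_response(response: str | None) -> dict | None:
    # An empty or missing body is a successful no-content reply,
    # not an error, so only parse when a payload is present.
    if response:
        return json.loads(response)
    return None

print(parse_batch_response(""))           # None
print(parse_batch_response('{"ok": 1}'))  # {'ok': 1}
```
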
@@ -196,7 +196,7 @@ SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
     {
         vol.Optional(ATTR_HOMEASSISTANT): cv.boolean,
         vol.Optional(ATTR_FOLDERS): vol.All(cv.ensure_list, [cv.string]),
-        vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [cv.slug]),
+        vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [VALID_ADDON_SLUG]),
     }
 )
@@ -211,7 +211,7 @@ SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
     {
         vol.Optional(ATTR_HOMEASSISTANT): cv.boolean,
         vol.Optional(ATTR_FOLDERS): vol.All(cv.ensure_list, [cv.string]),
-        vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [cv.slug]),
+        vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [VALID_ADDON_SLUG]),
     }
 )

@@ -22,7 +22,7 @@ from .const import (
 from .handler import async_apply_suggestion
 from .issues import Issue, Suggestion
 
-SUGGESTION_CONFIRMATION_REQUIRED = {"system_execute_reboot"}
+SUGGESTION_CONFIRMATION_REQUIRED = {"system_adopt_data_disk", "system_execute_reboot"}
 
 EXTRA_PLACEHOLDERS = {
     "issue_mount_mount_failed": {

@@ -51,8 +51,15 @@
       "title": "Multiple data disks detected",
       "fix_flow": {
         "step": {
-          "system_rename_data_disk": {
-            "description": "`{reference}` is a filesystem with the name hassos-data and is not the active data disk. This can cause Home Assistant to choose the wrong data disk at system reboot.\n\nUse the fix option to rename the filesystem to prevent this. Alternatively you can move the data disk to the drive (overwriting its contents) or remove the drive from the system."
+          "fix_menu": {
+            "description": "`{reference}` is a filesystem with the name hassos-data and is not the active data disk. This can cause Home Assistant to choose the wrong data disk at system reboot.\n\nUse the 'Rename' option to rename the filesystem to prevent this. Use the 'Adopt' option to make that your data disk and rename the existing one. Alternatively you can move the data disk to the drive (overwriting its contents) or remove the drive from the system.",
+            "menu_options": {
+              "system_rename_data_disk": "Rename",
+              "system_adopt_data_disk": "Adopt"
+            }
+          },
+          "system_adopt_data_disk": {
+            "description": "This fix will initiate a system reboot which will make Home Assistant and all the Add-ons inaccessible for a brief period. After the reboot `{reference}` will be the data disk of Home Assistant and your existing data disk will be renamed and ignored."
+          }
         },
         "abort": {

@@ -2,15 +2,36 @@
 
 from __future__ import annotations
 
+from functools import partial
+
 from holidays import country_holidays
 
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import Platform
+from homeassistant.const import CONF_COUNTRY, Platform
 from homeassistant.core import HomeAssistant
+from homeassistant.setup import SetupPhases, async_pause_setup
 
 from .const import CONF_PROVINCE
 
 PLATFORMS: list[Platform] = [Platform.CALENDAR]
 
 
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up Holiday from a config entry."""
+    country: str = entry.data[CONF_COUNTRY]
+    province: str | None = entry.data.get(CONF_PROVINCE)
+
+    # We only import here to ensure that that its not imported later
+    # in the event loop since the platforms will call country_holidays
+    # which loads python code from disk.
+    with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES):
+        # import executor job is used here because multiple integrations use
+        # the holidays library and it is not thread safe to import it in parallel
+        # https://github.com/python/cpython/issues/83065
+        await hass.async_add_import_executor_job(
+            partial(country_holidays, country, subdiv=province)
+        )
+
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
 
     return True

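Per the comments in the hunk, the `holidays` package loads country modules on demand and, per the linked CPython issue, importing it from several threads at once is not safe, so the import is funneled through Home Assistant's dedicated import executor. A rough stdlib equivalent of the same pattern, with a single-thread pool standing in for `async_add_import_executor_job`:

```python
import asyncio
from concurrent.futures import ThreadPoolExecutor
from functools import partial

# A one-thread executor serializes imports that must not run in
# parallel (https://github.com/python/cpython/issues/83065).
IMPORT_EXECUTOR = ThreadPoolExecutor(max_workers=1)

def _load_country(country: str, subdiv: str | None):
    from holidays import country_holidays  # blocking import happens here
    return country_holidays(country, subdiv=subdiv)

async def load_holidays(country: str, subdiv: str | None = None):
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        IMPORT_EXECUTOR, partial(_load_country, country, subdiv)
    )

# usage: asyncio.run(load_holidays("US", "CA"))
```
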
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/holiday",
   "iot_class": "local_polling",
-  "requirements": ["holidays==0.45", "babel==2.13.1"]
+  "requirements": ["holidays==0.46", "babel==2.13.1"]
 }

@@ -113,7 +113,11 @@ class HMThermostat(HMDevice, ClimateEntity):
     @property
     def preset_modes(self):
         """Return a list of available preset modes."""
-        return [HM_PRESET_MAP[mode] for mode in self._hmdevice.ACTIONNODE]
+        return [
+            HM_PRESET_MAP[mode]
+            for mode in self._hmdevice.ACTIONNODE
+            if mode in HM_PRESET_MAP
+        ]
 
     @property
     def current_humidity(self):

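The fix filters `ACTIONNODE` entries through the preset map so an unmapped node no longer raises `KeyError`. A standalone illustration with assumed map contents:

```python
HM_PRESET_MAP = {"BOOST_MODE": "boost", "COMFORT_MODE": "comfort"}  # assumed subset
ACTIONNODE = ["BOOST_MODE", "COMFORT_MODE", "VALVE_ADAPTION"]

# Old: [HM_PRESET_MAP[m] for m in ACTIONNODE] raises KeyError on VALVE_ADAPTION.
presets = [HM_PRESET_MAP[m] for m in ACTIONNODE if m in HM_PRESET_MAP]
print(presets)  # ['boost', 'comfort']
```
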
@@ -1,6 +1,6 @@
 {
   "services": {
-    "select_next": "mdi:skip",
+    "select_next": "mdi:skip-next",
     "select_option": "mdi:check",
     "select_previous": "mdi:skip-previous",
     "select_first": "mdi:skip-backward",

@@ -12,7 +12,7 @@
   "quality_scale": "platinum",
   "requirements": [
     "xknx==2.12.2",
-    "xknxproject==3.7.0",
+    "xknxproject==3.7.1",
     "knx-frontend==2024.1.20.105944"
   ],
   "single_config_entry": true

@@ -12,5 +12,5 @@
   "integration_type": "hub",
   "iot_class": "cloud_push",
   "loggers": ["pylitterbot"],
-  "requirements": ["pylitterbot==2023.4.9"]
+  "requirements": ["pylitterbot==2023.4.11"]
 }

@@ -35,6 +35,7 @@ LITTER_BOX_STATUS_STATE_MAP = {
     LitterBoxStatus.CLEAN_CYCLE: STATE_CLEANING,
     LitterBoxStatus.EMPTY_CYCLE: STATE_CLEANING,
     LitterBoxStatus.CLEAN_CYCLE_COMPLETE: STATE_DOCKED,
+    LitterBoxStatus.CAT_DETECTED: STATE_DOCKED,
     LitterBoxStatus.CAT_SENSOR_TIMING: STATE_DOCKED,
     LitterBoxStatus.DRAWER_FULL_1: STATE_DOCKED,
     LitterBoxStatus.DRAWER_FULL_2: STATE_DOCKED,

@@ -179,7 +179,7 @@ async def _get_dashboard_info(hass, url_path):
         "views": views,
     }
 
-    if config is None:
+    if config is None or "views" not in config:
         return data
 
     for idx, view in enumerate(config["views"]):

@@ -141,7 +141,7 @@ class LutronLight(LutronDevice, LightEntity):
         else:
             brightness = self._prev_brightness
         self._prev_brightness = brightness
-        args = {"new_level": brightness}
+        args = {"new_level": to_lutron_level(brightness)}
         if ATTR_TRANSITION in kwargs:
             args["fade_time_seconds"] = kwargs[ATTR_TRANSITION]
         self._lutron_device.set_level(**args)

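Home Assistant passes brightness on a 0-255 scale while Lutron levels run 0-100, which is what `to_lutron_level` converts between; previously the raw 0-255 value was sent through. A plausible sketch of such a helper (hypothetical implementation, the real one lives in the integration):

```python
def to_lutron_level(level: int) -> float:
    """Convert Home Assistant brightness (0-255) to a Lutron level (0.0-100.0)."""
    return round(level * 100.0 / 255.0, 2)

print(to_lutron_level(255))  # 100.0
print(to_lutron_level(128))  # 50.2
```
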
@@ -7,5 +7,5 @@
   "iot_class": "calculated",
   "loggers": ["yt_dlp"],
   "quality_scale": "internal",
-  "requirements": ["yt-dlp==2024.03.10"]
+  "requirements": ["yt-dlp==2024.04.09"]
 }

@@ -52,7 +52,7 @@
     "unjoin": "mdi:ungroup",
     "volume_down": "mdi:volume-minus",
     "volume_mute": "mdi:volume-mute",
-    "volume_set": "mdi:volume",
+    "volume_set": "mdi:volume-medium",
     "volume_up": "mdi:volume-plus"
   }
 }

@@ -440,6 +440,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
 
 async def async_reset_platform(hass: HomeAssistant, integration_name: str) -> None:
     """Release modbus resources."""
+    if DOMAIN not in hass.data:
+        _LOGGER.error("Modbus cannot reload, because it was never loaded")
+        return
     _LOGGER.info("Modbus reloading")
     hubs = hass.data[DOMAIN]
     for name in hubs:

@@ -6,5 +6,5 @@
   "iot_class": "local_polling",
   "loggers": ["pymodbus"],
   "quality_scale": "platinum",
-  "requirements": ["pymodbus==3.6.6"]
+  "requirements": ["pymodbus==3.6.7"]
 }

@@ -1,4 +1,4 @@
-"""Motionblinds BLE integration."""
+"""Motionblinds Bluetooth integration."""
 
 from __future__ import annotations
@@ -34,9 +34,9 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
 
 
 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
-    """Set up Motionblinds BLE integration."""
+    """Set up Motionblinds Bluetooth integration."""
 
-    _LOGGER.debug("Setting up Motionblinds BLE integration")
+    _LOGGER.debug("Setting up Motionblinds Bluetooth integration")
 
     # The correct time is needed for encryption
     _LOGGER.debug("Setting timezone for encryption: %s", hass.config.time_zone)
@@ -46,7 +46,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
 
 
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
-    """Set up Motionblinds BLE device from a config entry."""
+    """Set up Motionblinds Bluetooth device from a config entry."""
 
     _LOGGER.debug("(%s) Setting up device", entry.data[CONF_MAC_CODE])
@@ -94,7 +94,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
 
 
 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
-    """Unload Motionblinds BLE device from a config entry."""
+    """Unload Motionblinds Bluetooth device from a config entry."""
 
     if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
         hass.data[DOMAIN].pop(entry.entry_id)
@@ -1,4 +1,4 @@
-"""Button entities for the Motionblinds BLE integration."""
+"""Button entities for the Motionblinds Bluetooth integration."""
 
 from __future__ import annotations
@@ -1,4 +1,4 @@
-"""Config flow for Motionblinds BLE integration."""
+"""Config flow for Motionblinds Bluetooth integration."""
 
 from __future__ import annotations
@@ -38,7 +38,7 @@ CONFIG_SCHEMA = vol.Schema({vol.Required(CONF_MAC_CODE): str})
 
 
 class FlowHandler(ConfigFlow, domain=DOMAIN):
-    """Handle a config flow for Motionblinds BLE."""
+    """Handle a config flow for Motionblinds Bluetooth."""
 
     def __init__(self) -> None:
         """Initialize a ConfigFlow."""
@@ -1,4 +1,4 @@
-"""Constants for the Motionblinds BLE integration."""
+"""Constants for the Motionblinds Bluetooth integration."""
 
 ATTR_CONNECT = "connect"
 ATTR_DISCONNECT = "disconnect"
@@ -1,4 +1,4 @@
-"""Cover entities for the Motionblinds BLE integration."""
+"""Cover entities for the Motionblinds Bluetooth integration."""
 
 from __future__ import annotations
@@ -1,4 +1,4 @@
-"""Base entities for the Motionblinds BLE integration."""
+"""Base entities for the Motionblinds Bluetooth integration."""
 
 import logging
@@ -16,7 +16,7 @@ _LOGGER = logging.getLogger(__name__)
 
 
 class MotionblindsBLEEntity(Entity):
-    """Base class for Motionblinds BLE entities."""
+    """Base class for Motionblinds Bluetooth entities."""
 
     _attr_has_entity_name = True
     _attr_should_poll = False
@@ -1,6 +1,6 @@
 {
   "domain": "motionblinds_ble",
-  "name": "Motionblinds BLE",
+  "name": "Motionblinds Bluetooth",
   "bluetooth": [
     {
       "local_name": "MOTION_*",
@@ -1,4 +1,4 @@
-"""Select entities for the Motionblinds BLE integration."""
+"""Select entities for the Motionblinds Bluetooth integration."""
 
 from __future__ import annotations

@@ -5,7 +5,7 @@ from __future__ import annotations
 from http import HTTPStatus
 
 from aiohttp import ClientError, ClientResponseError
-from myuplink import MyUplinkAPI, get_manufacturer, get_system_name
+from myuplink import MyUplinkAPI, get_manufacturer, get_model, get_system_name
 
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import Platform
@@ -92,7 +92,7 @@ def create_devices(
             identifiers={(DOMAIN, device_id)},
             name=get_system_name(system),
             manufacturer=get_manufacturer(device),
-            model=device.productName,
+            model=get_model(device),
             sw_version=device.firmwareCurrent,
             serial_number=device.product_serial_number,
         )
@@ -6,5 +6,5 @@
   "dependencies": ["application_credentials"],
   "documentation": "https://www.home-assistant.io/integrations/myuplink",
   "iot_class": "cloud_polling",
-  "requirements": ["myuplink==0.5.0"]
+  "requirements": ["myuplink==0.6.0"]
 }

@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/neato",
   "iot_class": "cloud_polling",
   "loggers": ["pybotvac"],
-  "requirements": ["pybotvac==0.0.24"]
+  "requirements": ["pybotvac==0.0.25"]
 }

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/nobo_hub",
   "integration_type": "hub",
   "iot_class": "local_push",
-  "requirements": ["pynobo==1.8.0"]
+  "requirements": ["pynobo==1.8.1"]
 }

@@ -108,7 +108,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         (CONF_REFRESH_TOKEN, client.refresh_token),
         (CONF_USER_UUID, client.user_uuid),
     ):
-        if entry.data[key] == value:
+        if entry.data.get(key) == value:
             continue
         entry_updates["data"][key] = value

@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/opower",
   "iot_class": "cloud_polling",
   "loggers": ["opower"],
-  "requirements": ["opower==0.4.1"]
+  "requirements": ["opower==0.4.3"]
 }

@@ -298,6 +298,11 @@ class HitachiAirToAirHeatPumpOVP(OverkizEntity, ClimateEntity):
             OverkizState.OVP_FAN_SPEED,
             OverkizCommandParam.AUTO,
         )
+        # Sanitize fan mode: Overkiz is sometimes providing a state that
+        # cannot be used as a command. Convert it to HA space and back to Overkiz
+        if fan_mode not in FAN_MODES_TO_OVERKIZ.values():
+            fan_mode = FAN_MODES_TO_OVERKIZ[OVERKIZ_TO_FAN_MODES[fan_mode]]
+
         hvac_mode = self._control_backfill(
             hvac_mode,
             OverkizState.OVP_MODE_CHANGE,
@@ -357,5 +362,5 @@ class HitachiAirToAirHeatPumpOVP(OverkizEntity, ClimateEntity):
         ]
 
         await self.executor.async_execute_command(
-            OverkizCommand.GLOBAL_CONTROL, command_data
+            OverkizCommand.GLOBAL_CONTROL, *command_data
         )

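`command_data` is a sequence of parameters; without the `*`, the whole list arrived as a single positional argument. The unpacking difference in isolation, with illustrative values:

```python
def execute_command(command: str, *params: object) -> None:
    print(command, params)

command_data = ["auto", 22, "on"]  # illustrative parameters
execute_command("globalControl", command_data)   # params == (['auto', 22, 'on'],)
execute_command("globalControl", *command_data)  # params == ('auto', 22, 'on')
```
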
@@ -8,7 +8,7 @@
   "iot_class": "local_push",
   "loggers": ["plexapi", "plexwebsocket"],
   "requirements": [
-    "PlexAPI==4.15.10",
+    "PlexAPI==4.15.11",
     "plexauth==0.0.6",
     "plexwebsocket==0.0.14"
   ],

@@ -258,7 +258,7 @@ class PrometheusMetrics:
         self, entity_id: str, friendly_name: str | None = None
     ) -> None:
         """Remove labelsets matching the given entity id from all metrics."""
-        for metric in self._metrics.values():
+        for metric in list(self._metrics.values()):
             for sample in cast(list[prometheus_client.Metric], metric.collect())[
                 0
             ].samples:

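Deleting entries from a dict while iterating its live `.values()` view raises `RuntimeError: dictionary changed size during iteration`; wrapping the view in `list()` snapshots it first. Minimal reproduction of the safe variant:

```python
metrics = {"temp": 1, "humidity": 2, "stale": 3}

# Iterating list(metrics) copies the keys up front, so removing
# entries inside the loop is safe.
for name in list(metrics):
    if name == "stale":
        del metrics[name]

print(metrics)  # {'temp': 1, 'humidity': 2}
```
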
@@ -123,7 +123,8 @@ class RainBirdSwitch(CoordinatorEntity[RainbirdUpdateCoordinator], SwitchEntity):
 
         # The device reflects the old state for a few moments. Update the
         # state manually and trigger a refresh after a short debounced delay.
-        self.coordinator.data.active_zones.remove(self._zone)
+        if self.is_on:
+            self.coordinator.data.active_zones.remove(self._zone)
         self.async_write_ha_state()
         await self.coordinator.async_request_refresh()

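`active_zones.remove(...)` raises if the zone is already absent, for example when a switch is turned off twice before the next refresh; the `is_on` guard makes the optimistic update idempotent. Sketched here with a set, where a missing element raises `KeyError` (a list would raise `ValueError`):

```python
active_zones = {1, 3}

def turn_off(zone: int) -> None:
    # Guard: a second "off" for the same zone must be a no-op,
    # not an exception.
    if zone in active_zones:
        active_zones.remove(zone)

turn_off(3)
turn_off(3)  # safely ignored
print(active_zones)  # {1}
```
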
@@ -715,6 +715,7 @@ class Statistics(Base, StatisticsBase):
             "start_ts",
             unique=True,
         ),
+        _DEFAULT_TABLE_ARGS,
     )
     __tablename__ = TABLE_STATISTICS
@@ -732,6 +733,7 @@ class StatisticsShortTerm(Base, StatisticsBase):
             "start_ts",
             unique=True,
         ),
+        _DEFAULT_TABLE_ARGS,
     )
     __tablename__ = TABLE_STATISTICS_SHORT_TERM
@@ -760,7 +762,10 @@ class StatisticsMeta(Base):
 class RecorderRuns(Base):
     """Representation of recorder run."""
 
-    __table_args__ = (Index("ix_recorder_runs_start_end", "start", "end"),)
+    __table_args__ = (
+        Index("ix_recorder_runs_start_end", "start", "end"),
+        _DEFAULT_TABLE_ARGS,
+    )
     __tablename__ = TABLE_RECORDER_RUNS
     run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
     start: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)
@@ -789,6 +794,7 @@ class MigrationChanges(Base):
     """Representation of migration changes."""
 
     __tablename__ = TABLE_MIGRATION_CHANGES
+    __table_args__ = (_DEFAULT_TABLE_ARGS,)
 
     migration_id: Mapped[str] = mapped_column(String(255), primary_key=True)
     version: Mapped[int] = mapped_column(SmallInteger)
@@ -798,6 +804,8 @@ class SchemaChanges(Base):
     """Representation of schema version changes."""
 
     __tablename__ = TABLE_SCHEMA_CHANGES
+    __table_args__ = (_DEFAULT_TABLE_ARGS,)
 
     change_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
     schema_version: Mapped[int | None] = mapped_column(Integer)
     changed: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)
@@ -816,6 +824,8 @@ class StatisticsRuns(Base):
     """Representation of statistics run."""
 
     __tablename__ = TABLE_STATISTICS_RUNS
+    __table_args__ = (_DEFAULT_TABLE_ARGS,)
 
     run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
     start: Mapped[datetime] = mapped_column(DATETIME_TYPE, index=True)

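These recorder hunks append a shared `_DEFAULT_TABLE_ARGS` to every remaining table definition; in SQLAlchemy, `__table_args__` may end with a dict of per-dialect table options so the same options apply uniformly. A minimal sketch with assumed option values:

```python
from sqlalchemy import Index, Integer
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

# Shared per-dialect options; the values here are assumptions.
_DEFAULT_TABLE_ARGS = {"mysql_engine": "InnoDB", "mysql_charset": "utf8mb4"}

class Base(DeclarativeBase):
    pass

class StatisticsRuns(Base):
    __tablename__ = "statistics_runs"
    # Indexes and constraints come first; the trailing dict carries
    # dialect-specific table options.
    __table_args__ = (Index("ix_statistics_runs_start", "start"), _DEFAULT_TABLE_ARGS)

    run_id: Mapped[int] = mapped_column(Integer, primary_key=True)
    start: Mapped[int] = mapped_column(Integer)
```
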
@@ -46,7 +46,6 @@ class ReolinkVODMediaSource(MediaSource):
         """Initialize ReolinkVODMediaSource."""
         super().__init__(DOMAIN)
         self.hass = hass
-        self.data: dict[str, ReolinkData] = hass.data[DOMAIN]
 
     async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
         """Resolve media to a url."""
@@ -57,7 +56,8 @@ class ReolinkVODMediaSource(MediaSource):
         _, config_entry_id, channel_str, stream_res, filename = identifier
         channel = int(channel_str)
 
-        host = self.data[config_entry_id].host
+        data: dict[str, ReolinkData] = self.hass.data[DOMAIN]
+        host = data[config_entry_id].host
 
         vod_type = VodRequestType.RTMP
         if host.api.is_nvr:
@@ -130,7 +130,8 @@ class ReolinkVODMediaSource(MediaSource):
             if config_entry.state != ConfigEntryState.LOADED:
                 continue
             channels: list[str] = []
-            host = self.data[config_entry.entry_id].host
+            data: dict[str, ReolinkData] = self.hass.data[DOMAIN]
+            host = data[config_entry.entry_id].host
             entities = er.async_entries_for_config_entry(
                 entity_reg, config_entry.entry_id
             )
@@ -187,7 +188,8 @@ class ReolinkVODMediaSource(MediaSource):
         self, config_entry_id: str, channel: int
     ) -> BrowseMediaSource:
         """Allow the user to select the high or low playback resolution, (low loads faster)."""
-        host = self.data[config_entry_id].host
+        data: dict[str, ReolinkData] = self.hass.data[DOMAIN]
+        host = data[config_entry_id].host
 
         main_enc = await host.api.get_encoding(channel, "main")
         if main_enc == "h265":
@@ -236,7 +238,8 @@ class ReolinkVODMediaSource(MediaSource):
         self, config_entry_id: str, channel: int, stream: str
     ) -> BrowseMediaSource:
         """Return all days on which recordings are available for a reolink camera."""
-        host = self.data[config_entry_id].host
+        data: dict[str, ReolinkData] = self.hass.data[DOMAIN]
+        host = data[config_entry_id].host
 
         # We want today of the camera, not necessarily today of the server
         now = host.api.time() or await host.api.async_get_time()
@@ -288,7 +291,8 @@ class ReolinkVODMediaSource(MediaSource):
         day: int,
     ) -> BrowseMediaSource:
         """Return all recording files on a specific day of a Reolink camera."""
-        host = self.data[config_entry_id].host
+        data: dict[str, ReolinkData] = self.hass.data[DOMAIN]
+        host = data[config_entry_id].host
 
         start = dt.datetime(year, month, day, hour=0, minute=0, second=0)
         end = dt.datetime(year, month, day, hour=23, minute=59, second=59)

@@ -13,5 +13,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ring",
   "iot_class": "cloud_polling",
   "loggers": ["ring_doorbell"],
-  "requirements": ["ring-doorbell[listen]==0.8.8"]
+  "requirements": ["ring-doorbell[listen]==0.8.9"]
 }

@@ -38,7 +38,9 @@ from homeassistant.helpers.storage import Store
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 
 from .const import (
+    CONF_CONCURRENCY,
     DATA_COORDINATOR,
+    DEFAULT_CONCURRENCY,
     DEFAULT_SCAN_INTERVAL,
     DOMAIN,
     EVENTS_COORDINATOR,
@@ -85,7 +87,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
 
 async def _async_setup_local_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     data = entry.data
-    risco = RiscoLocal(data[CONF_HOST], data[CONF_PORT], data[CONF_PIN])
+    concurrency = entry.options.get(CONF_CONCURRENCY, DEFAULT_CONCURRENCY)
+    risco = RiscoLocal(
+        data[CONF_HOST], data[CONF_PORT], data[CONF_PIN], concurrency=concurrency
+    )
 
     try:
         await risco.connect()
@@ -96,7 +101,7 @@ async def _async_setup_local_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         return False
 
     async def _error(error: Exception) -> None:
-        _LOGGER.error("Error in Risco library: %s", error)
+        _LOGGER.error("Error in Risco library", exc_info=error)
 
     entry.async_on_unload(risco.add_error_handler(_error))

@@ -35,8 +35,10 @@ from .const import (
     CONF_CODE_ARM_REQUIRED,
     CONF_CODE_DISARM_REQUIRED,
     CONF_COMMUNICATION_DELAY,
+    CONF_CONCURRENCY,
     CONF_HA_STATES_TO_RISCO,
     CONF_RISCO_STATES_TO_HA,
+    DEFAULT_ADVANCED_OPTIONS,
     DEFAULT_OPTIONS,
     DOMAIN,
     MAX_COMMUNICATION_DELAY,

@@ -225,11 +227,8 @@ class RiscoOptionsFlowHandler(OptionsFlow):
         self._data = {**DEFAULT_OPTIONS, **config_entry.options}

     def _options_schema(self) -> vol.Schema:
-        return vol.Schema(
+        schema = vol.Schema(
             {
-                vol.Required(
-                    CONF_SCAN_INTERVAL, default=self._data[CONF_SCAN_INTERVAL]
-                ): int,
                 vol.Required(
                     CONF_CODE_ARM_REQUIRED, default=self._data[CONF_CODE_ARM_REQUIRED]
                 ): bool,

@@ -239,6 +238,19 @@ class RiscoOptionsFlowHandler(OptionsFlow):
                 ): bool,
             }
         )
+        if self.show_advanced_options:
+            self._data = {**DEFAULT_ADVANCED_OPTIONS, **self._data}
+            schema = schema.extend(
+                {
+                    vol.Required(
+                        CONF_SCAN_INTERVAL, default=self._data[CONF_SCAN_INTERVAL]
+                    ): int,
+                    vol.Required(
+                        CONF_CONCURRENCY, default=self._data[CONF_CONCURRENCY]
+                    ): int,
+                }
+            )
+        return schema

     async def async_step_init(
         self, user_input: dict[str, Any] | None = None
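For context, vol.Schema.extend() returns a new schema that merges in the extra keys, which is why the advanced options can be bolted on conditionally as above. A minimal standalone sketch (illustrative keys and defaults, not the integration's constants):

import voluptuous as vol

base = vol.Schema({vol.Required("code_arm_required", default=False): bool})
advanced = base.extend({vol.Required("concurrency", default=4): int})

# The extended schema validates the base and advanced keys together.
print(advanced({"code_arm_required": True, "concurrency": 2}))
# -> {'code_arm_required': True, 'concurrency': 2}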
@@ -14,6 +14,7 @@ DATA_COORDINATOR = "risco"
 EVENTS_COORDINATOR = "risco_events"

 DEFAULT_SCAN_INTERVAL = 30
+DEFAULT_CONCURRENCY = 4

 TYPE_LOCAL = "local"

@@ -25,6 +26,7 @@ CONF_CODE_DISARM_REQUIRED = "code_disarm_required"
 CONF_RISCO_STATES_TO_HA = "risco_states_to_ha"
 CONF_HA_STATES_TO_RISCO = "ha_states_to_risco"
 CONF_COMMUNICATION_DELAY = "communication_delay"
+CONF_CONCURRENCY = "concurrency"

 RISCO_GROUPS = ["A", "B", "C", "D"]
 RISCO_ARM = "arm"

@@ -44,9 +46,13 @@ DEFAULT_HA_STATES_TO_RISCO = {
 }

 DEFAULT_OPTIONS = {
-    CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
     CONF_CODE_ARM_REQUIRED: False,
     CONF_CODE_DISARM_REQUIRED: False,
     CONF_RISCO_STATES_TO_HA: DEFAULT_RISCO_STATES_TO_HA,
     CONF_HA_STATES_TO_RISCO: DEFAULT_HA_STATES_TO_RISCO,
 }
+
+DEFAULT_ADVANCED_OPTIONS = {
+    CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
+    CONF_CONCURRENCY: DEFAULT_CONCURRENCY,
+}
@@ -36,7 +36,8 @@
     "init": {
       "title": "Configure options",
       "data": {
-        "scan_interval": "How often to poll Risco (in seconds)",
+        "scan_interval": "How often to poll Risco Cloud (in seconds)",
+        "concurrency": "Maximum concurrent requests in Risco local",
         "code_arm_required": "Require PIN to arm",
         "code_disarm_required": "Require PIN to disarm"
       }
@@ -5,6 +5,6 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/romy",
   "iot_class": "local_polling",
-  "requirements": ["romy==0.0.7"],
+  "requirements": ["romy==0.0.10"],
   "zeroconf": ["_aicu-http._tcp.local."]
 }
@@ -24,7 +24,7 @@
   "documentation": "https://www.home-assistant.io/integrations/roomba",
   "iot_class": "local_push",
   "loggers": ["paho_mqtt", "roombapy"],
-  "requirements": ["roombapy==1.6.13"],
+  "requirements": ["roombapy==1.8.1"],
   "zeroconf": [
     {
       "type": "_amzn-alexa._tcp.local.",
@@ -54,7 +54,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
         vol.Optional(CONF_HOUSE_NUMBER_SUFFIX, default=""): cv.string,
         vol.Optional(CONF_NAME, default="Rova"): cv.string,
         vol.Optional(CONF_MONITORED_CONDITIONS, default=["bio"]): vol.All(
-            cv.ensure_list, [vol.In(SENSOR_TYPES)]
+            cv.ensure_list, [vol.In(["bio", "paper", "plastic", "residual"])]
         ),
     }
 )
@@ -234,3 +234,5 @@ DEVICES_WITHOUT_FIRMWARE_CHANGELOG = (
 )

 CONF_GEN = "gen"
+
+SHELLY_PLUS_RGBW_CHANNELS = 4
@@ -14,6 +14,7 @@ from homeassistant.components.light import (
     ATTR_RGB_COLOR,
     ATTR_RGBW_COLOR,
     ATTR_TRANSITION,
+    DOMAIN as LIGHT_DOMAIN,
     ColorMode,
     LightEntity,
     LightEntityFeature,

@@ -34,12 +35,14 @@ from .const import (
     RGBW_MODELS,
     RPC_MIN_TRANSITION_TIME_SEC,
     SHBLB_1_RGB_EFFECTS,
+    SHELLY_PLUS_RGBW_CHANNELS,
     STANDARD_RGB_EFFECTS,
 )
 from .coordinator import ShellyBlockCoordinator, ShellyRpcCoordinator, get_entry_data
 from .entity import ShellyBlockEntity, ShellyRpcEntity
 from .utils import (
     async_remove_shelly_entity,
+    async_remove_shelly_rpc_entities,
     brightness_to_percentage,
     get_device_entry_gen,
     get_rpc_key_ids,

@@ -118,14 +121,28 @@ def async_setup_rpc_entry(
         return

     if light_key_ids := get_rpc_key_ids(coordinator.device.status, "light"):
+        # Light mode remove RGB & RGBW entities, add light entities
+        async_remove_shelly_rpc_entities(
+            hass, LIGHT_DOMAIN, coordinator.mac, ["rgb:0", "rgbw:0"]
+        )
         async_add_entities(RpcShellyLight(coordinator, id_) for id_ in light_key_ids)
         return

+    light_keys = [f"light:{i}" for i in range(SHELLY_PLUS_RGBW_CHANNELS)]
+
     if rgb_key_ids := get_rpc_key_ids(coordinator.device.status, "rgb"):
+        # RGB mode remove light & RGBW entities, add RGB entity
+        async_remove_shelly_rpc_entities(
+            hass, LIGHT_DOMAIN, coordinator.mac, [*light_keys, "rgbw:0"]
+        )
         async_add_entities(RpcShellyRgbLight(coordinator, id_) for id_ in rgb_key_ids)
         return

     if rgbw_key_ids := get_rpc_key_ids(coordinator.device.status, "rgbw"):
+        # RGBW mode remove light & RGB entities, add RGBW entity
+        async_remove_shelly_rpc_entities(
+            hass, LIGHT_DOMAIN, coordinator.mac, [*light_keys, "rgb:0"]
+        )
         async_add_entities(RpcShellyRgbwLight(coordinator, id_) for id_ in rgbw_key_ids)
@@ -222,7 +222,7 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity):
     ) -> None:
         """Initialize update entity."""
         super().__init__(coordinator, key, attribute, description)
-        self._ota_in_progress: bool = False
+        self._ota_in_progress: bool | int = False
         self._attr_release_url = get_release_url(
             coordinator.device.gen, coordinator.model, description.beta
         )

@@ -237,14 +237,13 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity):
     @callback
     def _ota_progress_callback(self, event: dict[str, Any]) -> None:
         """Handle device OTA progress."""
-        if self._ota_in_progress:
+        if self.in_progress is not False:
             event_type = event["event"]
             if event_type == OTA_BEGIN:
-                self._attr_in_progress = 0
+                self._ota_in_progress = 0
             elif event_type == OTA_PROGRESS:
-                self._attr_in_progress = event["progress_percent"]
+                self._ota_in_progress = event["progress_percent"]
             elif event_type in (OTA_ERROR, OTA_SUCCESS):
-                self._attr_in_progress = False
+                self._ota_in_progress = False
             self.async_write_ha_state()

@@ -262,6 +261,11 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity):

         return self.installed_version

+    @property
+    def in_progress(self) -> bool | int:
+        """Update installation in progress."""
+        return self._ota_in_progress
+
     async def async_install(
         self, version: str | None, backup: bool, **kwargs: Any
     ) -> None:

@@ -292,7 +296,7 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity):
             await self.coordinator.async_shutdown_device_and_start_reauth()
         else:
             self._ota_in_progress = True
-            LOGGER.debug("OTA update call successful")
+            LOGGER.info("OTA update call for %s successful", self.coordinator.name)


 class RpcSleepingUpdateEntity(
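The widened bool | int type follows the update-entity convention this code relies on: in_progress is False while idle and an integer percentage during installation. A stripped-down sketch of that state machine (stand-in class and event names patterned on the hunk, not the integration's actual code):

class OtaProgress:
    """Track OTA state as False (idle) or an int percentage (installing)."""

    def __init__(self) -> None:
        self._ota_in_progress: bool | int = False

    @property
    def in_progress(self) -> bool | int:
        return self._ota_in_progress

    def handle(self, event: dict) -> None:
        if event["event"] == "ota_begin":
            self._ota_in_progress = 0
        elif event["event"] == "ota_progress":
            self._ota_in_progress = event["progress_percent"]
        elif event["event"] in ("ota_error", "ota_success"):
            self._ota_in_progress = False


ota = OtaProgress()
ota.handle({"event": "ota_progress", "progress_percent": 42})
print(ota.in_progress)  # 42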
@@ -488,3 +488,15 @@ async def async_shutdown_device(device: BlockDevice | RpcDevice) -> None:
         await device.shutdown()
+    if isinstance(device, BlockDevice):
+        device.shutdown()
+
+
+@callback
+def async_remove_shelly_rpc_entities(
+    hass: HomeAssistant, domain: str, mac: str, keys: list[str]
+) -> None:
+    """Remove RPC based Shelly entity."""
+    entity_reg = er_async_get(hass)
+    for key in keys:
+        if entity_id := entity_reg.async_get_entity_id(domain, DOMAIN, f"{mac}-{key}"):
+            LOGGER.debug("Removing entity: %s", entity_id)
+            entity_reg.async_remove(entity_id)
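A hypothetical call to the new helper, mirroring how the light.py hunk above uses it to drop stale entities when a device switches profiles (the MAC value here is made up):

# Removes the RGB/RGBW entities registered under this device MAC, if present.
async_remove_shelly_rpc_entities(hass, "light", "AABBCCDDEEFF", ["rgb:0", "rgbw:0"])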
@@ -45,7 +45,7 @@ class SnapcastConfigFlow(ConfigFlow, domain=DOMAIN):
             except OSError:
                 errors["base"] = "cannot_connect"
             else:
-                await client.stop()
+                client.stop()
                 return self.async_create_entry(title=DEFAULT_TITLE, data=user_input)
         return self.async_show_form(
             step_id="user", data_schema=SNAPCAST_SCHEMA, errors=errors
@@ -5,8 +5,19 @@ from __future__ import annotations
 import binascii
 import logging

-from pysnmp.entity import config as cfg
-from pysnmp.entity.rfc3413.oneliner import cmdgen
+from pysnmp.error import PySnmpError
+from pysnmp.hlapi.asyncio import (
+    CommunityData,
+    ContextData,
+    ObjectIdentity,
+    ObjectType,
+    SnmpEngine,
+    Udp6TransportTarget,
+    UdpTransportTarget,
+    UsmUserData,
+    bulkWalkCmd,
+    isEndOfMib,
+)
 import voluptuous as vol

 from homeassistant.components.device_tracker import (

@@ -24,7 +35,13 @@ from .const import (
     CONF_BASEOID,
     CONF_COMMUNITY,
     CONF_PRIV_KEY,
+    DEFAULT_AUTH_PROTOCOL,
     DEFAULT_COMMUNITY,
+    DEFAULT_PORT,
+    DEFAULT_PRIV_PROTOCOL,
+    DEFAULT_TIMEOUT,
+    DEFAULT_VERSION,
+    SNMP_VERSIONS,
 )

 _LOGGER = logging.getLogger(__name__)

@@ -40,9 +57,12 @@ PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend(
 )


-def get_scanner(hass: HomeAssistant, config: ConfigType) -> SnmpScanner | None:
+async def async_get_scanner(
+    hass: HomeAssistant, config: ConfigType
+) -> SnmpScanner | None:
     """Validate the configuration and return an SNMP scanner."""
     scanner = SnmpScanner(config[DOMAIN])
+    await scanner.async_init()

     return scanner if scanner.success_init else None

@@ -51,39 +71,75 @@ class SnmpScanner(DeviceScanner):
     """Queries any SNMP capable Access Point for connected devices."""

     def __init__(self, config):
-        """Initialize the scanner."""
+        """Initialize the scanner and test the target device."""
+        host = config[CONF_HOST]
+        community = config[CONF_COMMUNITY]
+        baseoid = config[CONF_BASEOID]
+        authkey = config.get(CONF_AUTH_KEY)
+        authproto = DEFAULT_AUTH_PROTOCOL
+        privkey = config.get(CONF_PRIV_KEY)
+        privproto = DEFAULT_PRIV_PROTOCOL

-        self.snmp = cmdgen.CommandGenerator()
+        try:
+            # Try IPv4 first.
+            target = UdpTransportTarget((host, DEFAULT_PORT), timeout=DEFAULT_TIMEOUT)
+        except PySnmpError:
+            # Then try IPv6.
+            try:
+                target = Udp6TransportTarget(
+                    (host, DEFAULT_PORT), timeout=DEFAULT_TIMEOUT
+                )
+            except PySnmpError as err:
+                _LOGGER.error("Invalid SNMP host: %s", err)
+                return

-        self.host = cmdgen.UdpTransportTarget((config[CONF_HOST], 161))
-        if CONF_AUTH_KEY not in config or CONF_PRIV_KEY not in config:
-            self.auth = cmdgen.CommunityData(config[CONF_COMMUNITY])
+        if authkey is not None or privkey is not None:
+            if not authkey:
+                authproto = "none"
+            if not privkey:
+                privproto = "none"
+
+            request_args = [
+                SnmpEngine(),
+                UsmUserData(
+                    community,
+                    authKey=authkey or None,
+                    privKey=privkey or None,
+                    authProtocol=authproto,
+                    privProtocol=privproto,
+                ),
+                target,
+                ContextData(),
+            ]
         else:
-            self.auth = cmdgen.UsmUserData(
-                config[CONF_COMMUNITY],
-                config[CONF_AUTH_KEY],
-                config[CONF_PRIV_KEY],
-                authProtocol=cfg.usmHMACSHAAuthProtocol,
-                privProtocol=cfg.usmAesCfb128Protocol,
-            )
-        self.baseoid = cmdgen.MibVariable(config[CONF_BASEOID])
-        self.last_results = []
+            request_args = [
+                SnmpEngine(),
+                CommunityData(community, mpModel=SNMP_VERSIONS[DEFAULT_VERSION]),
+                target,
+                ContextData(),
+            ]

-        # Test the router is accessible
-        data = self.get_snmp_data()
+        self.request_args = request_args
+        self.baseoid = baseoid
+        self.last_results = []
+        self.success_init = False
+
+    async def async_init(self):
+        """Make a one-off read to check if the target device is reachable and readable."""
+        data = await self.async_get_snmp_data()
         self.success_init = data is not None

-    def scan_devices(self):
+    async def async_scan_devices(self):
         """Scan for new devices and return a list with found device IDs."""
-        self._update_info()
+        await self._async_update_info()
         return [client["mac"] for client in self.last_results if client.get("mac")]

-    def get_device_name(self, device):
+    async def async_get_device_name(self, device):
         """Return the name of the given device or None if we don't know."""
         # We have no names
         return None

-    def _update_info(self):
+    async def _async_update_info(self):
         """Ensure the information from the device is up to date.

         Return boolean if scanning successful.

@@ -91,38 +147,42 @@ class SnmpScanner(DeviceScanner):
         if not self.success_init:
             return False

-        if not (data := self.get_snmp_data()):
+        if not (data := await self.async_get_snmp_data()):
             return False

         self.last_results = data
         return True

-    def get_snmp_data(self):
+    async def async_get_snmp_data(self):
         """Fetch MAC addresses from access point via SNMP."""
         devices = []

-        errindication, errstatus, errindex, restable = self.snmp.nextCmd(
-            self.auth, self.host, self.baseoid
+        walker = bulkWalkCmd(
+            *self.request_args,
+            0,
+            50,
+            ObjectType(ObjectIdentity(self.baseoid)),
+            lexicographicMode=False,
         )
+        async for errindication, errstatus, errindex, res in walker:
+            if errindication:
+                _LOGGER.error("SNMPLIB error: %s", errindication)
+                return
+            if errstatus:
+                _LOGGER.error(
+                    "SNMP error: %s at %s",
+                    errstatus.prettyPrint(),
+                    errindex and res[int(errindex) - 1][0] or "?",
+                )
+                return

-        if errindication:
-            _LOGGER.error("SNMPLIB error: %s", errindication)
-            return
-        if errstatus:
-            _LOGGER.error(
-                "SNMP error: %s at %s",
-                errstatus.prettyPrint(),
-                errindex and restable[int(errindex) - 1][0] or "?",
-            )
-            return
-
-        for resrow in restable:
-            for _, val in resrow:
-                try:
-                    mac = binascii.hexlify(val.asOctets()).decode("utf-8")
-                except AttributeError:
-                    continue
-                _LOGGER.debug("Found MAC address: %s", mac)
-                mac = ":".join([mac[i : i + 2] for i in range(0, len(mac), 2)])
-                devices.append({"mac": mac})
+            for _oid, value in res:
+                if not isEndOfMib(res):
+                    try:
+                        mac = binascii.hexlify(value.asOctets()).decode("utf-8")
+                    except AttributeError:
+                        continue
+                    _LOGGER.debug("Found MAC address: %s", mac)
+                    mac = ":".join([mac[i : i + 2] for i in range(0, len(mac), 2)])
+                    devices.append({"mac": mac})
         return devices
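The move from the blocking cmdgen.nextCmd to the asyncio bulkWalkCmd generator is the core of this rewrite: rows now arrive incrementally from GETBULK requests instead of one blocking table read. A self-contained walk using the same pysnmp asyncio API, with made-up host, community, and OID values:

import asyncio

from pysnmp.hlapi.asyncio import (
    CommunityData,
    ContextData,
    ObjectIdentity,
    ObjectType,
    SnmpEngine,
    UdpTransportTarget,
    bulkWalkCmd,
    isEndOfMib,
)


async def walk() -> None:
    walker = bulkWalkCmd(
        SnmpEngine(),
        CommunityData("public", mpModel=1),  # SNMP v2c
        UdpTransportTarget(("192.0.2.1", 161), timeout=5),
        ContextData(),
        0,  # nonRepeaters
        50,  # maxRepetitions per GETBULK request
        ObjectType(ObjectIdentity("1.3.6.1.2.1.1")),
        lexicographicMode=False,  # stop at the end of the requested subtree
    )
    async for errindication, errstatus, _errindex, res in walker:
        if errindication or errstatus:
            break  # a real integration would log the error, as above
        for oid, value in res:
            if not isEndOfMib(res):
                print(oid.prettyPrint(), value.prettyPrint())


asyncio.run(walk())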
@@ -1,7 +1,7 @@
 {
   "domain": "snmp",
   "name": "SNMP",
-  "codeowners": [],
+  "codeowners": ["@nmaggioni"],
   "documentation": "https://www.home-assistant.io/integrations/snmp",
   "iot_class": "local_polling",
   "loggers": ["pyasn1", "pysmi", "pysnmp"],
@@ -270,7 +270,7 @@ class SnmpData:
             "SNMP OID %s received type=%s and data %s",
             self._baseoid,
             type(value),
-            bytes(value),
+            value,
         )
         if isinstance(value, NoSuchObject):
             _LOGGER.error(
@@ -7,6 +7,7 @@ from contextlib import suppress
 from functools import partial
 import logging
 from typing import cast
+import urllib.parse

 from soco.data_structures import DidlObject
 from soco.ms_data_structures import MusicServiceItem

@@ -60,12 +61,14 @@ def get_thumbnail_url_full(
             media_content_id,
             media_content_type,
         )
-        return getattr(item, "album_art_uri", None)
+        return urllib.parse.unquote(getattr(item, "album_art_uri", ""))

-    return get_browse_image_url(
-        media_content_type,
-        media_content_id,
-        media_image_id,
+    return urllib.parse.unquote(
+        get_browse_image_url(
+            media_content_type,
+            media_content_id,
+            media_image_id,
+        )
     )

@@ -166,6 +169,7 @@ def build_item_response(
         payload["idstring"] = "A:ALBUMARTIST/" + "/".join(
             payload["idstring"].split("/")[2:]
         )
+    payload["idstring"] = urllib.parse.unquote(payload["idstring"])

     try:
         search_type = MEDIA_TYPES_TO_SONOS[payload["search_type"]]

@@ -201,7 +205,7 @@ def build_item_response(

     if not title:
         try:
-            title = payload["idstring"].split("/")[1]
+            title = urllib.parse.unquote(payload["idstring"].split("/")[1])
         except IndexError:
             title = LIBRARY_TITLES_MAPPING[payload["idstring"]]

@@ -493,10 +497,24 @@ def get_media(
     """Fetch media/album."""
     search_type = MEDIA_TYPES_TO_SONOS.get(search_type, search_type)

+    if search_type == "playlists":
+        # Format is S:TITLE or S:ITEM_ID
+        splits = item_id.split(":")
+        title = splits[1] if len(splits) > 1 else None
+        playlist = next(
+            (
+                p
+                for p in media_library.get_playlists()
+                if (item_id == p.item_id or title == p.title)
+            ),
+            None,
+        )
+        return playlist
+
     if not item_id.startswith("A:ALBUM") and search_type == SONOS_ALBUM:
         item_id = "A:ALBUMARTIST/" + "/".join(item_id.split("/")[2:])

-    search_term = item_id.split("/")[-1]
+    search_term = urllib.parse.unquote(item_id.split("/")[-1])
     matches = media_library.get_music_library_information(
         search_type, search_term=search_term, full_album_art_uri=True
     )
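The hunks above all apply the same fix: Sonos item IDs and art URIs arrive percent-encoded, so titles containing spaces or slashes only match the library after decoding. For example (illustrative IDs):

import urllib.parse

item_id = "A:ALBUM/Random%20Access%20Memories"
print(urllib.parse.unquote(item_id.split("/")[-1]))  # Random Access Memories
print(urllib.parse.unquote("AC%2FDC"))               # AC/DC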
@@ -626,13 +626,13 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
             soco.play_uri(media_id, force_radio=is_radio)
         elif media_type == MediaType.PLAYLIST:
             if media_id.startswith("S:"):
-                item = media_browser.get_media(self.media.library, media_id, media_type)
-                soco.play_uri(item.get_uri())
-                return
-            try:
+                playlist = media_browser.get_media(
+                    self.media.library, media_id, media_type
+                )
+            else:
                 playlists = soco.get_sonos_playlists(complete_result=True)
-                playlist = next(p for p in playlists if p.title == media_id)
-            except StopIteration:
+                playlist = next((p for p in playlists if p.title == media_id), None)
+            if not playlist:
                 _LOGGER.error('Could not find a Sonos playlist named "%s"', media_id)
             else:
                 soco.clear_queue()
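The try/except StopIteration around a bare next() is replaced by next(iterable, None) plus an explicit check, the idiomatic way to express "first match or nothing". In isolation:

playlists = [{"title": "Morning"}, {"title": "Evening"}]

match = next((p for p in playlists if p["title"] == "Party"), None)
if not match:
    print("no such playlist")  # the None default replaces the StopIteration path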
@@ -25,10 +25,11 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
             partial(speedtest.Speedtest, secure=True)
         )
         coordinator = SpeedTestDataCoordinator(hass, config_entry, api)
+        await hass.async_add_executor_job(coordinator.update_servers)
     except speedtest.SpeedtestException as err:
         raise ConfigEntryNotReady from err

+    hass.data[DOMAIN] = coordinator

     async def _async_finish_startup(hass: HomeAssistant) -> None:
         """Run this only when HA has finished its startup."""
         await coordinator.async_config_entry_first_refresh()

@@ -36,8 +37,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
     # Don't start a speedtest during startup
     async_at_started(hass, _async_finish_startup)

-    hass.data[DOMAIN] = coordinator
-
     await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
     config_entry.async_on_unload(config_entry.add_update_listener(update_listener))
@@ -58,14 +58,14 @@ class StarlinkUpdateCoordinator(DataUpdateCoordinator[StarlinkData]):
     async def _async_update_data(self) -> StarlinkData:
         async with asyncio.timeout(4):
             try:
-                status, location, sleep = await asyncio.gather(
-                    self.hass.async_add_executor_job(status_data, self.channel_context),
-                    self.hass.async_add_executor_job(
-                        location_data, self.channel_context
-                    ),
-                    self.hass.async_add_executor_job(
-                        get_sleep_config, self.channel_context
-                    ),
+                status = await self.hass.async_add_executor_job(
+                    status_data, self.channel_context
+                )
+                location = await self.hass.async_add_executor_job(
+                    location_data, self.channel_context
+                )
+                sleep = await self.hass.async_add_executor_job(
+                    get_sleep_config, self.channel_context
                 )
                 return StarlinkData(location, sleep, *status)
             except GrpcError as exc:
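The change trades asyncio.gather() over three executor jobs for three sequential awaits, so the blocking gRPC reads run one after another instead of on separate threads at once. The shape of the pattern, with stand-in blocking functions (read_status/read_location are hypothetical):

import asyncio


def read_status() -> str:  # stand-in for a blocking gRPC read
    return "status"


def read_location() -> str:  # stand-in for a second blocking read
    return "location"


async def update() -> None:
    loop = asyncio.get_running_loop()
    # One executor job at a time; the next starts only after the previous returns.
    status = await loop.run_in_executor(None, read_status)
    location = await loop.run_in_executor(None, read_location)
    print(status, location)


asyncio.run(update())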
@@ -10,6 +10,7 @@ import math
 from homeassistant.components.time import TimeEntity, TimeEntityDescription
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.entity_platform import AddEntitiesCallback

 from .const import DOMAIN

@@ -62,14 +63,22 @@ class StarlinkTimeEntity(StarlinkEntity, TimeEntity):
 def _utc_minutes_to_time(utc_minutes: int, timezone: tzinfo) -> time:
     hour = math.floor(utc_minutes / 60)
     minute = utc_minutes % 60
-    utc = datetime.now(UTC).replace(hour=hour, minute=minute, second=0, microsecond=0)
+    try:
+        utc = datetime.now(UTC).replace(
+            hour=hour, minute=minute, second=0, microsecond=0
+        )
+    except ValueError as exc:
+        raise HomeAssistantError from exc
     return utc.astimezone(timezone).time()


 def _time_to_utc_minutes(t: time, timezone: tzinfo) -> int:
-    zoned_time = datetime.now(timezone).replace(
-        hour=t.hour, minute=t.minute, second=0, microsecond=0
-    )
+    try:
+        zoned_time = datetime.now(timezone).replace(
+            hour=t.hour, minute=t.minute, second=0, microsecond=0
+        )
+    except ValueError as exc:
+        raise HomeAssistantError from exc
     utc_time = zoned_time.astimezone(UTC).time()
     return (utc_time.hour * 60) + utc_time.minute
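The guard matters because a utc_minutes value of 1440 or more yields an hour of 24+, and datetime.replace then raises ValueError; the hunk surfaces that as a HomeAssistantError instead of an unhandled exception. A quick standalone check of the conversion (helper re-derived here for illustration):

from datetime import UTC, datetime, time
from zoneinfo import ZoneInfo


def utc_minutes_to_time(utc_minutes: int, tz) -> time:
    hour, minute = divmod(utc_minutes, 60)  # 90 -> (1, 30)
    utc = datetime.now(UTC).replace(hour=hour, minute=minute, second=0, microsecond=0)
    return utc.astimezone(tz).time()


print(utc_minutes_to_time(90, ZoneInfo("Europe/Berlin")))  # 01:30 UTC as local time
# utc_minutes_to_time(1500, ...) would raise ValueError: hour must be in 0..23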
@@ -105,6 +105,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     if (
         SynoSurveillanceStation.INFO_API_KEY in available_apis
         and SynoSurveillanceStation.HOME_MODE_API_KEY in available_apis
+        and api.surveillance_station is not None
     ):
         coordinator_switches = SynologyDSMSwitchUpdateCoordinator(hass, entry, api)
         await coordinator_switches.async_config_entry_first_refresh()
@@ -116,7 +116,7 @@ class SynoDSMSecurityBinarySensor(SynoDSMBinarySensor):
     @property
     def available(self) -> bool:
         """Return True if entity is available."""
-        return bool(self._api.security)
+        return bool(self._api.security) and super().available

     @property
     def extra_state_attributes(self) -> dict[str, str]:
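This and the following Synology hunks apply one pattern: availability must also respect the base class, so a failed coordinator update now marks the entity unavailable instead of only checking that the API object exists. Schematically, with stand-in classes (none of these names come from the integration):

class FakeApi:
    security = object()  # a truthy handle standing in for the DSM security API


class CoordinatedEntity:
    """Stand-in for the coordinator-backed base entity."""

    last_update_success = True  # flips to False when a refresh fails

    @property
    def available(self) -> bool:
        return self.last_update_success


class SecurityBinarySensor(CoordinatedEntity):
    def __init__(self, api: FakeApi) -> None:
        self._api = api

    @property
    def available(self) -> bool:
        # Both must hold: the API object exists AND the base reports available.
        return bool(self._api.security) and super().available


print(SecurityBinarySensor(FakeApi()).available)  # True until an update fails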
@@ -108,7 +108,7 @@ class SynoDSMCamera(SynologyDSMBaseEntity[SynologyDSMCameraUpdateCoordinator], C
     @property
     def available(self) -> bool:
         """Return the availability of the camera."""
-        return self.camera_data.is_enabled and self.coordinator.last_update_success
+        return self.camera_data.is_enabled and super().available

     @property
     def is_recording(self) -> bool:
@@ -286,18 +286,7 @@ class SynoApi:

     async def async_update(self) -> None:
         """Update function for updating API information."""
-        try:
-            await self._update()
-        except SYNOLOGY_CONNECTION_EXCEPTIONS as err:
-            LOGGER.debug(
-                "Connection error during update of '%s' with exception: %s",
-                self._entry.unique_id,
-                err,
-            )
-            LOGGER.warning(
-                "Connection error during update, fallback by reloading the entry"
-            )
-            await self._hass.config_entries.async_reload(self._entry.entry_id)
+        await self._update()

     async def _update(self) -> None:
         """Update function for updating API information."""
@@ -75,7 +75,7 @@
       }
     },
     "services": {
-      "reboot": "mdi:reboot",
+      "reboot": "mdi:restart",
       "shutdown": "mdi:power"
     }
   }
@@ -366,7 +366,7 @@ class SynoDSMUtilSensor(SynoDSMSensor):
     @property
     def available(self) -> bool:
         """Return True if entity is available."""
-        return bool(self._api.utilisation)
+        return bool(self._api.utilisation) and super().available


 class SynoDSMStorageSensor(SynologyDSMDeviceEntity, SynoDSMSensor):
@@ -98,7 +98,7 @@ class SynoDSMSurveillanceHomeModeToggle(
     @property
     def available(self) -> bool:
         """Return True if entity is available."""
-        return bool(self._api.surveillance_station)
+        return bool(self._api.surveillance_station) and super().available

     @property
     def device_info(self) -> DeviceInfo:
@@ -59,7 +59,7 @@ class SynoDSMUpdateEntity(
     @property
     def available(self) -> bool:
         """Return True if entity is available."""
-        return bool(self._api.upgrade)
+        return bool(self._api.upgrade) and super().available

     @property
     def installed_version(self) -> str | None:
@@ -10,6 +10,6 @@
   "iot_class": "local_push",
   "loggers": ["systembridgeconnector"],
   "quality_scale": "silver",
-  "requirements": ["systembridgeconnector==4.0.3"],
+  "requirements": ["systembridgeconnector==4.0.3", "systembridgemodels==4.0.4"],
   "zeroconf": ["_system-bridge._tcp.local."]
 }
Some files were not shown because too many files have changed in this diff.