forked from home-assistant/core
Compare commits
80 Commits
| SHA1 |
|---|
| 3759adcf2b |
| 17fd03d8fd |
| 972aab3c26 |
| c5adee6821 |
| f98f7f2022 |
| c6b68ed916 |
| 343c2672bb |
| 31a4c649ff |
| 300d2a0881 |
| 1253f7f85d |
| 6dbe5942ed |
| e41cb1e020 |
| 44a4f4115f |
| db148b65e5 |
| 89dfd4b162 |
| a24463f7ea |
| 498da3bba3 |
| 8cf6f50193 |
| 8b0e10d8a5 |
| 2ee4cd02c7 |
| d63569da82 |
| bc7cf1f649 |
| d6b1a7ca68 |
| af248fa386 |
| fa56e3633d |
| e886d37124 |
| f4ed7720de |
| 70473df2fe |
| 0d31d94532 |
| 4678466560 |
| a886c6110d |
| 33f282af46 |
| 2f3232f087 |
| 54ff6ddd41 |
| eef79e2912 |
| 93aad108a7 |
| 792ebbb600 |
| c47774e273 |
| 22bdeab1e7 |
| ca05cde6ba |
| 1e59ce19f5 |
| 7bdada7898 |
| 06a2fe94d3 |
| 854b0dbb2d |
| bd8424d184 |
| b50e3d5ce7 |
| 39c6a57c35 |
| c0482bdbfd |
| d9a41d10ff |
| b401f16583 |
| 373634cc50 |
| 10fb3035d6 |
| f3136c811c |
| 085eee88c9 |
| 6a3b74adf6 |
| 5c512ad5cb |
| 65cb82765b |
| 9f8fe7fca6 |
| 73536c07d7 |
| e0ca5bafda |
| ddc8c0a3b7 |
| ff687a8248 |
| 0d9330c39e |
| 69e8f5bb98 |
| 6a8a97b57c |
| f5e0363117 |
| f1bcfedf84 |
| 0e985284c9 |
| 12e6f143a4 |
| 2b77db2597 |
| a4297c0411 |
| 54b94c4826 |
| b28b204b86 |
| 8558ea2f9a |
| 01b3da1554 |
| 880590da64 |
| b74bd1aa0a |
| caa79d8462 |
| 9295cc4df9 |
| 24e148ab8e |
**homeassistant/components/baf/climate.py**

```diff
@@ -26,7 +26,7 @@ async def async_setup_entry(
 ) -> None:
     """Set up BAF fan auto comfort."""
     data: BAFData = hass.data[DOMAIN][entry.entry_id]
-    if data.device.has_fan:
+    if data.device.has_fan and data.device.has_auto_comfort:
         async_add_entities(
             [BAFAutoComfort(data.device, f"{data.device.name} Auto Comfort")]
         )
```
**homeassistant/components/baf/manifest.json**

```diff
@@ -3,7 +3,7 @@
   "name": "Big Ass Fans",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/baf",
-  "requirements": ["aiobafi6==0.3.0"],
+  "requirements": ["aiobafi6==0.5.0"],
   "codeowners": ["@bdraco", "@jfroy"],
   "iot_class": "local_push",
   "zeroconf": [
```
**homeassistant/components/baf/number.py**

```diff
@@ -36,27 +36,7 @@ class BAFNumberDescription(NumberEntityDescription, BAFNumberDescriptionMixin):
     """Class describing BAF sensor entities."""
 
 
-FAN_NUMBER_DESCRIPTIONS = (
-    BAFNumberDescription(
-        key="return_to_auto_timeout",
-        name="Return to Auto Timeout",
-        min_value=ONE_MIN_SECS,
-        max_value=HALF_DAY_SECS,
-        entity_category=EntityCategory.CONFIG,
-        unit_of_measurement=TIME_SECONDS,
-        value_fn=lambda device: cast(Optional[int], device.return_to_auto_timeout),
-        mode=NumberMode.SLIDER,
-    ),
-    BAFNumberDescription(
-        key="motion_sense_timeout",
-        name="Motion Sense Timeout",
-        min_value=ONE_MIN_SECS,
-        max_value=ONE_DAY_SECS,
-        entity_category=EntityCategory.CONFIG,
-        unit_of_measurement=TIME_SECONDS,
-        value_fn=lambda device: cast(Optional[int], device.motion_sense_timeout),
-        mode=NumberMode.SLIDER,
-    ),
+AUTO_COMFORT_NUMBER_DESCRIPTIONS = (
     BAFNumberDescription(
         key="comfort_min_speed",
         name="Auto Comfort Minimum Speed",
@@ -86,6 +66,29 @@ FAN_NUMBER_DESCRIPTIONS = (
     ),
 )
 
+FAN_NUMBER_DESCRIPTIONS = (
+    BAFNumberDescription(
+        key="return_to_auto_timeout",
+        name="Return to Auto Timeout",
+        min_value=ONE_MIN_SECS,
+        max_value=HALF_DAY_SECS,
+        entity_category=EntityCategory.CONFIG,
+        unit_of_measurement=TIME_SECONDS,
+        value_fn=lambda device: cast(Optional[int], device.return_to_auto_timeout),
+        mode=NumberMode.SLIDER,
+    ),
+    BAFNumberDescription(
+        key="motion_sense_timeout",
+        name="Motion Sense Timeout",
+        min_value=ONE_MIN_SECS,
+        max_value=ONE_DAY_SECS,
+        entity_category=EntityCategory.CONFIG,
+        unit_of_measurement=TIME_SECONDS,
+        value_fn=lambda device: cast(Optional[int], device.motion_sense_timeout),
+        mode=NumberMode.SLIDER,
+    ),
+)
+
 LIGHT_NUMBER_DESCRIPTIONS = (
     BAFNumberDescription(
         key="light_return_to_auto_timeout",
@@ -125,6 +128,8 @@ async def async_setup_entry(
         descriptions.extend(FAN_NUMBER_DESCRIPTIONS)
     if device.has_light:
         descriptions.extend(LIGHT_NUMBER_DESCRIPTIONS)
+    if device.has_auto_comfort:
+        descriptions.extend(AUTO_COMFORT_NUMBER_DESCRIPTIONS)
     async_add_entities(BAFNumber(device, description) for description in descriptions)
```
**homeassistant/components/baf/sensor.py**

```diff
@@ -39,7 +39,7 @@ class BAFSensorDescription(
     """Class describing BAF sensor entities."""
 
 
-BASE_SENSORS = (
+AUTO_COMFORT_SENSORS = (
     BAFSensorDescription(
         key="temperature",
         name="Temperature",
@@ -103,10 +103,12 @@ async def async_setup_entry(
     """Set up BAF fan sensors."""
     data: BAFData = hass.data[DOMAIN][entry.entry_id]
     device = data.device
-    sensors_descriptions = list(BASE_SENSORS)
+    sensors_descriptions: list[BAFSensorDescription] = []
     for description in DEFINED_ONLY_SENSORS:
         if getattr(device, description.key):
             sensors_descriptions.append(description)
+    if device.has_auto_comfort:
+        sensors_descriptions.extend(AUTO_COMFORT_SENSORS)
     if device.has_fan:
         sensors_descriptions.extend(FAN_SENSORS)
     async_add_entities(
```
**homeassistant/components/baf/switch.py**

```diff
@@ -48,13 +48,16 @@ BASE_SWITCHES = [
     ),
 ]
 
-FAN_SWITCHES = [
+AUTO_COMFORT_SWITCHES = [
     BAFSwitchDescription(
         key="comfort_heat_assist_enable",
         name="Auto Comfort Heat Assist",
         entity_category=EntityCategory.CONFIG,
         value_fn=lambda device: cast(Optional[bool], device.comfort_heat_assist_enable),
     ),
+]
+
+FAN_SWITCHES = [
     BAFSwitchDescription(
         key="fan_beep_enable",
         name="Beep",
@@ -120,6 +123,8 @@ async def async_setup_entry(
         descriptions.extend(FAN_SWITCHES)
     if device.has_light:
         descriptions.extend(LIGHT_SWITCHES)
+    if device.has_auto_comfort:
+        descriptions.extend(AUTO_COMFORT_SWITCHES)
     async_add_entities(BAFSwitch(device, description) for description in descriptions)
```
**homeassistant/components/bmw_connected_drive/manifest.json**

```diff
@@ -2,7 +2,7 @@
   "domain": "bmw_connected_drive",
   "name": "BMW Connected Drive",
   "documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive",
-  "requirements": ["bimmer_connected==0.9.3"],
+  "requirements": ["bimmer_connected==0.9.4"],
   "codeowners": ["@gerard33", "@rikroe"],
   "config_flow": true,
   "iot_class": "cloud_polling",
```
**homeassistant/components/caldav/calendar.py**

```diff
@@ -1,7 +1,6 @@
 """Support for WebDav Calendar."""
 from __future__ import annotations
 
-import copy
 from datetime import datetime, timedelta
 import logging
 import re
@@ -143,15 +142,13 @@ class WebDavCalendarEntity(CalendarEntity):
     def update(self):
         """Update event data."""
         self.data.update()
-        event = copy.deepcopy(self.data.event)
-        if event is None:
-            self._event = event
-            return
-        (summary, offset) = extract_offset(event.summary, OFFSET)
-        event.summary = summary
-        self._event = event
+        self._event = self.data.event
         self._attr_extra_state_attributes = {
-            "offset_reached": is_offset_reached(event.start_datetime_local, offset)
+            "offset_reached": is_offset_reached(
+                self._event.start_datetime_local, self.data.offset
+            )
+            if self._event
+            else False
         }
 
 
@@ -165,6 +162,7 @@ class WebDavCalendarData:
         self.include_all_day = include_all_day
         self.search = search
         self.event = None
+        self.offset = None
 
     async def async_get_events(
         self, hass: HomeAssistant, start_date: datetime, end_date: datetime
@@ -264,13 +262,15 @@ class WebDavCalendarData:
             return
 
         # Populate the entity attributes with the event values
+        (summary, offset) = extract_offset(vevent.summary.value, OFFSET)
         self.event = CalendarEvent(
-            summary=vevent.summary.value,
+            summary=summary,
             start=vevent.dtstart.value,
             end=self.get_end_date(vevent),
             location=self.get_attr_value(vevent, "location"),
             description=self.get_attr_value(vevent, "description"),
         )
+        self.offset = offset
 
     @staticmethod
     def is_matching(vevent, search):
```
**homeassistant/components/cloud/google_config.py**

```diff
@@ -39,7 +39,6 @@ class CloudGoogleConfig(AbstractConfig):
         self._cur_entity_prefs = self._prefs.google_entity_configs
         self._cur_default_expose = self._prefs.google_default_expose
         self._sync_entities_lock = asyncio.Lock()
-        self._sync_on_started = False
 
     @property
     def enabled(self):
@@ -224,7 +223,7 @@ class CloudGoogleConfig(AbstractConfig):
             self._cur_entity_prefs = prefs.google_entity_configs
             self._cur_default_expose = prefs.google_default_expose
 
-        if sync_entities:
+        if sync_entities and self.hass.is_running:
             await self.async_sync_entities_all()
 
     @callback
```
**homeassistant/components/dlna_dmr/manifest.json**

```diff
@@ -3,7 +3,7 @@
   "name": "DLNA Digital Media Renderer",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/dlna_dmr",
-  "requirements": ["async-upnp-client==0.30.1"],
+  "requirements": ["async-upnp-client==0.31.1"],
   "dependencies": ["ssdp"],
   "after_dependencies": ["media_source"],
   "ssdp": [
```
**homeassistant/components/dlna_dms/manifest.json**

```diff
@@ -3,7 +3,7 @@
   "name": "DLNA Digital Media Server",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/dlna_dms",
-  "requirements": ["async-upnp-client==0.30.1"],
+  "requirements": ["async-upnp-client==0.31.1"],
   "dependencies": ["ssdp"],
   "after_dependencies": ["media_source"],
   "ssdp": [
```
**homeassistant/components/elkm1/__init__.py**

```diff
@@ -2,6 +2,7 @@
 from __future__ import annotations
 
 import asyncio
+from enum import Enum
 import logging
 import re
 from types import MappingProxyType
@@ -481,7 +482,10 @@ class ElkEntity(Entity):
     @property
     def extra_state_attributes(self) -> dict[str, Any]:
         """Return the default attributes of the element."""
-        return {**self._element.as_dict(), **self.initial_attrs()}
+        dict_as_str = {}
+        for key, val in self._element.as_dict().items():
+            dict_as_str[key] = val.value if isinstance(val, Enum) else val
+        return {**dict_as_str, **self.initial_attrs()}
 
     @property
     def available(self) -> bool:
```
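Note: the elkm1 change above stops raw `Enum` members from leaking into entity attributes. A minimal sketch of the problem it fixes, using a hypothetical enum in place of an elkm1 element value:

```python
from enum import Enum


class ArmLevel(Enum):  # hypothetical stand-in for an elkm1 element value
    DISARMED = 0
    ARMED_AWAY = 1


attrs = {"arm_level": ArmLevel.ARMED_AWAY, "name": "Front door"}

# Raw Enum members are not JSON serializable and render as
# "ArmLevel.ARMED_AWAY"; unwrapping to .value keeps the plain payload.
clean = {k: v.value if isinstance(v, Enum) else v for k, v in attrs.items()}
assert clean == {"arm_level": 1, "name": "Front door"}
```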
**homeassistant/components/esphome/media_player.py**

```diff
@@ -25,7 +25,12 @@ from homeassistant.const import STATE_IDLE, STATE_PAUSED, STATE_PLAYING
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 
-from . import EsphomeEntity, EsphomeEnumMapper, platform_async_setup_entry
+from . import (
+    EsphomeEntity,
+    EsphomeEnumMapper,
+    esphome_state_property,
+    platform_async_setup_entry,
+)
 
 
 async def async_setup_entry(
@@ -54,6 +59,10 @@ _STATES: EsphomeEnumMapper[MediaPlayerState, str] = EsphomeEnumMapper(
 )
 
 
+# https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property
+# pylint: disable=invalid-overridden-method
+
+
 class EsphomeMediaPlayer(
     EsphomeEntity[MediaPlayerInfo, MediaPlayerEntityState], MediaPlayerEntity
 ):
@@ -61,17 +70,17 @@ class EsphomeMediaPlayer(
 
     _attr_device_class = MediaPlayerDeviceClass.SPEAKER
 
-    @property
+    @esphome_state_property
     def state(self) -> str | None:
         """Return current state."""
         return _STATES.from_esphome(self._state.state)
 
-    @property
+    @esphome_state_property
     def is_volume_muted(self) -> bool:
         """Return true if volume is muted."""
         return self._state.muted
 
-    @property
+    @esphome_state_property
     def volume_level(self) -> float | None:
         """Volume level of the media player (0..1)."""
         return self._state.volume
```
**homeassistant/components/feedreader/__init__.py**

```diff
@@ -70,6 +70,7 @@ class FeedManager:
         self._last_entry_timestamp = None
         self._last_update_successful = False
         self._has_published_parsed = False
+        self._has_updated_parsed = False
         self._event_type = EVENT_FEEDREADER
         self._feed_id = url
         hass.bus.listen_once(EVENT_HOMEASSISTANT_START, lambda _: self._update())
@@ -122,7 +123,7 @@ class FeedManager:
             )
             self._filter_entries()
             self._publish_new_entries()
-            if self._has_published_parsed:
+            if self._has_published_parsed or self._has_updated_parsed:
                 self._storage.put_timestamp(
                     self._feed_id, self._last_entry_timestamp
                 )
@@ -143,7 +144,7 @@ class FeedManager:
 
     def _update_and_fire_entry(self, entry):
         """Update last_entry_timestamp and fire entry."""
-        # Check if the entry has a published date.
+        # Check if the entry has a published or updated date.
         if "published_parsed" in entry and entry.published_parsed:
             # We are lucky, `published_parsed` data available, let's make use of
             # it to publish only new available entries since the last run
@@ -151,9 +152,20 @@ class FeedManager:
             self._last_entry_timestamp = max(
                 entry.published_parsed, self._last_entry_timestamp
             )
+        elif "updated_parsed" in entry and entry.updated_parsed:
+            # We are lucky, `updated_parsed` data available, let's make use of
+            # it to publish only new available entries since the last run
+            self._has_updated_parsed = True
+            self._last_entry_timestamp = max(
+                entry.updated_parsed, self._last_entry_timestamp
+            )
         else:
             self._has_published_parsed = False
-            _LOGGER.debug("No published_parsed info available for entry %s", entry)
+            self._has_updated_parsed = False
+            _LOGGER.debug(
+                "No published_parsed or updated_parsed info available for entry %s",
+                entry,
+            )
         entry.update({"feed_url": self._url})
         self._hass.bus.fire(self._event_type, entry)
 
@@ -167,9 +179,16 @@ class FeedManager:
             # Set last entry timestamp as epoch time if not available
             self._last_entry_timestamp = datetime.utcfromtimestamp(0).timetuple()
         for entry in self._feed.entries:
-            if self._firstrun or (
-                "published_parsed" in entry
-                and entry.published_parsed > self._last_entry_timestamp
+            if (
+                self._firstrun
+                or (
+                    "published_parsed" in entry
+                    and entry.published_parsed > self._last_entry_timestamp
+                )
+                or (
+                    "updated_parsed" in entry
+                    and entry.updated_parsed > self._last_entry_timestamp
+                )
             ):
                 self._update_and_fire_entry(entry)
                 new_entries = True
```
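Note: the feedreader change falls back to `updated_parsed` when an entry has no `published_parsed`. A minimal sketch of the selection order, using a plain dict in place of a feedparser entry:

```python
import time


def entry_timestamp(entry: dict):
    """Prefer published_parsed, fall back to updated_parsed, else None."""
    if entry.get("published_parsed"):
        return entry["published_parsed"]
    if entry.get("updated_parsed"):
        return entry["updated_parsed"]
    return None


# An Atom-style entry that only carries an updated timestamp is still usable.
entry = {"updated_parsed": time.gmtime(0)}
assert entry_timestamp(entry) == time.gmtime(0)
```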
**homeassistant/components/fibaro/cover.py**

```diff
@@ -46,6 +46,8 @@ class FibaroCover(FibaroDevice, CoverEntity):
         self._attr_supported_features = (
             CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
         )
+        if "stop" in self.fibaro_device.actions:
+            self._attr_supported_features |= CoverEntityFeature.STOP
 
     @staticmethod
     def bound(position):
```
**homeassistant/components/fjaraskupan/__init__.py**

```diff
@@ -9,7 +9,7 @@ import logging
 from bleak import BleakScanner
 from bleak.backends.device import BLEDevice
 from bleak.backends.scanner import AdvertisementData
-from fjaraskupan import DEVICE_NAME, Device, State, device_filter
+from fjaraskupan import Device, State, device_filter
 
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import Platform
@@ -90,7 +90,7 @@ class EntryState:
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up Fjäråskupan from a config entry."""
 
-    scanner = BleakScanner(filters={"Pattern": DEVICE_NAME, "DuplicateData": True})
+    scanner = BleakScanner(filters={"DuplicateData": True})
 
     state = EntryState(scanner, {})
     hass.data.setdefault(DOMAIN, {})
```
**homeassistant/components/fjaraskupan/config_flow.py**

```diff
@@ -7,7 +7,7 @@ import async_timeout
 from bleak import BleakScanner
 from bleak.backends.device import BLEDevice
 from bleak.backends.scanner import AdvertisementData
-from fjaraskupan import DEVICE_NAME, device_filter
+from fjaraskupan import device_filter
 
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.config_entry_flow import register_discovery_flow
@@ -28,7 +28,7 @@ async def _async_has_devices(hass: HomeAssistant) -> bool:
 
     async with BleakScanner(
         detection_callback=detection,
-        filters={"Pattern": DEVICE_NAME, "DuplicateData": True},
+        filters={"DuplicateData": True},
     ):
         try:
             async with async_timeout.timeout(CONST_WAIT_TIME):
```
**homeassistant/components/fritz/switch.py**

```diff
@@ -169,7 +169,16 @@ def wifi_entities_list(
     }
     for i, network in networks.copy().items():
         networks[i]["switch_name"] = network["ssid"]
-        if len([j for j, n in networks.items() if n["ssid"] == network["ssid"]]) > 1:
+        if (
+            len(
+                [
+                    j
+                    for j, n in networks.items()
+                    if slugify(n["ssid"]) == slugify(network["ssid"])
+                ]
+            )
+            > 1
+        ):
             networks[i]["switch_name"] += f" ({WIFI_STANDARD[i]})"
 
     _LOGGER.debug("WiFi networks list: %s", networks)
```
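Note: the duplicate check above now compares SSIDs after `slugify`, because entity names derived from SSIDs collide once slugified even when the raw strings differ. A small sketch of the collision, assuming `homeassistant.util.slugify` semantics:

```python
from homeassistant.util import slugify

# Distinct raw SSIDs that slugify to the same string would otherwise
# produce two switches with the same name; appending the WiFi standard
# disambiguates them.
assert slugify("My WiFi") == slugify("my wifi") == "my_wifi"
```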
**homeassistant/components/fritzbox/sensor.py**

```diff
@@ -96,7 +96,9 @@ SENSOR_TYPES: Final[tuple[FritzSensorEntityDescription, ...]] = (
         device_class=SensorDeviceClass.VOLTAGE,
         state_class=SensorStateClass.MEASUREMENT,
         suitable=lambda device: device.has_powermeter,  # type: ignore[no-any-return]
-        native_value=lambda device: device.voltage / 1000 if device.voltage else 0.0,
+        native_value=lambda device: device.voltage / 1000
+        if getattr(device, "voltage", None)
+        else 0.0,
     ),
     FritzSensorEntityDescription(
         key="electric_current",
@@ -106,7 +108,7 @@ SENSOR_TYPES: Final[tuple[FritzSensorEntityDescription, ...]] = (
         state_class=SensorStateClass.MEASUREMENT,
         suitable=lambda device: device.has_powermeter,  # type: ignore[no-any-return]
         native_value=lambda device: device.power / device.voltage
-        if device.power and device.voltage
+        if device.power and getattr(device, "voltage", None)
         else 0.0,
     ),
     FritzSensorEntityDescription(
```
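Note: both lambdas now read `voltage` via `getattr(device, "voltage", None)`, so a device object that lacks the attribute entirely degrades to `0.0` instead of raising `AttributeError`. Python's lazily evaluated conditional expression makes this safe, sketched with a hypothetical device object:

```python
class PlugWithoutVoltage:  # hypothetical device object missing the attribute
    power = 5.0


device = PlugWithoutVoltage()
# The true-branch (device.power / device.voltage) is never evaluated when
# the guard is falsy, so the missing attribute is never touched.
current = (
    device.power / device.voltage
    if device.power and getattr(device, "voltage", None)
    else 0.0
)
assert current == 0.0
```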
**homeassistant/components/frontend/__init__.py**

```diff
@@ -460,7 +460,7 @@ async def _async_setup_themes(
     async def reload_themes(_: ServiceCall) -> None:
         """Reload themes."""
         config = await async_hass_config_yaml(hass)
-        new_themes = config[DOMAIN].get(CONF_THEMES, {})
+        new_themes = config.get(DOMAIN, {}).get(CONF_THEMES, {})
         hass.data[DATA_THEMES] = new_themes
         if hass.data[DATA_DEFAULT_THEME] not in new_themes:
             hass.data[DATA_DEFAULT_THEME] = DEFAULT_THEME
```
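Note: the theme-reload fix swaps `config[DOMAIN]` for a chained `.get()`, so reloading themes no longer raises `KeyError` when the reloaded YAML has no `frontend:` section. A minimal sketch:

```python
DOMAIN = "frontend"
CONF_THEMES = "themes"

config = {}  # reloaded configuration.yaml without a `frontend:` section

# config[DOMAIN] would raise KeyError here; the chained .get() yields {}.
new_themes = config.get(DOMAIN, {}).get(CONF_THEMES, {})
assert new_themes == {}
```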
**homeassistant/components/frontend/manifest.json**

```diff
@@ -2,7 +2,7 @@
   "domain": "frontend",
   "name": "Home Assistant Frontend",
   "documentation": "https://www.home-assistant.io/integrations/frontend",
-  "requirements": ["home-assistant-frontend==20220531.0"],
+  "requirements": ["home-assistant-frontend==20220601.0"],
   "dependencies": [
     "api",
     "auth",
```
**homeassistant/components/google/api.py**

```diff
@@ -5,7 +5,6 @@ from __future__ import annotations
 from collections.abc import Awaitable, Callable
 import datetime
 import logging
-import time
 from typing import Any, cast
 
 import aiohttp
@@ -50,12 +49,16 @@ class DeviceAuth(AuthImplementation):
     async def async_resolve_external_data(self, external_data: Any) -> dict:
         """Resolve a Google API Credentials object to Home Assistant token."""
         creds: Credentials = external_data[DEVICE_AUTH_CREDS]
+        delta = creds.token_expiry.replace(tzinfo=datetime.timezone.utc) - dt.utcnow()
+        _LOGGER.debug(
+            "Token expires at %s (in %s)", creds.token_expiry, delta.total_seconds()
+        )
         return {
             "access_token": creds.access_token,
             "refresh_token": creds.refresh_token,
             "scope": " ".join(creds.scopes),
             "token_type": "Bearer",
-            "expires_in": creds.token_expiry.timestamp() - time.time(),
+            "expires_in": delta.total_seconds(),
         }
```
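Note: `creds.token_expiry` is a naive datetime that is semantically UTC, so the google change attaches `timezone.utc` before subtracting the current UTC time; `timestamp() - time.time()` interpreted the naive value in the local timezone and skewed `expires_in`. The arithmetic, sketched with fixed values:

```python
import datetime

token_expiry = datetime.datetime(2022, 6, 2, 12, 0)  # naive, known to be UTC
utcnow = datetime.datetime(2022, 6, 2, 11, 0, tzinfo=datetime.timezone.utc)

delta = token_expiry.replace(tzinfo=datetime.timezone.utc) - utcnow
assert delta.total_seconds() == 3600.0  # one hour, independent of local tz
```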
**homeassistant/components/history_stats/data.py**

```diff
@@ -67,13 +67,15 @@ class HistoryStats:
         current_period_end_timestamp = floored_timestamp(current_period_end)
         previous_period_start_timestamp = floored_timestamp(previous_period_start)
         previous_period_end_timestamp = floored_timestamp(previous_period_end)
-        now_timestamp = floored_timestamp(datetime.datetime.now())
+        utc_now = dt_util.utcnow()
+        now_timestamp = floored_timestamp(utc_now)
 
-        if now_timestamp < current_period_start_timestamp:
+        if current_period_start > utc_now:
             # History cannot tell the future
             self._history_current_period = []
             self._previous_run_before_start = True
-
+            self._state = HistoryStatsState(None, None, self._period)
+            return self._state
         #
         # We avoid querying the database if the below did NOT happen:
         #
@@ -82,7 +84,7 @@ class HistoryStats:
         # - The period shrank in size
         # - The previous period ended before now
         #
-        elif (
+        if (
             not self._previous_run_before_start
             and current_period_start_timestamp == previous_period_start_timestamp
             and (
@@ -117,10 +119,6 @@ class HistoryStats:
             )
             self._previous_run_before_start = False
 
-        if not self._history_current_period:
-            self._state = HistoryStatsState(None, None, self._period)
-            return self._state
-
         hours_matched, match_count = self._async_compute_hours_and_changes(
             now_timestamp,
             current_period_start_timestamp,
```
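Note: the history_stats fix compares the period start against `dt_util.utcnow()` instead of a naive `datetime.datetime.now()`, and returns an empty state immediately instead of falling through. With a local clock offset from UTC, the naive comparison could misjudge whether the period had started. A sketch of the corrected, timezone-aware check:

```python
import datetime

utc_now = datetime.datetime(2022, 6, 1, 10, 0, tzinfo=datetime.timezone.utc)
period_start = datetime.datetime(2022, 6, 1, 11, 0, tzinfo=datetime.timezone.utc)

# Aware comparison: the period genuinely lies in the future, so there is
# nothing to query ("history cannot tell the future").
assert period_start > utc_now
```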
**homeassistant/components/hive/__init__.py**

```diff
@@ -75,14 +75,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up Hive from a config entry."""
 
-    websession = aiohttp_client.async_get_clientsession(hass)
+    web_session = aiohttp_client.async_get_clientsession(hass)
     hive_config = dict(entry.data)
-    hive = Hive(
-        websession,
-        deviceGroupKey=hive_config["device_data"][0],
-        deviceKey=hive_config["device_data"][1],
-        devicePassword=hive_config["device_data"][2],
-    )
+    hive = Hive(web_session)
 
     hive_config["options"] = {}
     hive_config["options"].update(
```
**homeassistant/components/hive/config_flow.py**

```diff
@@ -27,6 +27,7 @@ class HiveFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
         self.data = {}
         self.tokens = {}
         self.entry = None
+        self.device_registration = False
 
     async def async_step_user(self, user_input=None):
         """Prompt user input. Create or edit entry."""
@@ -88,6 +89,7 @@ class HiveFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
 
         if not errors:
             try:
+                self.device_registration = True
                 return await self.async_setup_hive_entry()
             except UnknownHiveError:
                 errors["base"] = "unknown"
@@ -102,8 +104,10 @@ class HiveFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
             raise UnknownHiveError
 
         # Setup the config entry
+        if self.device_registration:
+            await self.hive_auth.device_registration("Home Assistant")
+            self.data["device_data"] = await self.hive_auth.getDeviceData()
         self.data["tokens"] = self.tokens
-        self.data["device_data"] = await self.hive_auth.getDeviceData()
         if self.context["source"] == config_entries.SOURCE_REAUTH:
             self.hass.config_entries.async_update_entry(
                 self.entry, title=self.data["username"], data=self.data
```
**homeassistant/components/hive/manifest.json**

```diff
@@ -3,7 +3,7 @@
   "name": "Hive",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/hive",
-  "requirements": ["pyhiveapi==0.5.4"],
+  "requirements": ["pyhiveapi==0.5.9"],
   "codeowners": ["@Rendili", "@KJonline"],
   "iot_class": "cloud_polling",
   "loggers": ["apyhiveapi"]
```
**homeassistant/components/ialarm_xr/manifest.json**

```diff
@@ -2,7 +2,7 @@
   "domain": "ialarm_xr",
   "name": "Antifurto365 iAlarmXR",
   "documentation": "https://www.home-assistant.io/integrations/ialarm_xr",
-  "requirements": ["pyialarmxr==1.0.18"],
+  "requirements": ["pyialarmxr-homeassistant==1.0.18"],
   "codeowners": ["@bigmoby"],
   "config_flow": true,
   "iot_class": "cloud_polling",
```
**homeassistant/components/isy994/climate.py**

```diff
@@ -117,7 +117,7 @@ class ISYThermostatEntity(ISYNodeEntity, ClimateEntity):
         """Return the current humidity."""
         if not (humidity := self._node.aux_properties.get(PROP_HUMIDITY)):
             return None
-        if humidity == ISY_VALUE_UNKNOWN:
+        if humidity.value == ISY_VALUE_UNKNOWN:
             return None
         return int(humidity.value)
```
**homeassistant/components/kodi/media_player.py**

```diff
@@ -636,11 +636,6 @@ class KodiEntity(MediaPlayerEntity):
 
         return None
 
-    @property
-    def available(self):
-        """Return True if entity is available."""
-        return not self._connect_error
-
     async def async_turn_on(self):
         """Turn the media player on."""
         _LOGGER.debug("Firing event to turn on device")
```
**homeassistant/components/lcn/manifest.json**

```diff
@@ -3,7 +3,7 @@
   "name": "LCN",
   "config_flow": false,
   "documentation": "https://www.home-assistant.io/integrations/lcn",
-  "requirements": ["pypck==0.7.14"],
+  "requirements": ["pypck==0.7.15"],
   "codeowners": ["@alengwenus"],
   "iot_class": "local_push",
   "loggers": ["pypck"]
```
**homeassistant/components/logbook/const.py**

```diff
@@ -2,9 +2,18 @@
 from __future__ import annotations
 
 from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED
+from homeassistant.components.counter import DOMAIN as COUNTER_DOMAIN
+from homeassistant.components.proximity import DOMAIN as PROXIMITY_DOMAIN
 from homeassistant.components.script import EVENT_SCRIPT_STARTED
+from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
 from homeassistant.const import EVENT_CALL_SERVICE, EVENT_LOGBOOK_ENTRY
 
+# Domains that are always continuous
+ALWAYS_CONTINUOUS_DOMAINS = {COUNTER_DOMAIN, PROXIMITY_DOMAIN}
+
+# Domains that are continuous if there is a UOM set on the entity
+CONDITIONALLY_CONTINUOUS_DOMAINS = {SENSOR_DOMAIN}
+
 ATTR_MESSAGE = "message"
 
 DOMAIN = "logbook"
@@ -30,13 +39,11 @@ LOGBOOK_ENTRY_NAME = "name"
 LOGBOOK_ENTRY_STATE = "state"
 LOGBOOK_ENTRY_WHEN = "when"
 
-ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED = {EVENT_LOGBOOK_ENTRY, EVENT_CALL_SERVICE}
-ENTITY_EVENTS_WITHOUT_CONFIG_ENTRY = {
-    EVENT_LOGBOOK_ENTRY,
-    EVENT_AUTOMATION_TRIGGERED,
-    EVENT_SCRIPT_STARTED,
-}
+# Automation events that can affect an entity_id or device_id
+AUTOMATION_EVENTS = {EVENT_AUTOMATION_TRIGGERED, EVENT_SCRIPT_STARTED}
+
+# Events that are built-in to the logbook or core
+BUILT_IN_EVENTS = {EVENT_LOGBOOK_ENTRY, EVENT_CALL_SERVICE}
 
 LOGBOOK_FILTERS = "logbook_filters"
 LOGBOOK_ENTITIES_FILTER = "entities_filter"
```
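Note: the logbook now separates domains that are always continuous (counter, proximity) from those that are continuous only when a unit of measurement is set (sensor). A sketch of the resulting membership test, mirroring the constants above:

```python
ALWAYS_CONTINUOUS_DOMAINS = {"counter", "proximity"}
CONDITIONALLY_CONTINUOUS_DOMAINS = {"sensor"}


def is_continuous(entity_id: str, has_uom: bool) -> bool:
    """Continuous entities are excluded from the logbook stream."""
    domain = entity_id.split(".", 1)[0]
    if domain in ALWAYS_CONTINUOUS_DOMAINS:
        return True
    return domain in CONDITIONALLY_CONTINUOUS_DOMAINS and has_uom


assert is_continuous("proximity.home", has_uom=False)
assert not is_continuous("sensor.last_boot", has_uom=False)
assert is_continuous("sensor.outdoor_temperature", has_uom=True)
```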
**homeassistant/components/logbook/helpers.py**

```diff
@@ -7,6 +7,7 @@ from typing import Any
 from homeassistant.components.sensor import ATTR_STATE_CLASS
 from homeassistant.const import (
+    ATTR_DEVICE_ID,
     ATTR_DOMAIN,
     ATTR_ENTITY_ID,
     ATTR_UNIT_OF_MEASUREMENT,
     EVENT_LOGBOOK_ENTRY,
@@ -19,15 +20,13 @@ from homeassistant.core import (
     State,
     callback,
+    is_callback,
     split_entity_id,
 )
 from homeassistant.helpers import device_registry as dr, entity_registry as er
+from homeassistant.helpers.entityfilter import EntityFilter
 from homeassistant.helpers.event import async_track_state_change_event
 
-from .const import (
-    ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED,
-    DOMAIN,
-    ENTITY_EVENTS_WITHOUT_CONFIG_ENTRY,
-)
+from .const import ALWAYS_CONTINUOUS_DOMAINS, AUTOMATION_EVENTS, BUILT_IN_EVENTS, DOMAIN
 from .models import LazyEventPartialState
@@ -41,6 +40,25 @@ def async_filter_entities(hass: HomeAssistant, entity_ids: list[str]) -> list[str]:
     ]
 
 
+@callback
+def _async_config_entries_for_ids(
+    hass: HomeAssistant, entity_ids: list[str] | None, device_ids: list[str] | None
+) -> set[str]:
+    """Find the config entry ids for a set of entities or devices."""
+    config_entry_ids: set[str] = set()
+    if entity_ids:
+        eng_reg = er.async_get(hass)
+        for entity_id in entity_ids:
+            if (entry := eng_reg.async_get(entity_id)) and entry.config_entry_id:
+                config_entry_ids.add(entry.config_entry_id)
+    if device_ids:
+        dev_reg = dr.async_get(hass)
+        for device_id in device_ids:
+            if (device := dev_reg.async_get(device_id)) and device.config_entries:
+                config_entry_ids |= device.config_entries
+    return config_entry_ids
+
+
 def async_determine_event_types(
     hass: HomeAssistant, entity_ids: list[str] | None, device_ids: list[str] | None
 ) -> tuple[str, ...]:
@@ -49,42 +67,91 @@ def async_determine_event_types(
         str, tuple[str, Callable[[LazyEventPartialState], dict[str, Any]]]
     ] = hass.data.get(DOMAIN, {})
     if not entity_ids and not device_ids:
-        return (*ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED, *external_events)
-    config_entry_ids: set[str] = set()
-    intrested_event_types: set[str] = set()
+        return (*BUILT_IN_EVENTS, *external_events)
+
+    interested_domains: set[str] = set()
+    for entry_id in _async_config_entries_for_ids(hass, entity_ids, device_ids):
+        if entry := hass.config_entries.async_get_entry(entry_id):
+            interested_domains.add(entry.domain)
 
+    #
+    # automations and scripts can refer to entities or devices
+    # but they do not have a config entry so we need
+    # to add them since we have historically included
+    # them when matching only on entities
+    #
+    intrested_event_types: set[str] = {
+        external_event
+        for external_event, domain_call in external_events.items()
+        if domain_call[0] in interested_domains
+    } | AUTOMATION_EVENTS
     if entity_ids:
-        #
-        # Home Assistant doesn't allow firing events from
-        # entities so we have a limited list to check
-        #
-        # automations and scripts can refer to entities
-        # but they do not have a config entry so we need
-        # to add them.
-        #
-        # We also allow entity_ids to be recorded via
-        # manual logbook entries.
-        #
-        intrested_event_types |= ENTITY_EVENTS_WITHOUT_CONFIG_ENTRY
+        # We also allow entity_ids to be recorded via manual logbook entries.
+        intrested_event_types.add(EVENT_LOGBOOK_ENTRY)
 
-    if device_ids:
-        dev_reg = dr.async_get(hass)
-        for device_id in device_ids:
-            if (device := dev_reg.async_get(device_id)) and device.config_entries:
-                config_entry_ids |= device.config_entries
-        interested_domains: set[str] = set()
-        for entry_id in config_entry_ids:
-            if entry := hass.config_entries.async_get_entry(entry_id):
-                interested_domains.add(entry.domain)
-        for external_event, domain_call in external_events.items():
-            if domain_call[0] in interested_domains:
-                intrested_event_types.add(external_event)
-    return tuple(intrested_event_types)
+    return tuple(
+        event_type
+        for event_type in (EVENT_LOGBOOK_ENTRY, *external_events)
+        if event_type in intrested_event_types
+    )
+
+
+@callback
+def extract_attr(source: dict[str, Any], attr: str) -> list[str]:
+    """Extract an attribute as a list or string."""
+    if (value := source.get(attr)) is None:
+        return []
+    if isinstance(value, list):
+        return value
+    return str(value).split(",")
+
+
+@callback
+def event_forwarder_filtered(
+    target: Callable[[Event], None],
+    entities_filter: EntityFilter | None,
+    entity_ids: list[str] | None,
+    device_ids: list[str] | None,
+) -> Callable[[Event], None]:
+    """Make a callable to filter events."""
+    if not entities_filter and not entity_ids and not device_ids:
+        # No filter
+        # - Script Trace (context ids)
+        # - Automation Trace (context ids)
+        return target
+
+    if entities_filter:
+        # We have an entity filter:
+        # - Logbook panel
+
+        @callback
+        def _forward_events_filtered_by_entities_filter(event: Event) -> None:
+            assert entities_filter is not None
+            event_data = event.data
+            entity_ids = extract_attr(event_data, ATTR_ENTITY_ID)
+            if entity_ids and not any(
+                entities_filter(entity_id) for entity_id in entity_ids
+            ):
+                return
+            domain = event_data.get(ATTR_DOMAIN)
+            if domain and not entities_filter(f"{domain}._"):
+                return
+            target(event)
+
+        return _forward_events_filtered_by_entities_filter
+
+    # We are filtering on entity_ids and/or device_ids:
+    # - Areas
+    # - Devices
+    # - Logbook Card
+    entity_ids_set = set(entity_ids) if entity_ids else set()
+    device_ids_set = set(device_ids) if device_ids else set()
+
+    @callback
+    def _forward_events_filtered_by_device_entity_ids(event: Event) -> None:
+        event_data = event.data
+        if entity_ids_set.intersection(
+            extract_attr(event_data, ATTR_ENTITY_ID)
+        ) or device_ids_set.intersection(extract_attr(event_data, ATTR_DEVICE_ID)):
+            target(event)
+
+    return _forward_events_filtered_by_device_entity_ids
 
 
 @callback
@@ -93,6 +160,7 @@ def async_subscribe_events(
     subscriptions: list[CALLBACK_TYPE],
     target: Callable[[Event], None],
     event_types: tuple[str, ...],
+    entities_filter: EntityFilter | None,
     entity_ids: list[str] | None,
     device_ids: list[str] | None,
 ) -> None:
@@ -103,41 +171,31 @@ def async_subscribe_events(
     """
     ent_reg = er.async_get(hass)
-    event_forwarder = target
-
-    if entity_ids or device_ids:
-        entity_ids_set = set(entity_ids) if entity_ids else set()
-        device_ids_set = set(device_ids) if device_ids else set()
-
-        @callback
-        def _forward_events_filtered(event: Event) -> None:
-            event_data = event.data
-            if (
-                entity_ids_set and event_data.get(ATTR_ENTITY_ID) in entity_ids_set
-            ) or (device_ids_set and event_data.get(ATTR_DEVICE_ID) in device_ids_set):
-                target(event)
-
-        event_forwarder = _forward_events_filtered
-
+    assert is_callback(target), "target must be a callback"
+    event_forwarder = event_forwarder_filtered(
+        target, entities_filter, entity_ids, device_ids
+    )
     for event_type in event_types:
         subscriptions.append(
             hass.bus.async_listen(event_type, event_forwarder, run_immediately=True)
         )
 
-    @callback
-    def _forward_state_events_filtered(event: Event) -> None:
-        if event.data.get("old_state") is None or event.data.get("new_state") is None:
-            return
-        state: State = event.data["new_state"]
-        if not _is_state_filtered(ent_reg, state):
-            target(event)
-
     if device_ids and not entity_ids:
         # No entities to subscribe to but we are filtering
         # on device ids so we do not want to get any state
         # changed events
         return
 
+    @callback
+    def _forward_state_events_filtered(event: Event) -> None:
+        if event.data.get("old_state") is None or event.data.get("new_state") is None:
+            return
+        state: State = event.data["new_state"]
+        if _is_state_filtered(ent_reg, state) or (
+            entities_filter and not entities_filter(state.entity_id)
+        ):
+            return
+        target(event)
+
     if entity_ids:
         subscriptions.append(
             async_track_state_change_event(
@@ -178,7 +236,8 @@ def _is_state_filtered(ent_reg: er.EntityRegistry, state: State) -> bool:
     we only get significant changes (state.last_changed != state.last_updated)
     """
     return bool(
-        state.last_changed != state.last_updated
+        split_entity_id(state.entity_id)[0] in ALWAYS_CONTINUOUS_DOMAINS
+        or state.last_changed != state.last_updated
         or ATTR_UNIT_OF_MEASUREMENT in state.attributes
         or is_sensor_continuous(ent_reg, state.entity_id)
     )
@@ -193,7 +252,8 @@ def _is_entity_id_filtered(
     from the database when a list of entities is requested.
     """
     return bool(
-        (state := hass.states.get(entity_id))
+        split_entity_id(entity_id)[0] in ALWAYS_CONTINUOUS_DOMAINS
+        or (state := hass.states.get(entity_id))
         and (ATTR_UNIT_OF_MEASUREMENT in state.attributes)
        or is_sensor_continuous(ent_reg, entity_id)
    )
```
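Note: `async_determine_event_types` now resolves entity/device ids to config entries, config entries to domains, and domains to the externally registered event types, then re-adds the automation/script events that have no config entry. A simplified, hypothetical illustration of that mapping:

```python
# Hypothetical data; real registrations come from hass.data[DOMAIN].
external_events = {"hue_event": ("hue", lambda row: {})}  # event -> (domain, fn)
AUTOMATION_EVENTS = {"automation_triggered", "script_started"}

interested_domains = {"hue"}  # derived from the entity's config entry

event_types = {
    event
    for event, (domain, _describe) in external_events.items()
    if domain in interested_domains
} | AUTOMATION_EVENTS
assert event_types == {"hue_event", "automation_triggered", "script_started"}
```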
**homeassistant/components/logbook/processor.py**

```diff
@@ -5,8 +5,6 @@ from collections.abc import Callable, Generator
 from contextlib import suppress
 from dataclasses import dataclass
 from datetime import datetime as dt
-import logging
-import re
 from typing import Any
 
 from sqlalchemy.engine.row import Row
@@ -30,7 +28,6 @@ from homeassistant.const import (
 )
 from homeassistant.core import HomeAssistant, split_entity_id
 from homeassistant.helpers import entity_registry as er
-from homeassistant.helpers.entityfilter import EntityFilter
 import homeassistant.util.dt as dt_util
 
 from .const import (
@@ -46,7 +43,6 @@ from .const import (
     CONTEXT_STATE,
     CONTEXT_USER_ID,
     DOMAIN,
-    LOGBOOK_ENTITIES_FILTER,
     LOGBOOK_ENTRY_DOMAIN,
     LOGBOOK_ENTRY_ENTITY_ID,
     LOGBOOK_ENTRY_ICON,
@@ -62,11 +58,6 @@ from .models import EventAsRow, LazyEventPartialState, async_event_to_row
 from .queries import statement_for_request
 from .queries.common import PSUEDO_EVENT_STATE_CHANGED
 
-_LOGGER = logging.getLogger(__name__)
-
-ENTITY_ID_JSON_EXTRACT = re.compile('"entity_id": ?"([^"]+)"')
-DOMAIN_JSON_EXTRACT = re.compile('"domain": ?"([^"]+)"')
-
 
 @dataclass
 class LogbookRun:
@@ -106,10 +97,6 @@ class EventProcessor:
         self.device_ids = device_ids
         self.context_id = context_id
         self.filters: Filters | None = hass.data[LOGBOOK_FILTERS]
-        if self.limited_select:
-            self.entities_filter: EntityFilter | Callable[[str], bool] | None = None
-        else:
-            self.entities_filter = hass.data[LOGBOOK_ENTITIES_FILTER]
         format_time = (
             _row_time_fired_timestamp if timestamp else _row_time_fired_isoformat
         )
@@ -183,7 +170,6 @@ class EventProcessor:
         return list(
             _humanify(
                 row_generator,
-                self.entities_filter,
                 self.ent_reg,
                 self.logbook_run,
                 self.context_augmenter,
@@ -193,7 +179,6 @@ class EventProcessor:
 
 def _humanify(
     rows: Generator[Row | EventAsRow, None, None],
-    entities_filter: EntityFilter | Callable[[str], bool] | None,
     ent_reg: er.EntityRegistry,
     logbook_run: LogbookRun,
     context_augmenter: ContextAugmenter,
@@ -208,29 +193,13 @@ def _humanify(
     include_entity_name = logbook_run.include_entity_name
     format_time = logbook_run.format_time
 
-    def _keep_row(row: EventAsRow) -> bool:
-        """Check if the entity_filter rejects a row."""
-        assert entities_filter is not None
-        if entity_id := row.entity_id:
-            return entities_filter(entity_id)
-        if entity_id := row.data.get(ATTR_ENTITY_ID):
-            return entities_filter(entity_id)
-        if domain := row.data.get(ATTR_DOMAIN):
-            return entities_filter(f"{domain}._")
-        return True
-
     # Process rows
     for row in rows:
         context_id = context_lookup.memorize(row)
         if row.context_only:
             continue
         event_type = row.event_type
-        if event_type == EVENT_CALL_SERVICE or (
-            entities_filter
-            # We literally mean is EventAsRow not a subclass of EventAsRow
-            and type(row) is EventAsRow  # pylint: disable=unidiomatic-typecheck
-            and not _keep_row(row)
-        ):
+        if event_type == EVENT_CALL_SERVICE:
             continue
         if event_type is PSUEDO_EVENT_STATE_CHANGED:
             entity_id = row.entity_id
@@ -417,12 +386,6 @@ def _rows_match(row: Row | EventAsRow, other_row: Row | EventAsRow) -> bool:
     return False
 
 
-def _row_event_data_extract(row: Row | EventAsRow, extractor: re.Pattern) -> str | None:
-    """Extract from event_data row."""
-    result = extractor.search(row.shared_data or row.event_data or "")
-    return result.group(1) if result else None
-
-
 def _row_time_fired_isoformat(row: Row | EventAsRow) -> str:
     """Convert the row timed_fired to isoformat."""
     return process_timestamp_to_utc_isoformat(row.time_fired or dt_util.utcnow())
```
**homeassistant/components/logbook/queries/__init__.py**

```diff
@@ -2,8 +2,9 @@
 from __future__ import annotations
 
 from datetime import datetime as dt
+import json
 
-from sqlalchemy.sql.lambdas import StatementLambdaElement
+from sqlalchemy.sql.selectable import Select
 
 from homeassistant.components.recorder.filters import Filters
 
@@ -21,7 +22,7 @@ def statement_for_request(
     device_ids: list[str] | None = None,
     filters: Filters | None = None,
     context_id: str | None = None,
-) -> StatementLambdaElement:
+) -> Select:
     """Generate the logbook statement for a logbook request."""
 
     # No entities: logbook sends everything for the timeframe
@@ -38,41 +39,36 @@ def statement_for_request(
         context_id,
     )
 
-    # sqlalchemy caches object quoting, the
-    # json quotable ones must be a different
-    # object from the non-json ones to prevent
-    # sqlalchemy from quoting them incorrectly
-
     # entities and devices: logbook sends everything for the timeframe for the entities and devices
     if entity_ids and device_ids:
-        json_quotable_entity_ids = list(entity_ids)
-        json_quotable_device_ids = list(device_ids)
+        json_quoted_entity_ids = [json.dumps(entity_id) for entity_id in entity_ids]
+        json_quoted_device_ids = [json.dumps(device_id) for device_id in device_ids]
        return entities_devices_stmt(
            start_day,
            end_day,
            event_types,
            entity_ids,
-            json_quotable_entity_ids,
-            json_quotable_device_ids,
+            json_quoted_entity_ids,
+            json_quoted_device_ids,
        )
 
     # entities: logbook sends everything for the timeframe for the entities
     if entity_ids:
-        json_quotable_entity_ids = list(entity_ids)
+        json_quoted_entity_ids = [json.dumps(entity_id) for entity_id in entity_ids]
         return entities_stmt(
             start_day,
             end_day,
             event_types,
             entity_ids,
-            json_quotable_entity_ids,
+            json_quoted_entity_ids,
         )
 
     # devices: logbook sends everything for the timeframe for the devices
     assert device_ids is not None
-    json_quotable_device_ids = list(device_ids)
+    json_quoted_device_ids = [json.dumps(device_id) for device_id in device_ids]
     return devices_stmt(
         start_day,
         end_day,
         event_types,
-        json_quotable_device_ids,
+        json_quoted_device_ids,
     )
```
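Note: device and entity ids are matched against the raw JSON stored in `event_data`, so they must be compared in their JSON-quoted form. The old code kept a separate "quotable" list object and relied on sqlalchemy's cached quoting; `json.dumps` now makes the quoting explicit:

```python
import json

entity_ids = ["light.kitchen", "light.hallway"]
json_quoted = [json.dumps(entity_id) for entity_id in entity_ids]

# '"light.kitchen"' matches the value exactly as it appears in the stored
# JSON payload, quotes included.
assert json_quoted[0] == '"light.kitchen"'
```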
**homeassistant/components/logbook/queries/all.py**

```diff
@@ -3,10 +3,9 @@ from __future__ import annotations
 
 from datetime import datetime as dt
 
-from sqlalchemy import lambda_stmt
 from sqlalchemy.orm import Query
 from sqlalchemy.sql.elements import ClauseList
-from sqlalchemy.sql.lambdas import StatementLambdaElement
+from sqlalchemy.sql.selectable import Select
 
 from homeassistant.components.recorder.models import LAST_UPDATED_INDEX, Events, States
 
@@ -25,32 +24,29 @@ def all_stmt(
     states_entity_filter: ClauseList | None = None,
     events_entity_filter: ClauseList | None = None,
     context_id: str | None = None,
-) -> StatementLambdaElement:
+) -> Select:
     """Generate a logbook query for all entities."""
-    stmt = lambda_stmt(
-        lambda: select_events_without_states(start_day, end_day, event_types)
-    )
+    stmt = select_events_without_states(start_day, end_day, event_types)
     if context_id is not None:
         # Once all the old `state_changed` events
         # are gone from the database remove the
         # _legacy_select_events_context_id()
-        stmt += lambda s: s.where(Events.context_id == context_id).union_all(
+        stmt = stmt.where(Events.context_id == context_id).union_all(
             _states_query_for_context_id(start_day, end_day, context_id),
             legacy_select_events_context_id(start_day, end_day, context_id),
         )
     else:
         if events_entity_filter is not None:
-            stmt += lambda s: s.where(events_entity_filter)
+            stmt = stmt.where(events_entity_filter)
 
         if states_entity_filter is not None:
-            stmt += lambda s: s.union_all(
+            stmt = stmt.union_all(
                 _states_query_for_all(start_day, end_day).where(states_entity_filter)
             )
         else:
-            stmt += lambda s: s.union_all(_states_query_for_all(start_day, end_day))
+            stmt = stmt.union_all(_states_query_for_all(start_day, end_day))
 
-    stmt += lambda s: s.order_by(Events.time_fired)
-    return stmt
+    return stmt.order_by(Events.time_fired)
 
 
 def _states_query_for_all(start_day: dt, end_day: dt) -> Query:
```
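Note: across these query modules, `lambda_stmt` accumulation (`stmt += lambda s: ...`) is replaced with eager composition of a plain `Select`. A sketch of the pattern change with a stand-in table, not the actual logbook statements:

```python
import sqlalchemy as sa

events = sa.table("events", sa.column("context_id"), sa.column("time_fired"))

stmt = sa.select(events.c.context_id)            # plain Select, built eagerly
stmt = stmt.where(events.c.context_id == "abc")  # was: stmt += lambda s: s.where(...)
stmt = stmt.order_by(events.c.time_fired)        # composition is now direct
```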
**homeassistant/components/logbook/queries/common.py**

```diff
@@ -10,25 +10,31 @@ from sqlalchemy.sql.elements import ClauseList
 from sqlalchemy.sql.expression import literal
 from sqlalchemy.sql.selectable import Select
 
-from homeassistant.components.proximity import DOMAIN as PROXIMITY_DOMAIN
+from homeassistant.components.recorder.filters import like_domain_matchers
 from homeassistant.components.recorder.models import (
+    EVENTS_CONTEXT_ID_INDEX,
     OLD_FORMAT_ATTRS_JSON,
     OLD_STATE,
     SHARED_ATTRS_JSON,
+    STATES_CONTEXT_ID_INDEX,
     EventData,
     Events,
     StateAttributes,
     States,
 )
-from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
 
-CONTINUOUS_DOMAINS = {PROXIMITY_DOMAIN, SENSOR_DOMAIN}
-CONTINUOUS_ENTITY_ID_LIKE = [f"{domain}.%" for domain in CONTINUOUS_DOMAINS]
+from ..const import ALWAYS_CONTINUOUS_DOMAINS, CONDITIONALLY_CONTINUOUS_DOMAINS
+
+# Domains that are continuous if there is a UOM set on the entity
+CONDITIONALLY_CONTINUOUS_ENTITY_ID_LIKE = like_domain_matchers(
+    CONDITIONALLY_CONTINUOUS_DOMAINS
+)
+# Domains that are always continuous
+ALWAYS_CONTINUOUS_ENTITY_ID_LIKE = like_domain_matchers(ALWAYS_CONTINUOUS_DOMAINS)
 
 UNIT_OF_MEASUREMENT_JSON = '"unit_of_measurement":'
 UNIT_OF_MEASUREMENT_JSON_LIKE = f"%{UNIT_OF_MEASUREMENT_JSON}%"
 
 
 PSUEDO_EVENT_STATE_CHANGED = None
 # Since we don't store event_types and None
 # and we don't store state_changed in events
@@ -121,9 +127,7 @@ def select_events_context_only() -> Select:
     By marking them as context_only we know they are only for
     linking context ids and we can avoid processing them.
     """
-    return select(*EVENT_ROWS_NO_STATES, CONTEXT_ONLY).outerjoin(
-        EventData, (Events.data_id == EventData.data_id)
-    )
+    return select(*EVENT_ROWS_NO_STATES, CONTEXT_ONLY)
 
 
 def select_states_context_only() -> Select:
@@ -220,29 +224,44 @@ def _missing_state_matcher() -> sqlalchemy.and_:
 def _not_continuous_entity_matcher() -> sqlalchemy.or_:
     """Match non continuous entities."""
     return sqlalchemy.or_(
-        _not_continuous_domain_matcher(),
+        # First exclude domains that may be continuous
+        _not_possible_continuous_domain_matcher(),
+        # But let in the entities in the possible continuous domains
+        # that are not actually continuous sensors because they lack a UOM
         sqlalchemy.and_(
-            _continuous_domain_matcher, _not_uom_attributes_matcher()
+            _conditionally_continuous_domain_matcher, _not_uom_attributes_matcher()
         ).self_group(),
     )
 
 
-def _not_continuous_domain_matcher() -> sqlalchemy.and_:
-    """Match not continuous domains."""
+def _not_possible_continuous_domain_matcher() -> sqlalchemy.and_:
+    """Match not continuous domains.
+
+    This matches domain that are always considered continuous
+    and domains that are conditionally (if they have a UOM)
+    continuous domains.
+    """
     return sqlalchemy.and_(
         *[
             ~States.entity_id.like(entity_domain)
-            for entity_domain in CONTINUOUS_ENTITY_ID_LIKE
+            for entity_domain in (
+                *ALWAYS_CONTINUOUS_ENTITY_ID_LIKE,
+                *CONDITIONALLY_CONTINUOUS_ENTITY_ID_LIKE,
+            )
         ],
     ).self_group()
 
 
-def _continuous_domain_matcher() -> sqlalchemy.or_:
-    """Match continuous domains."""
+def _conditionally_continuous_domain_matcher() -> sqlalchemy.or_:
+    """Match conditionally continuous domains.
+
+    This matches domain that are only considered
+    continuous if a UOM is set.
+    """
     return sqlalchemy.or_(
         *[
             States.entity_id.like(entity_domain)
-            for entity_domain in CONTINUOUS_ENTITY_ID_LIKE
+            for entity_domain in CONDITIONALLY_CONTINUOUS_ENTITY_ID_LIKE
         ],
     ).self_group()
@@ -252,3 +271,17 @@ def _not_uom_attributes_matcher() -> ClauseList:
     return ~StateAttributes.shared_attrs.like(
         UNIT_OF_MEASUREMENT_JSON_LIKE
     ) | ~States.attributes.like(UNIT_OF_MEASUREMENT_JSON_LIKE)
+
+
+def apply_states_context_hints(query: Query) -> Query:
+    """Force mysql to use the right index on large context_id selects."""
+    return query.with_hint(
+        States, f"FORCE INDEX ({STATES_CONTEXT_ID_INDEX})", dialect_name="mysql"
+    )
+
+
+def apply_events_context_hints(query: Query) -> Query:
+    """Force mysql to use the right index on large context_id selects."""
+    return query.with_hint(
+        Events, f"FORCE INDEX ({EVENTS_CONTEXT_ID_INDEX})", dialect_name="mysql"
+    )
```
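Note: the new `apply_*_context_hints` helpers rely on `with_hint(..., dialect_name="mysql")`, which emits a `FORCE INDEX` hint only when the statement is compiled for MySQL and is ignored by other dialects. A stand-in sketch of the mechanism (table and index names are hypothetical):

```python
import sqlalchemy as sa

states = sa.table("states", sa.column("context_id"))

query = (
    sa.select(states.c.context_id)
    .select_from(states)
    .with_hint(states, "FORCE INDEX (ix_states_context_id)", dialect_name="mysql")
)
# Compiled for MySQL the hint follows the table name; elsewhere it is dropped.
```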
**homeassistant/components/logbook/queries/devices.py**

```diff
@@ -4,15 +4,21 @@ from __future__ import annotations
 from collections.abc import Iterable
 from datetime import datetime as dt
 
-from sqlalchemy import lambda_stmt, select, union_all
+from sqlalchemy import select
 from sqlalchemy.orm import Query
 from sqlalchemy.sql.elements import ClauseList
-from sqlalchemy.sql.lambdas import StatementLambdaElement
-from sqlalchemy.sql.selectable import CTE, CompoundSelect
+from sqlalchemy.sql.selectable import CTE, CompoundSelect, Select
 
-from homeassistant.components.recorder.models import DEVICE_ID_IN_EVENT, Events, States
+from homeassistant.components.recorder.models import (
+    DEVICE_ID_IN_EVENT,
+    EventData,
+    Events,
+    States,
+)
 
 from .common import (
+    apply_events_context_hints,
+    apply_states_context_hints,
     select_events_context_id_subquery,
     select_events_context_only,
     select_events_without_states,
@@ -24,16 +30,13 @@ def _select_device_id_context_ids_sub_query(
     start_day: dt,
     end_day: dt,
     event_types: tuple[str, ...],
-    json_quotable_device_ids: list[str],
+    json_quoted_device_ids: list[str],
 ) -> CompoundSelect:
     """Generate a subquery to find context ids for multiple devices."""
-    return select(
-        union_all(
-            select_events_context_id_subquery(start_day, end_day, event_types).where(
-                apply_event_device_id_matchers(json_quotable_device_ids)
-            ),
-        ).c.context_id
-    )
+    inner = select_events_context_id_subquery(start_day, end_day, event_types).where(
+        apply_event_device_id_matchers(json_quoted_device_ids)
+    )
+    return select(inner.c.context_id).group_by(inner.c.context_id)
 
 
 def _apply_devices_context_union(
@@ -41,18 +44,26 @@ def _apply_devices_context_union(
     start_day: dt,
     end_day: dt,
     event_types: tuple[str, ...],
-    json_quotable_device_ids: list[str],
+    json_quoted_device_ids: list[str],
 ) -> CompoundSelect:
     """Generate a CTE to find the device context ids and a query to find linked row."""
     devices_cte: CTE = _select_device_id_context_ids_sub_query(
         start_day,
         end_day,
         event_types,
-        json_quotable_device_ids,
+        json_quoted_device_ids,
     ).cte()
     return query.union_all(
-        select_events_context_only().where(Events.context_id.in_(devices_cte.select())),
-        select_states_context_only().where(States.context_id.in_(devices_cte.select())),
+        apply_events_context_hints(
+            select_events_context_only()
+            .select_from(devices_cte)
+            .outerjoin(Events, devices_cte.c.context_id == Events.context_id)
+        ).outerjoin(EventData, (Events.data_id == EventData.data_id)),
+        apply_states_context_hints(
+            select_states_context_only()
+            .select_from(devices_cte)
+            .outerjoin(States, devices_cte.c.context_id == States.context_id)
+        ),
     )
 
 
@@ -60,25 +71,22 @@ def devices_stmt(
     start_day: dt,
     end_day: dt,
     event_types: tuple[str, ...],
-    json_quotable_device_ids: list[str],
-) -> StatementLambdaElement:
+    json_quoted_device_ids: list[str],
+) -> Select:
     """Generate a logbook query for multiple devices."""
-    stmt = lambda_stmt(
-        lambda: _apply_devices_context_union(
-            select_events_without_states(start_day, end_day, event_types).where(
-                apply_event_device_id_matchers(json_quotable_device_ids)
-            ),
-            start_day,
-            end_day,
-            event_types,
-            json_quotable_device_ids,
-        ).order_by(Events.time_fired)
-    )
-    return stmt
+    return _apply_devices_context_union(
+        select_events_without_states(start_day, end_day, event_types).where(
+            apply_event_device_id_matchers(json_quoted_device_ids)
+        ),
+        start_day,
+        end_day,
+        event_types,
+        json_quoted_device_ids,
+    ).order_by(Events.time_fired)
 
 
 def apply_event_device_id_matchers(
-    json_quotable_device_ids: Iterable[str],
+    json_quoted_device_ids: Iterable[str],
 ) -> ClauseList:
     """Create matchers for the device_ids in the event_data."""
-    return DEVICE_ID_IN_EVENT.in_(json_quotable_device_ids)
+    return DEVICE_ID_IN_EVENT.in_(json_quoted_device_ids)
```
@@ -5,20 +5,22 @@ from collections.abc import Iterable
 from datetime import datetime as dt
 
 import sqlalchemy
-from sqlalchemy import lambda_stmt, select, union_all
+from sqlalchemy import select, union_all
 from sqlalchemy.orm import Query
-from sqlalchemy.sql.lambdas import StatementLambdaElement
-from sqlalchemy.sql.selectable import CTE, CompoundSelect
+from sqlalchemy.sql.selectable import CTE, CompoundSelect, Select
 
 from homeassistant.components.recorder.models import (
     ENTITY_ID_IN_EVENT,
     ENTITY_ID_LAST_UPDATED_INDEX,
     OLD_ENTITY_ID_IN_EVENT,
+    EventData,
     Events,
     States,
 )
 
 from .common import (
+    apply_events_context_hints,
+    apply_states_context_hints,
     apply_states_filters,
     select_events_context_id_subquery,
     select_events_context_only,
@@ -33,19 +35,18 @@ def _select_entities_context_ids_sub_query(
     end_day: dt,
     event_types: tuple[str, ...],
     entity_ids: list[str],
-    json_quotable_entity_ids: list[str],
+    json_quoted_entity_ids: list[str],
 ) -> CompoundSelect:
     """Generate a subquery to find context ids for multiple entities."""
-    return select(
-        union_all(
-            select_events_context_id_subquery(start_day, end_day, event_types).where(
-                apply_event_entity_id_matchers(json_quotable_entity_ids)
-            ),
-            apply_entities_hints(select(States.context_id))
-            .filter((States.last_updated > start_day) & (States.last_updated < end_day))
-            .where(States.entity_id.in_(entity_ids)),
-        ).c.context_id
-    )
+    union = union_all(
+        select_events_context_id_subquery(start_day, end_day, event_types).where(
+            apply_event_entity_id_matchers(json_quoted_entity_ids)
+        ),
+        apply_entities_hints(select(States.context_id))
+        .filter((States.last_updated > start_day) & (States.last_updated < end_day))
+        .where(States.entity_id.in_(entity_ids)),
+    )
+    return select(union.c.context_id).group_by(union.c.context_id)
 
 
 def _apply_entities_context_union(
@@ -54,7 +55,7 @@ def _apply_entities_context_union(
     end_day: dt,
     event_types: tuple[str, ...],
     entity_ids: list[str],
-    json_quotable_entity_ids: list[str],
+    json_quoted_entity_ids: list[str],
 ) -> CompoundSelect:
     """Generate a CTE to find the entity and device context ids and a query to find linked row."""
     entities_cte: CTE = _select_entities_context_ids_sub_query(
@@ -62,16 +63,25 @@ def _apply_entities_context_union(
         end_day,
         event_types,
         entity_ids,
-        json_quotable_entity_ids,
+        json_quoted_entity_ids,
     ).cte()
+    # We used to optimize this to exclude rows that are already in the union with
+    # a States.entity_id.not_in(entity_ids), but that made the query much slower
+    # on MySQL; since we already filter them away in the Python code anyway (they
+    # will have context_only set on them), the impact is minimal.
     return query.union_all(
         states_query_for_entity_ids(start_day, end_day, entity_ids),
-        select_events_context_only().where(
-            Events.context_id.in_(entities_cte.select())
+        apply_events_context_hints(
+            select_events_context_only()
+            .select_from(entities_cte)
+            .outerjoin(Events, entities_cte.c.context_id == Events.context_id)
+        ).outerjoin(EventData, (Events.data_id == EventData.data_id)),
+        apply_states_context_hints(
+            select_states_context_only()
+            .select_from(entities_cte)
+            .outerjoin(States, entities_cte.c.context_id == States.context_id)
         ),
-        select_states_context_only()
-        .where(States.entity_id.not_in(entity_ids))
-        .where(States.context_id.in_(entities_cte.select())),
     )
 
 
@@ -80,21 +90,19 @@ def entities_stmt(
     end_day: dt,
     event_types: tuple[str, ...],
     entity_ids: list[str],
-    json_quotable_entity_ids: list[str],
-) -> StatementLambdaElement:
+    json_quoted_entity_ids: list[str],
+) -> Select:
     """Generate a logbook query for multiple entities."""
-    return lambda_stmt(
-        lambda: _apply_entities_context_union(
-            select_events_without_states(start_day, end_day, event_types).where(
-                apply_event_entity_id_matchers(json_quotable_entity_ids)
-            ),
-            start_day,
-            end_day,
-            event_types,
-            entity_ids,
-            json_quotable_entity_ids,
-        ).order_by(Events.time_fired)
-    )
+    return _apply_entities_context_union(
+        select_events_without_states(start_day, end_day, event_types).where(
+            apply_event_entity_id_matchers(json_quoted_entity_ids)
+        ),
+        start_day,
+        end_day,
+        event_types,
+        entity_ids,
+        json_quoted_entity_ids,
+    ).order_by(Events.time_fired)
 
 
 def states_query_for_entity_ids(
@@ -107,12 +115,12 @@ def states_query_for_entity_ids(
 
 
 def apply_event_entity_id_matchers(
-    json_quotable_entity_ids: Iterable[str],
+    json_quoted_entity_ids: Iterable[str],
 ) -> sqlalchemy.or_:
     """Create matchers for the entity_id in the event_data."""
-    return ENTITY_ID_IN_EVENT.in_(
-        json_quotable_entity_ids
-    ) | OLD_ENTITY_ID_IN_EVENT.in_(json_quotable_entity_ids)
+    return ENTITY_ID_IN_EVENT.in_(json_quoted_entity_ids) | OLD_ENTITY_ID_IN_EVENT.in_(
+        json_quoted_entity_ids
+    )
 
 
 def apply_entities_hints(query: Query) -> Query:
@@ -5,14 +5,15 @@ from collections.abc import Iterable
 from datetime import datetime as dt
 
 import sqlalchemy
-from sqlalchemy import lambda_stmt, select, union_all
+from sqlalchemy import select, union_all
 from sqlalchemy.orm import Query
-from sqlalchemy.sql.lambdas import StatementLambdaElement
-from sqlalchemy.sql.selectable import CTE, CompoundSelect
+from sqlalchemy.sql.selectable import CTE, CompoundSelect, Select
 
-from homeassistant.components.recorder.models import Events, States
+from homeassistant.components.recorder.models import EventData, Events, States
 
 from .common import (
+    apply_events_context_hints,
+    apply_states_context_hints,
     select_events_context_id_subquery,
     select_events_context_only,
     select_events_without_states,
@@ -31,22 +32,21 @@ def _select_entities_device_id_context_ids_sub_query(
     end_day: dt,
     event_types: tuple[str, ...],
     entity_ids: list[str],
-    json_quotable_entity_ids: list[str],
-    json_quotable_device_ids: list[str],
+    json_quoted_entity_ids: list[str],
+    json_quoted_device_ids: list[str],
 ) -> CompoundSelect:
     """Generate a subquery to find context ids for multiple entities and multiple devices."""
-    return select(
-        union_all(
-            select_events_context_id_subquery(start_day, end_day, event_types).where(
-                _apply_event_entity_id_device_id_matchers(
-                    json_quotable_entity_ids, json_quotable_device_ids
-                )
-            ),
-            apply_entities_hints(select(States.context_id))
-            .filter((States.last_updated > start_day) & (States.last_updated < end_day))
-            .where(States.entity_id.in_(entity_ids)),
-        ).c.context_id
-    )
+    union = union_all(
+        select_events_context_id_subquery(start_day, end_day, event_types).where(
+            _apply_event_entity_id_device_id_matchers(
+                json_quoted_entity_ids, json_quoted_device_ids
+            )
+        ),
+        apply_entities_hints(select(States.context_id))
+        .filter((States.last_updated > start_day) & (States.last_updated < end_day))
+        .where(States.entity_id.in_(entity_ids)),
+    )
+    return select(union.c.context_id).group_by(union.c.context_id)
 
 
 def _apply_entities_devices_context_union(
@@ -55,25 +55,34 @@ def _apply_entities_devices_context_union(
     end_day: dt,
     event_types: tuple[str, ...],
     entity_ids: list[str],
-    json_quotable_entity_ids: list[str],
-    json_quotable_device_ids: list[str],
+    json_quoted_entity_ids: list[str],
+    json_quoted_device_ids: list[str],
 ) -> CompoundSelect:
     devices_entities_cte: CTE = _select_entities_device_id_context_ids_sub_query(
         start_day,
         end_day,
         event_types,
         entity_ids,
-        json_quotable_entity_ids,
-        json_quotable_device_ids,
+        json_quoted_entity_ids,
+        json_quoted_device_ids,
     ).cte()
+    # We used to optimize this to exclude rows that are already in the union with
+    # a States.entity_id.not_in(entity_ids), but that made the query much slower
+    # on MySQL; since we already filter them away in the Python code anyway (they
+    # will have context_only set on them), the impact is minimal.
     return query.union_all(
        states_query_for_entity_ids(start_day, end_day, entity_ids),
-        select_events_context_only().where(
-            Events.context_id.in_(devices_entities_cte.select())
+        apply_events_context_hints(
+            select_events_context_only()
+            .select_from(devices_entities_cte)
+            .outerjoin(Events, devices_entities_cte.c.context_id == Events.context_id)
+        ).outerjoin(EventData, (Events.data_id == EventData.data_id)),
+        apply_states_context_hints(
+            select_states_context_only()
+            .select_from(devices_entities_cte)
+            .outerjoin(States, devices_entities_cte.c.context_id == States.context_id)
        ),
-        select_states_context_only()
-        .where(States.entity_id.not_in(entity_ids))
-        .where(States.context_id.in_(devices_entities_cte.select())),
     )
 
 
@@ -82,32 +91,30 @@ def entities_devices_stmt(
     end_day: dt,
     event_types: tuple[str, ...],
     entity_ids: list[str],
-    json_quotable_entity_ids: list[str],
-    json_quotable_device_ids: list[str],
-) -> StatementLambdaElement:
+    json_quoted_entity_ids: list[str],
+    json_quoted_device_ids: list[str],
+) -> Select:
     """Generate a logbook query for multiple entities and devices."""
-    stmt = lambda_stmt(
-        lambda: _apply_entities_devices_context_union(
-            select_events_without_states(start_day, end_day, event_types).where(
-                _apply_event_entity_id_device_id_matchers(
-                    json_quotable_entity_ids, json_quotable_device_ids
-                )
-            ),
-            start_day,
-            end_day,
-            event_types,
-            entity_ids,
-            json_quotable_entity_ids,
-            json_quotable_device_ids,
-        ).order_by(Events.time_fired)
-    )
+    stmt = _apply_entities_devices_context_union(
+        select_events_without_states(start_day, end_day, event_types).where(
+            _apply_event_entity_id_device_id_matchers(
+                json_quoted_entity_ids, json_quoted_device_ids
+            )
+        ),
+        start_day,
+        end_day,
+        event_types,
+        entity_ids,
+        json_quoted_entity_ids,
+        json_quoted_device_ids,
+    ).order_by(Events.time_fired)
     return stmt
 
 
 def _apply_event_entity_id_device_id_matchers(
-    json_quotable_entity_ids: Iterable[str], json_quotable_device_ids: Iterable[str]
+    json_quoted_entity_ids: Iterable[str], json_quoted_device_ids: Iterable[str]
 ) -> sqlalchemy.or_:
     """Create matchers for the device_id and entity_id in the event_data."""
     return apply_event_entity_id_matchers(
-        json_quotable_entity_ids
-    ) | apply_event_device_id_matchers(json_quotable_device_ids)
+        json_quoted_entity_ids
+    ) | apply_event_device_id_matchers(json_quoted_device_ids)
@@ -16,9 +16,11 @@ from homeassistant.components.websocket_api import messages
 from homeassistant.components.websocket_api.connection import ActiveConnection
 from homeassistant.components.websocket_api.const import JSON_DUMP
 from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback
+from homeassistant.helpers.entityfilter import EntityFilter
 from homeassistant.helpers.event import async_track_point_in_utc_time
 import homeassistant.util.dt as dt_util
 
+from .const import LOGBOOK_ENTITIES_FILTER
 from .helpers import (
     async_determine_event_types,
     async_filter_entities,
@@ -67,6 +69,23 @@ async def _async_wait_for_recorder_sync(hass: HomeAssistant) -> None:
     )
 
 
+@callback
+def _async_send_empty_response(
+    connection: ActiveConnection, msg_id: int, start_time: dt, end_time: dt | None
+) -> None:
+    """Send an empty response.
+
+    The current case for this is when the caller asks for entity_ids that
+    will all be filtered away because they have UOMs or state_class.
+    """
+    connection.send_result(msg_id)
+    stream_end_time = end_time or dt_util.utcnow()
+    empty_stream_message = _generate_stream_message([], start_time, stream_end_time)
+    empty_response = messages.event_message(msg_id, empty_stream_message)
+    connection.send_message(JSON_DUMP(empty_response))
+
+
 async def _async_send_historical_events(
     hass: HomeAssistant,
     connection: ActiveConnection,
@@ -171,6 +190,17 @@ async def _async_get_ws_stream_events(
     )
 
 
+def _generate_stream_message(
+    events: list[dict[str, Any]], start_day: dt, end_day: dt
+) -> dict[str, Any]:
+    """Generate a logbook stream message response."""
+    return {
+        "events": events,
+        "start_time": dt_util.utc_to_timestamp(start_day),
+        "end_time": dt_util.utc_to_timestamp(end_day),
+    }
+
+
 def _ws_stream_get_events(
     msg_id: int,
     start_day: dt,
@@ -184,11 +214,7 @@ def _ws_stream_get_events(
     last_time = None
     if events:
         last_time = dt_util.utc_from_timestamp(events[-1]["when"])
-    message = {
-        "events": events,
-        "start_time": dt_util.utc_to_timestamp(start_day),
-        "end_time": dt_util.utc_to_timestamp(end_day),
-    }
+    message = _generate_stream_message(events, start_day, end_day)
     if partial:
         # This is a hint to consumers of the API that
         # we are about to send another block of historical
@@ -275,6 +301,10 @@ async def ws_event_stream(
     entity_ids = msg.get("entity_ids")
     if entity_ids:
         entity_ids = async_filter_entities(hass, entity_ids)
+        if not entity_ids:
+            _async_send_empty_response(connection, msg_id, start_time, end_time)
+            return
 
     event_types = async_determine_event_types(hass, entity_ids, device_ids)
     event_processor = EventProcessor(
         hass,
@@ -337,8 +367,18 @@ async def ws_event_stream(
     )
     _unsub()
 
+    entities_filter: EntityFilter | None = None
+    if not event_processor.limited_select:
+        entities_filter = hass.data[LOGBOOK_ENTITIES_FILTER]
+
     async_subscribe_events(
-        hass, subscriptions, _queue_or_cancel, event_types, entity_ids, device_ids
+        hass,
+        subscriptions,
+        _queue_or_cancel,
+        event_types,
+        entities_filter,
+        entity_ids,
+        device_ids,
     )
     subscriptions_setup_complete_time = dt_util.utcnow()
     connection.subscriptions[msg_id] = _unsub
@@ -3,7 +3,7 @@
   "name": "LOOKin",
   "documentation": "https://www.home-assistant.io/integrations/lookin/",
   "codeowners": ["@ANMalko", "@bdraco"],
-  "requirements": ["aiolookin==0.1.0"],
+  "requirements": ["aiolookin==0.1.1"],
   "zeroconf": ["_lookin._tcp.local."],
   "config_flow": true,
   "iot_class": "local_push",
@@ -110,7 +110,7 @@ def _state_schema(state):
 
 PLATFORM_SCHEMA = vol.Schema(
     vol.All(
-        mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend(
+        mqtt.config.MQTT_BASE_SCHEMA.extend(
             {
                 vol.Required(CONF_PLATFORM): "manual_mqtt",
                 vol.Optional(CONF_NAME, default=DEFAULT_ALARM_NAME): cv.string,
File diff suppressed because it is too large
@@ -1,7 +1,6 @@
 """This platform enables the possibility to control an MQTT alarm."""
 from __future__ import annotations
 
-import asyncio
 import functools
 import logging
 import re
@@ -31,8 +30,8 @@ import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 
-from . import MqttCommandTemplate, MqttValueTemplate, subscription
-from .. import mqtt
+from . import subscription
+from .config import DEFAULT_RETAIN, MQTT_BASE_SCHEMA
 from .const import (
     CONF_COMMAND_TEMPLATE,
     CONF_COMMAND_TOPIC,
@@ -45,11 +44,13 @@ from .debug_info import log_messages
 from .mixins import (
     MQTT_ENTITY_COMMON_SCHEMA,
     MqttEntity,
-    async_get_platform_config_from_yaml,
     async_setup_entry_helper,
+    async_setup_platform_discovery,
     async_setup_platform_helper,
     warn_for_legacy_schema,
 )
+from .models import MqttCommandTemplate, MqttValueTemplate
+from .util import valid_publish_topic, valid_subscribe_topic
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -85,7 +86,7 @@ DEFAULT_NAME = "MQTT Alarm"
 REMOTE_CODE = "REMOTE_CODE"
 REMOTE_CODE_TEXT = "REMOTE_CODE_TEXT"
 
-PLATFORM_SCHEMA_MODERN = mqtt.MQTT_BASE_SCHEMA.extend(
+PLATFORM_SCHEMA_MODERN = MQTT_BASE_SCHEMA.extend(
     {
         vol.Optional(CONF_CODE): cv.string,
         vol.Optional(CONF_CODE_ARM_REQUIRED, default=True): cv.boolean,
@@ -94,7 +95,7 @@ PLATFORM_SCHEMA_MODERN = mqtt.MQTT_BASE_SCHEMA.extend(
         vol.Optional(
             CONF_COMMAND_TEMPLATE, default=DEFAULT_COMMAND_TEMPLATE
         ): cv.template,
-        vol.Required(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
+        vol.Required(CONF_COMMAND_TOPIC): valid_publish_topic,
         vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
         vol.Optional(CONF_PAYLOAD_ARM_AWAY, default=DEFAULT_ARM_AWAY): cv.string,
         vol.Optional(CONF_PAYLOAD_ARM_HOME, default=DEFAULT_ARM_HOME): cv.string,
@@ -107,8 +108,8 @@ PLATFORM_SCHEMA_MODERN = mqtt.MQTT_BASE_SCHEMA.extend(
         ): cv.string,
         vol.Optional(CONF_PAYLOAD_DISARM, default=DEFAULT_DISARM): cv.string,
         vol.Optional(CONF_PAYLOAD_TRIGGER, default=DEFAULT_TRIGGER): cv.string,
-        vol.Optional(CONF_RETAIN, default=mqtt.DEFAULT_RETAIN): cv.boolean,
-        vol.Required(CONF_STATE_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
+        vol.Required(CONF_STATE_TOPIC): valid_subscribe_topic,
         vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
     }
 ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema)
@@ -131,7 +132,11 @@ async def async_setup_platform(
     """Set up MQTT alarm control panel configured under the alarm_control_panel key (deprecated)."""
     # Deprecated in HA Core 2022.6
     await async_setup_platform_helper(
-        hass, alarm.DOMAIN, config, async_add_entities, _async_setup_entity
+        hass,
+        alarm.DOMAIN,
+        discovery_info or config,
+        async_add_entities,
+        _async_setup_entity,
     )
@@ -142,13 +147,8 @@ async def async_setup_entry(
 ) -> None:
     """Set up MQTT alarm control panel through configuration.yaml and dynamically through MQTT discovery."""
     # load and initialize platform config from configuration.yaml
-    await asyncio.gather(
-        *(
-            _async_setup_entity(hass, async_add_entities, config, config_entry)
-            for config in await async_get_platform_config_from_yaml(
-                hass, alarm.DOMAIN, PLATFORM_SCHEMA_MODERN
-            )
-        )
+    config_entry.async_on_unload(
+        await async_setup_platform_discovery(hass, alarm.DOMAIN, PLATFORM_SCHEMA_MODERN)
     )
     # setup for discovery
     setup = functools.partial(
@@ -1,7 +1,6 @@
 """Support for MQTT binary sensors."""
 from __future__ import annotations
 
-import asyncio
 from datetime import timedelta
 import functools
 import logging
@@ -34,19 +33,20 @@ from homeassistant.helpers.restore_state import RestoreEntity
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 from homeassistant.util import dt as dt_util
 
-from . import MqttValueTemplate, subscription
-from .. import mqtt
+from . import subscription
+from .config import MQTT_RO_SCHEMA
 from .const import CONF_ENCODING, CONF_QOS, CONF_STATE_TOPIC, PAYLOAD_NONE
 from .debug_info import log_messages
 from .mixins import (
     MQTT_ENTITY_COMMON_SCHEMA,
     MqttAvailability,
     MqttEntity,
-    async_get_platform_config_from_yaml,
     async_setup_entry_helper,
+    async_setup_platform_discovery,
     async_setup_platform_helper,
     warn_for_legacy_schema,
 )
+from .models import MqttValueTemplate
 
 _LOGGER = logging.getLogger(__name__)
@@ -57,7 +57,7 @@ DEFAULT_PAYLOAD_ON = "ON"
 DEFAULT_FORCE_UPDATE = False
 CONF_EXPIRE_AFTER = "expire_after"
 
-PLATFORM_SCHEMA_MODERN = mqtt.MQTT_RO_SCHEMA.extend(
+PLATFORM_SCHEMA_MODERN = MQTT_RO_SCHEMA.extend(
     {
         vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
         vol.Optional(CONF_EXPIRE_AFTER): cv.positive_int,
@@ -87,7 +87,11 @@ async def async_setup_platform(
     """Set up MQTT binary sensor configured under the binary_sensor platform key (deprecated)."""
     # Deprecated in HA Core 2022.6
     await async_setup_platform_helper(
-        hass, binary_sensor.DOMAIN, config, async_add_entities, _async_setup_entity
+        hass,
+        binary_sensor.DOMAIN,
+        discovery_info or config,
+        async_add_entities,
+        _async_setup_entity,
     )
@@ -98,12 +102,9 @@ async def async_setup_entry(
 ) -> None:
     """Set up MQTT binary sensor through configuration.yaml and dynamically through MQTT discovery."""
     # load and initialize platform config from configuration.yaml
-    await asyncio.gather(
-        *(
-            _async_setup_entity(hass, async_add_entities, config, config_entry)
-            for config in await async_get_platform_config_from_yaml(
-                hass, binary_sensor.DOMAIN, PLATFORM_SCHEMA_MODERN
-            )
-        )
+    config_entry.async_on_unload(
+        await async_setup_platform_discovery(
+            hass, binary_sensor.DOMAIN, PLATFORM_SCHEMA_MODERN
+        )
     )
     # setup for discovery
@@ -1,7 +1,6 @@
 """Support for MQTT buttons."""
 from __future__ import annotations
 
-import asyncio
 import functools
 
 import voluptuous as vol
@@ -15,8 +14,7 @@ import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 
-from . import MqttCommandTemplate
-from .. import mqtt
+from .config import DEFAULT_RETAIN, MQTT_BASE_SCHEMA
 from .const import (
     CONF_COMMAND_TEMPLATE,
     CONF_COMMAND_TOPIC,
@@ -27,24 +25,26 @@ from .const import (
 from .mixins import (
     MQTT_ENTITY_COMMON_SCHEMA,
     MqttEntity,
-    async_get_platform_config_from_yaml,
     async_setup_entry_helper,
+    async_setup_platform_discovery,
     async_setup_platform_helper,
     warn_for_legacy_schema,
 )
+from .models import MqttCommandTemplate
+from .util import valid_publish_topic
 
 CONF_PAYLOAD_PRESS = "payload_press"
 DEFAULT_NAME = "MQTT Button"
 DEFAULT_PAYLOAD_PRESS = "PRESS"
 
-PLATFORM_SCHEMA_MODERN = mqtt.MQTT_BASE_SCHEMA.extend(
+PLATFORM_SCHEMA_MODERN = MQTT_BASE_SCHEMA.extend(
     {
         vol.Optional(CONF_COMMAND_TEMPLATE): cv.template,
-        vol.Required(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
+        vol.Required(CONF_COMMAND_TOPIC): valid_publish_topic,
         vol.Optional(CONF_DEVICE_CLASS): button.DEVICE_CLASSES_SCHEMA,
         vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
         vol.Optional(CONF_PAYLOAD_PRESS, default=DEFAULT_PAYLOAD_PRESS): cv.string,
-        vol.Optional(CONF_RETAIN, default=mqtt.DEFAULT_RETAIN): cv.boolean,
+        vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
     }
 ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema)
@@ -67,7 +67,11 @@ async def async_setup_platform(
     """Set up MQTT button configured under the button platform key (deprecated)."""
     # Deprecated in HA Core 2022.6
     await async_setup_platform_helper(
-        hass, button.DOMAIN, config, async_add_entities, _async_setup_entity
+        hass,
+        button.DOMAIN,
+        discovery_info or config,
+        async_add_entities,
+        _async_setup_entity,
     )
@@ -78,12 +82,9 @@ async def async_setup_entry(
 ) -> None:
     """Set up MQTT button through configuration.yaml and dynamically through MQTT discovery."""
     # load and initialize platform config from configuration.yaml
-    await asyncio.gather(
-        *(
-            _async_setup_entity(hass, async_add_entities, config, config_entry)
-            for config in await async_get_platform_config_from_yaml(
-                hass, button.DOMAIN, PLATFORM_SCHEMA_MODERN
-            )
-        )
+    config_entry.async_on_unload(
+        await async_setup_platform_discovery(
+            hass, button.DOMAIN, PLATFORM_SCHEMA_MODERN
+        )
     )
     # setup for discovery
@@ -1,7 +1,6 @@
 """Camera that loads a picture from an MQTT topic."""
 from __future__ import annotations
 
-import asyncio
 from base64 import b64decode
 import functools
 
@@ -17,17 +16,18 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 
 from . import subscription
-from .. import mqtt
+from .config import MQTT_BASE_SCHEMA
 from .const import CONF_ENCODING, CONF_QOS, CONF_TOPIC
 from .debug_info import log_messages
 from .mixins import (
     MQTT_ENTITY_COMMON_SCHEMA,
     MqttEntity,
-    async_get_platform_config_from_yaml,
     async_setup_entry_helper,
+    async_setup_platform_discovery,
     async_setup_platform_helper,
     warn_for_legacy_schema,
 )
+from .util import valid_subscribe_topic
 
 DEFAULT_NAME = "MQTT Camera"
 
@@ -40,10 +40,10 @@ MQTT_CAMERA_ATTRIBUTES_BLOCKED = frozenset(
     }
 )
 
-PLATFORM_SCHEMA_MODERN = mqtt.MQTT_BASE_SCHEMA.extend(
+PLATFORM_SCHEMA_MODERN = MQTT_BASE_SCHEMA.extend(
     {
         vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
-        vol.Required(CONF_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Required(CONF_TOPIC): valid_subscribe_topic,
     }
 ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema)
@@ -65,7 +65,11 @@ async def async_setup_platform(
     """Set up MQTT camera configured under the camera platform key (deprecated)."""
     # Deprecated in HA Core 2022.6
     await async_setup_platform_helper(
-        hass, camera.DOMAIN, config, async_add_entities, _async_setup_entity
+        hass,
+        camera.DOMAIN,
+        discovery_info or config,
+        async_add_entities,
+        _async_setup_entity,
     )
@@ -76,12 +80,9 @@ async def async_setup_entry(
 ) -> None:
     """Set up MQTT camera through configuration.yaml and dynamically through MQTT discovery."""
     # load and initialize platform config from configuration.yaml
-    await asyncio.gather(
-        *(
-            _async_setup_entity(hass, async_add_entities, config, config_entry)
-            for config in await async_get_platform_config_from_yaml(
-                hass, camera.DOMAIN, PLATFORM_SCHEMA_MODERN
-            )
-        )
+    config_entry.async_on_unload(
+        await async_setup_platform_discovery(
+            hass, camera.DOMAIN, PLATFORM_SCHEMA_MODERN
+        )
     )
     # setup for discovery
homeassistant/components/mqtt/client.py (new file, 659 lines)
@@ -0,0 +1,659 @@
"""Support for MQTT message handling."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Awaitable, Callable
|
||||
from functools import lru_cache, partial, wraps
|
||||
import inspect
|
||||
from itertools import groupby
|
||||
import logging
|
||||
from operator import attrgetter
|
||||
import ssl
|
||||
import time
|
||||
from typing import TYPE_CHECKING, Any, Union, cast
|
||||
import uuid
|
||||
|
||||
import attr
|
||||
import certifi
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_CLIENT_ID,
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
CONF_PROTOCOL,
|
||||
CONF_USERNAME,
|
||||
EVENT_HOMEASSISTANT_STARTED,
|
||||
)
|
||||
from homeassistant.core import CoreState, HassJob, HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.dispatcher import dispatcher_send
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import bind_hass
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.async_ import run_callback_threadsafe
|
||||
from homeassistant.util.logging import catch_log_exception
|
||||
|
||||
from .const import (
|
||||
ATTR_TOPIC,
|
||||
CONF_BIRTH_MESSAGE,
|
||||
CONF_BROKER,
|
||||
CONF_CERTIFICATE,
|
||||
CONF_CLIENT_CERT,
|
||||
CONF_CLIENT_KEY,
|
||||
CONF_KEEPALIVE,
|
||||
CONF_TLS_INSECURE,
|
||||
CONF_WILL_MESSAGE,
|
||||
DATA_MQTT,
|
||||
DEFAULT_ENCODING,
|
||||
DEFAULT_QOS,
|
||||
MQTT_CONNECTED,
|
||||
MQTT_DISCONNECTED,
|
||||
PROTOCOL_31,
|
||||
)
|
||||
from .discovery import LAST_DISCOVERY
|
||||
from .models import (
|
||||
AsyncMessageCallbackType,
|
||||
MessageCallbackType,
|
||||
PublishMessage,
|
||||
PublishPayloadType,
|
||||
ReceiveMessage,
|
||||
ReceivePayloadType,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
# Only import for paho-mqtt type checking here, imports are done locally
|
||||
# because integrations should be able to optionally rely on MQTT.
|
||||
import paho.mqtt.client as mqtt
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DISCOVERY_COOLDOWN = 2
|
||||
TIMEOUT_ACK = 10
|
||||
|
||||
SubscribePayloadType = Union[str, bytes] # Only bytes if encoding is None
|
||||
|
||||
|
||||
def publish(
|
||||
hass: HomeAssistant,
|
||||
topic: str,
|
||||
payload: PublishPayloadType,
|
||||
qos: int | None = 0,
|
||||
retain: bool | None = False,
|
||||
encoding: str | None = DEFAULT_ENCODING,
|
||||
) -> None:
|
||||
"""Publish message to a MQTT topic."""
|
||||
hass.add_job(async_publish, hass, topic, payload, qos, retain, encoding)
|
||||
|
||||
|
||||
async def async_publish(
|
||||
hass: HomeAssistant,
|
||||
topic: str,
|
||||
payload: PublishPayloadType,
|
||||
qos: int | None = 0,
|
||||
retain: bool | None = False,
|
||||
encoding: str | None = DEFAULT_ENCODING,
|
||||
) -> None:
|
||||
"""Publish message to a MQTT topic."""
|
||||
|
||||
outgoing_payload = payload
|
||||
if not isinstance(payload, bytes):
|
||||
if not encoding:
|
||||
_LOGGER.error(
|
||||
"Can't pass-through payload for publishing %s on %s with no encoding set, need 'bytes' got %s",
|
||||
payload,
|
||||
topic,
|
||||
type(payload),
|
||||
)
|
||||
return
|
||||
outgoing_payload = str(payload)
|
||||
if encoding != DEFAULT_ENCODING:
|
||||
# a string is encoded as utf-8 by default, other encoding requires bytes as payload
|
||||
try:
|
||||
outgoing_payload = outgoing_payload.encode(encoding)
|
||||
except (AttributeError, LookupError, UnicodeEncodeError):
|
||||
_LOGGER.error(
|
||||
"Can't encode payload for publishing %s on %s with encoding %s",
|
||||
payload,
|
||||
topic,
|
||||
encoding,
|
||||
)
|
||||
return
|
||||
|
||||
await hass.data[DATA_MQTT].async_publish(topic, outgoing_payload, qos, retain)
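

# These aliases describe the deprecated callback signature (topic, payload, qos);
# wrap_msg_callback() adapts such callbacks to the single-ReceiveMessage form.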
AsyncDeprecatedMessageCallbackType = Callable[
    [str, ReceivePayloadType, int], Awaitable[None]
]
DeprecatedMessageCallbackType = Callable[[str, ReceivePayloadType, int], None]


def wrap_msg_callback(
    msg_callback: AsyncDeprecatedMessageCallbackType | DeprecatedMessageCallbackType,
) -> AsyncMessageCallbackType | MessageCallbackType:
    """Wrap an MQTT message callback to support deprecated signature."""
    # Check for partials to properly determine if coroutine function
    check_func = msg_callback
    while isinstance(check_func, partial):
        check_func = check_func.func

    wrapper_func: AsyncMessageCallbackType | MessageCallbackType
    if asyncio.iscoroutinefunction(check_func):

        @wraps(msg_callback)
        async def async_wrapper(msg: ReceiveMessage) -> None:
            """Call with deprecated signature."""
            await cast(AsyncDeprecatedMessageCallbackType, msg_callback)(
                msg.topic, msg.payload, msg.qos
            )

        wrapper_func = async_wrapper
    else:

        @wraps(msg_callback)
        def wrapper(msg: ReceiveMessage) -> None:
            """Call with deprecated signature."""
            msg_callback(msg.topic, msg.payload, msg.qos)

        wrapper_func = wrapper
    return wrapper_func


@bind_hass
async def async_subscribe(
    hass: HomeAssistant,
    topic: str,
    msg_callback: AsyncMessageCallbackType
    | MessageCallbackType
    | DeprecatedMessageCallbackType
    | AsyncDeprecatedMessageCallbackType,
    qos: int = DEFAULT_QOS,
    encoding: str | None = "utf-8",
):
    """Subscribe to an MQTT topic.

    Call the return value to unsubscribe.
    """
    # Count callback parameters which don't have a default value
    non_default = 0
    if msg_callback:
        non_default = sum(
            p.default == inspect.Parameter.empty
            for _, p in inspect.signature(msg_callback).parameters.items()
        )

    wrapped_msg_callback = msg_callback
    # If we have 3 parameters with no default value, wrap the callback
    if non_default == 3:
        module = inspect.getmodule(msg_callback)
        _LOGGER.warning(
            "Signature of MQTT msg_callback '%s.%s' is deprecated",
            module.__name__ if module else "<unknown>",
            msg_callback.__name__,
        )
        wrapped_msg_callback = wrap_msg_callback(
            cast(DeprecatedMessageCallbackType, msg_callback)
        )

    async_remove = await hass.data[DATA_MQTT].async_subscribe(
        topic,
        catch_log_exception(
            wrapped_msg_callback,
            lambda msg: (
                f"Exception in {msg_callback.__name__} when handling msg on "
                f"'{msg.topic}': '{msg.payload}'"
            ),
        ),
        qos,
        encoding,
    )
    return async_remove


@bind_hass
def subscribe(
    hass: HomeAssistant,
    topic: str,
    msg_callback: MessageCallbackType,
    qos: int = DEFAULT_QOS,
    encoding: str = "utf-8",
) -> Callable[[], None]:
    """Subscribe to an MQTT topic."""
    async_remove = asyncio.run_coroutine_threadsafe(
        async_subscribe(hass, topic, msg_callback, qos, encoding), hass.loop
    ).result()

    def remove():
        """Remove listener."""
        run_callback_threadsafe(hass.loop, async_remove).result()

    return remove
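

# One Subscription is created per async_subscribe() call, so several
# subscriptions can share a topic; the broker-level unsubscribe happens only
# when the last subscription for that topic is removed (see _async_unsubscribe).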
@attr.s(slots=True, frozen=True)
class Subscription:
    """Class to hold data about an active subscription."""

    topic: str = attr.ib()
    matcher: Any = attr.ib()
    job: HassJob = attr.ib()
    qos: int = attr.ib(default=0)
    encoding: str | None = attr.ib(default="utf-8")


class MqttClientSetup:
    """Helper class to set up the paho-mqtt client from config."""

    def __init__(self, config: ConfigType) -> None:
        """Initialize the MQTT client setup helper."""

        # We don't import on the top because some integrations
        # should be able to optionally rely on MQTT.
        import paho.mqtt.client as mqtt  # pylint: disable=import-outside-toplevel

        if config[CONF_PROTOCOL] == PROTOCOL_31:
            proto = mqtt.MQTTv31
        else:
            proto = mqtt.MQTTv311

        if (client_id := config.get(CONF_CLIENT_ID)) is None:
            # PAHO MQTT relies on the MQTT server to generate random client IDs.
            # However, that feature is not mandatory so we generate our own.
            client_id = mqtt.base62(uuid.uuid4().int, padding=22)
        self._client = mqtt.Client(client_id, protocol=proto)

        # Enable logging
        self._client.enable_logger()

        username = config.get(CONF_USERNAME)
        password = config.get(CONF_PASSWORD)
        if username is not None:
            self._client.username_pw_set(username, password)

        # A certificate value of "auto" validates the broker against certifi's
        # bundled CA store.
        if (certificate := config.get(CONF_CERTIFICATE)) == "auto":
            certificate = certifi.where()

        client_key = config.get(CONF_CLIENT_KEY)
        client_cert = config.get(CONF_CLIENT_CERT)
        tls_insecure = config.get(CONF_TLS_INSECURE)
        if certificate is not None:
            self._client.tls_set(
                certificate,
                certfile=client_cert,
                keyfile=client_key,
                tls_version=ssl.PROTOCOL_TLS,
            )

            if tls_insecure is not None:
                self._client.tls_insecure_set(tls_insecure)

    @property
    def client(self) -> mqtt.Client:
        """Return the paho MQTT client."""
        return self._client


class MQTT:
    """Home Assistant MQTT client."""

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry,
        conf,
    ) -> None:
        """Initialize Home Assistant MQTT client."""
        # We don't import on the top because some integrations
        # should be able to optionally rely on MQTT.
        import paho.mqtt.client as mqtt  # pylint: disable=import-outside-toplevel

        self.hass = hass
        self.config_entry = config_entry
        self.conf = conf
        self.subscriptions: list[Subscription] = []
        self.connected = False
        self._ha_started = asyncio.Event()
        self._last_subscribe = time.time()
        self._mqttc: mqtt.Client = None
        self._paho_lock = asyncio.Lock()
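
        # Pending publish/subscribe/unsubscribe ACKs, keyed by paho-mqtt message
        # id (mid); each entry is an asyncio.Event set once the broker ACKs it.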
        self._pending_operations: dict[str, asyncio.Event] = {}

        if self.hass.state == CoreState.running:
            self._ha_started.set()
        else:

            @callback
            def ha_started(_):
                self._ha_started.set()

            self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, ha_started)

        self.init_client()

    def init_client(self):
        """Initialize paho client."""
        self._mqttc = MqttClientSetup(self.conf).client
        self._mqttc.on_connect = self._mqtt_on_connect
        self._mqttc.on_disconnect = self._mqtt_on_disconnect
        self._mqttc.on_message = self._mqtt_on_message
        self._mqttc.on_publish = self._mqtt_on_callback
        self._mqttc.on_subscribe = self._mqtt_on_callback
        self._mqttc.on_unsubscribe = self._mqtt_on_callback

        if (
            CONF_WILL_MESSAGE in self.conf
            and ATTR_TOPIC in self.conf[CONF_WILL_MESSAGE]
        ):
            will_message = PublishMessage(**self.conf[CONF_WILL_MESSAGE])
        else:
            will_message = None

        if will_message is not None:
            self._mqttc.will_set(
                topic=will_message.topic,
                payload=will_message.payload,
                qos=will_message.qos,
                retain=will_message.retain,
            )

    async def async_publish(
        self, topic: str, payload: PublishPayloadType, qos: int, retain: bool
    ) -> None:
        """Publish an MQTT message."""
        async with self._paho_lock:
            msg_info = await self.hass.async_add_executor_job(
                self._mqttc.publish, topic, payload, qos, retain
            )
            _LOGGER.debug(
                "Transmitting message on %s: '%s', mid: %s",
                topic,
                payload,
                msg_info.mid,
            )
            _raise_on_error(msg_info.rc)
        await self._wait_for_mid(msg_info.mid)

    async def async_connect(self) -> None:
        """Connect to the host. Does not process messages yet."""
        # pylint: disable-next=import-outside-toplevel
        import paho.mqtt.client as mqtt

        result: int | None = None
        try:
            result = await self.hass.async_add_executor_job(
                self._mqttc.connect,
                self.conf[CONF_BROKER],
                self.conf[CONF_PORT],
                self.conf[CONF_KEEPALIVE],
            )
        except OSError as err:
            _LOGGER.error("Failed to connect to MQTT server due to exception: %s", err)

        if result is not None and result != 0:
            _LOGGER.error(
                "Failed to connect to MQTT server: %s", mqtt.error_string(result)
            )

        self._mqttc.loop_start()

    async def async_disconnect(self):
        """Stop the MQTT client."""

        def stop():
            """Stop the MQTT client."""
            # Do not disconnect, we want the broker to always publish will
            self._mqttc.loop_stop()

        await self.hass.async_add_executor_job(stop)

    async def async_subscribe(
        self,
        topic: str,
        msg_callback: MessageCallbackType,
        qos: int,
        encoding: str | None = None,
    ) -> Callable[[], None]:
        """Set up a subscription to a topic with the provided qos.

        This method is a coroutine.
        """
        if not isinstance(topic, str):
            raise HomeAssistantError("Topic needs to be a string!")

        subscription = Subscription(
            topic, _matcher_for_topic(topic), HassJob(msg_callback), qos, encoding
        )
        self.subscriptions.append(subscription)
        self._matching_subscriptions.cache_clear()

        # Only subscribe if currently connected.
        if self.connected:
            self._last_subscribe = time.time()
            await self._async_perform_subscription(topic, qos)

        @callback
        def async_remove() -> None:
            """Remove subscription."""
            if subscription not in self.subscriptions:
                raise HomeAssistantError("Can't remove subscription twice")
            self.subscriptions.remove(subscription)
            self._matching_subscriptions.cache_clear()

            # Only unsubscribe if currently connected.
            if self.connected:
                self.hass.async_create_task(self._async_unsubscribe(topic))

        return async_remove

    async def _async_unsubscribe(self, topic: str) -> None:
        """Unsubscribe from a topic.

        This method is a coroutine.
        """
        if any(other.topic == topic for other in self.subscriptions):
            # Other subscriptions on topic remaining - don't unsubscribe.
            return

        async with self._paho_lock:
            result: int | None = None
            result, mid = await self.hass.async_add_executor_job(
                self._mqttc.unsubscribe, topic
            )
            _LOGGER.debug("Unsubscribing from %s, mid: %s", topic, mid)
            _raise_on_error(result)
        await self._wait_for_mid(mid)

    async def _async_perform_subscription(self, topic: str, qos: int) -> None:
        """Perform a paho-mqtt subscription."""
        async with self._paho_lock:
            result: int | None = None
            result, mid = await self.hass.async_add_executor_job(
                self._mqttc.subscribe, topic, qos
            )
            _LOGGER.debug("Subscribing to %s, mid: %s", topic, mid)
            _raise_on_error(result)
        await self._wait_for_mid(mid)

    def _mqtt_on_connect(self, _mqttc, _userdata, _flags, result_code: int) -> None:
        """On connect callback.

        Resubscribe to all topics we were subscribed to and publish birth
        message.
        """
        # pylint: disable-next=import-outside-toplevel
        import paho.mqtt.client as mqtt

        if result_code != mqtt.CONNACK_ACCEPTED:
            _LOGGER.error(
                "Unable to connect to the MQTT broker: %s",
                mqtt.connack_string(result_code),
            )
            return

        self.connected = True
        dispatcher_send(self.hass, MQTT_CONNECTED)
        _LOGGER.info(
            "Connected to MQTT server %s:%s (%s)",
            self.conf[CONF_BROKER],
            self.conf[CONF_PORT],
            result_code,
        )

        # Group subscriptions to only re-subscribe once for each topic.
        keyfunc = attrgetter("topic")
        for topic, subs in groupby(sorted(self.subscriptions, key=keyfunc), keyfunc):
            # Re-subscribe with the highest requested qos
            max_qos = max(subscription.qos for subscription in subs)
            self.hass.add_job(self._async_perform_subscription, topic, max_qos)

        if (
            CONF_BIRTH_MESSAGE in self.conf
            and ATTR_TOPIC in self.conf[CONF_BIRTH_MESSAGE]
        ):

            async def publish_birth_message(birth_message):
                await self._ha_started.wait()  # Wait for Home Assistant to start
                await self._discovery_cooldown()  # Wait for MQTT discovery to cool down
                await self.async_publish(
                    topic=birth_message.topic,
                    payload=birth_message.payload,
                    qos=birth_message.qos,
                    retain=birth_message.retain,
                )

            birth_message = PublishMessage(**self.conf[CONF_BIRTH_MESSAGE])
            asyncio.run_coroutine_threadsafe(
                publish_birth_message(birth_message), self.hass.loop
            )

    def _mqtt_on_message(self, _mqttc, _userdata, msg) -> None:
        """Message received callback."""
        self.hass.add_job(self._mqtt_handle_message, msg)
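
    # Matching a topic against every subscription is O(n); cache the results,
    # since the same topics recur constantly. async_subscribe() and
    # async_remove() call cache_clear() whenever the subscription list changes.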
    @lru_cache(2048)
    def _matching_subscriptions(self, topic):
        subscriptions = []
        for subscription in self.subscriptions:
            if subscription.matcher(topic):
                subscriptions.append(subscription)
        return subscriptions

    @callback
    def _mqtt_handle_message(self, msg) -> None:
        _LOGGER.debug(
            "Received message on %s%s: %s",
            msg.topic,
            " (retained)" if msg.retain else "",
            msg.payload[0:8192],
        )
        timestamp = dt_util.utcnow()

        subscriptions = self._matching_subscriptions(msg.topic)

        for subscription in subscriptions:

            payload: SubscribePayloadType = msg.payload
            if subscription.encoding is not None:
                try:
                    payload = msg.payload.decode(subscription.encoding)
                except (AttributeError, UnicodeDecodeError):
                    _LOGGER.warning(
                        "Can't decode payload %s on %s with encoding %s (for %s)",
                        msg.payload[0:8192],
                        msg.topic,
                        subscription.encoding,
                        subscription.job,
                    )
                    continue

            self.hass.async_run_hass_job(
                subscription.job,
                ReceiveMessage(
                    msg.topic,
                    payload,
                    msg.qos,
                    msg.retain,
                    subscription.topic,
                    timestamp,
                ),
            )

    def _mqtt_on_callback(self, _mqttc, _userdata, mid, _granted_qos=None) -> None:
        """Publish / Subscribe / Unsubscribe callback."""
        self.hass.add_job(self._mqtt_handle_mid, mid)

    @callback
    def _mqtt_handle_mid(self, mid) -> None:
        # Create the mid event if not created, either _mqtt_handle_mid or _wait_for_mid
        # may be executed first.
        if mid not in self._pending_operations:
            self._pending_operations[mid] = asyncio.Event()
        self._pending_operations[mid].set()

    def _mqtt_on_disconnect(self, _mqttc, _userdata, result_code: int) -> None:
        """Disconnected callback."""
        self.connected = False
        dispatcher_send(self.hass, MQTT_DISCONNECTED)
        _LOGGER.warning(
            "Disconnected from MQTT server %s:%s (%s)",
            self.conf[CONF_BROKER],
            self.conf[CONF_PORT],
            result_code,
        )

    async def _wait_for_mid(self, mid):
        """Wait for ACK from broker."""
        # Create the mid event if not created, either _mqtt_handle_mid or _wait_for_mid
        # may be executed first.
        if mid not in self._pending_operations:
            self._pending_operations[mid] = asyncio.Event()
        try:
            await asyncio.wait_for(self._pending_operations[mid].wait(), TIMEOUT_ACK)
        except asyncio.TimeoutError:
            _LOGGER.warning(
                "No ACK from MQTT server in %s seconds (mid: %s)", TIMEOUT_ACK, mid
            )
        finally:
            del self._pending_operations[mid]
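
    # Hold the birth message until neither MQTT discovery nor a new
    # subscription has been seen for DISCOVERY_COOLDOWN seconds.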
    async def _discovery_cooldown(self):
        now = time.time()
        # Reset discovery and subscribe cooldowns
        self.hass.data[LAST_DISCOVERY] = now
        self._last_subscribe = now

        last_discovery = self.hass.data[LAST_DISCOVERY]
        last_subscribe = self._last_subscribe
        wait_until = max(
            last_discovery + DISCOVERY_COOLDOWN, last_subscribe + DISCOVERY_COOLDOWN
        )
        while now < wait_until:
            await asyncio.sleep(wait_until - now)
            now = time.time()
            last_discovery = self.hass.data[LAST_DISCOVERY]
            last_subscribe = self._last_subscribe
            wait_until = max(
                last_discovery + DISCOVERY_COOLDOWN, last_subscribe + DISCOVERY_COOLDOWN
            )


def _raise_on_error(result_code: int | None) -> None:
    """Raise error if error result."""
    # pylint: disable-next=import-outside-toplevel
    import paho.mqtt.client as mqtt

    if result_code is not None and result_code != 0:
        raise HomeAssistantError(
            f"Error talking to MQTT: {mqtt.error_string(result_code)}"
        )
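

# Turn an MQTT subscription filter (which may contain + and # wildcards) into a
# callable that reports whether a concrete topic matches it.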
def _matcher_for_topic(subscription: str) -> Any:
    # pylint: disable-next=import-outside-toplevel
    from paho.mqtt.matcher import MQTTMatcher

    matcher = MQTTMatcher()
    matcher[subscription] = True

    return lambda topic: next(matcher.iter_match(topic), False)
@@ -1,7 +1,6 @@
 """Support for MQTT climate devices."""
 from __future__ import annotations
 
-import asyncio
 import functools
 import logging
 
@@ -44,18 +43,20 @@ import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 
-from . import MqttCommandTemplate, MqttValueTemplate, subscription
-from .. import mqtt
+from . import subscription
+from .config import DEFAULT_RETAIN, MQTT_BASE_SCHEMA
 from .const import CONF_ENCODING, CONF_QOS, CONF_RETAIN, PAYLOAD_NONE
 from .debug_info import log_messages
 from .mixins import (
     MQTT_ENTITY_COMMON_SCHEMA,
     MqttEntity,
-    async_get_platform_config_from_yaml,
     async_setup_entry_helper,
+    async_setup_platform_discovery,
     async_setup_platform_helper,
     warn_for_legacy_schema,
 )
+from .models import MqttCommandTemplate, MqttValueTemplate
+from .util import valid_publish_topic, valid_subscribe_topic
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -232,33 +233,33 @@ def valid_preset_mode_configuration(config):
     return config
 
 
-_PLATFORM_SCHEMA_BASE = mqtt.MQTT_BASE_SCHEMA.extend(
+_PLATFORM_SCHEMA_BASE = MQTT_BASE_SCHEMA.extend(
     {
-        vol.Optional(CONF_AUX_COMMAND_TOPIC): mqtt.valid_publish_topic,
+        vol.Optional(CONF_AUX_COMMAND_TOPIC): valid_publish_topic,
         vol.Optional(CONF_AUX_STATE_TEMPLATE): cv.template,
-        vol.Optional(CONF_AUX_STATE_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Optional(CONF_AUX_STATE_TOPIC): valid_subscribe_topic,
         # AWAY and HOLD mode topics and templates are deprecated, support will be removed with release 2022.9
-        vol.Optional(CONF_AWAY_MODE_COMMAND_TOPIC): mqtt.valid_publish_topic,
+        vol.Optional(CONF_AWAY_MODE_COMMAND_TOPIC): valid_publish_topic,
         vol.Optional(CONF_AWAY_MODE_STATE_TEMPLATE): cv.template,
-        vol.Optional(CONF_AWAY_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Optional(CONF_AWAY_MODE_STATE_TOPIC): valid_subscribe_topic,
         vol.Optional(CONF_CURRENT_TEMP_TEMPLATE): cv.template,
-        vol.Optional(CONF_CURRENT_TEMP_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Optional(CONF_CURRENT_TEMP_TOPIC): valid_subscribe_topic,
         vol.Optional(CONF_FAN_MODE_COMMAND_TEMPLATE): cv.template,
-        vol.Optional(CONF_FAN_MODE_COMMAND_TOPIC): mqtt.valid_publish_topic,
+        vol.Optional(CONF_FAN_MODE_COMMAND_TOPIC): valid_publish_topic,
         vol.Optional(
            CONF_FAN_MODE_LIST,
            default=[FAN_AUTO, FAN_LOW, FAN_MEDIUM, FAN_HIGH],
         ): cv.ensure_list,
         vol.Optional(CONF_FAN_MODE_STATE_TEMPLATE): cv.template,
-        vol.Optional(CONF_FAN_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Optional(CONF_FAN_MODE_STATE_TOPIC): valid_subscribe_topic,
         # AWAY and HOLD mode topics and templates are deprecated, support will be removed with release 2022.9
         vol.Optional(CONF_HOLD_COMMAND_TEMPLATE): cv.template,
-        vol.Optional(CONF_HOLD_COMMAND_TOPIC): mqtt.valid_publish_topic,
+        vol.Optional(CONF_HOLD_COMMAND_TOPIC): valid_publish_topic,
         vol.Optional(CONF_HOLD_STATE_TEMPLATE): cv.template,
-        vol.Optional(CONF_HOLD_STATE_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Optional(CONF_HOLD_STATE_TOPIC): valid_subscribe_topic,
         vol.Optional(CONF_HOLD_LIST): cv.ensure_list,
         vol.Optional(CONF_MODE_COMMAND_TEMPLATE): cv.template,
-        vol.Optional(CONF_MODE_COMMAND_TOPIC): mqtt.valid_publish_topic,
+        vol.Optional(CONF_MODE_COMMAND_TOPIC): valid_publish_topic,
         vol.Optional(
             CONF_MODE_LIST,
             default=[
@@ -271,54 +272,54 @@ _PLATFORM_SCHEMA_BASE = mqtt.MQTT_BASE_SCHEMA.extend(
             ],
         ): cv.ensure_list,
         vol.Optional(CONF_MODE_STATE_TEMPLATE): cv.template,
-        vol.Optional(CONF_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Optional(CONF_MODE_STATE_TOPIC): valid_subscribe_topic,
         vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
         vol.Optional(CONF_PAYLOAD_ON, default="ON"): cv.string,
         vol.Optional(CONF_PAYLOAD_OFF, default="OFF"): cv.string,
-        vol.Optional(CONF_POWER_COMMAND_TOPIC): mqtt.valid_publish_topic,
+        vol.Optional(CONF_POWER_COMMAND_TOPIC): valid_publish_topic,
         vol.Optional(CONF_POWER_STATE_TEMPLATE): cv.template,
-        vol.Optional(CONF_POWER_STATE_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Optional(CONF_POWER_STATE_TOPIC): valid_subscribe_topic,
         vol.Optional(CONF_PRECISION): vol.In(
             [PRECISION_TENTHS, PRECISION_HALVES, PRECISION_WHOLE]
         ),
-        vol.Optional(CONF_RETAIN, default=mqtt.DEFAULT_RETAIN): cv.boolean,
+        vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
         # CONF_SEND_IF_OFF is deprecated, support will be removed with release 2022.9
         vol.Optional(CONF_SEND_IF_OFF): cv.boolean,
         vol.Optional(CONF_ACTION_TEMPLATE): cv.template,
-        vol.Optional(CONF_ACTION_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Optional(CONF_ACTION_TOPIC): valid_subscribe_topic,
         # CONF_PRESET_MODE_COMMAND_TOPIC and CONF_PRESET_MODES_LIST must be used together
         vol.Inclusive(
             CONF_PRESET_MODE_COMMAND_TOPIC, "preset_modes"
-        ): mqtt.valid_publish_topic,
+        ): valid_publish_topic,
         vol.Inclusive(
             CONF_PRESET_MODES_LIST, "preset_modes", default=[]
         ): cv.ensure_list,
         vol.Optional(CONF_PRESET_MODE_COMMAND_TEMPLATE): cv.template,
-        vol.Optional(CONF_PRESET_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Optional(CONF_PRESET_MODE_STATE_TOPIC): valid_subscribe_topic,
        vol.Optional(CONF_PRESET_MODE_VALUE_TEMPLATE): cv.template,
         vol.Optional(CONF_SWING_MODE_COMMAND_TEMPLATE): cv.template,
-        vol.Optional(CONF_SWING_MODE_COMMAND_TOPIC): mqtt.valid_publish_topic,
+        vol.Optional(CONF_SWING_MODE_COMMAND_TOPIC): valid_publish_topic,
         vol.Optional(
             CONF_SWING_MODE_LIST, default=[SWING_ON, SWING_OFF]
         ): cv.ensure_list,
         vol.Optional(CONF_SWING_MODE_STATE_TEMPLATE): cv.template,
-        vol.Optional(CONF_SWING_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Optional(CONF_SWING_MODE_STATE_TOPIC): valid_subscribe_topic,
         vol.Optional(CONF_TEMP_INITIAL, default=21): cv.positive_int,
         vol.Optional(CONF_TEMP_MIN, default=DEFAULT_MIN_TEMP): vol.Coerce(float),
         vol.Optional(CONF_TEMP_MAX, default=DEFAULT_MAX_TEMP): vol.Coerce(float),
         vol.Optional(CONF_TEMP_STEP, default=1.0): vol.Coerce(float),
         vol.Optional(CONF_TEMP_COMMAND_TEMPLATE): cv.template,
-        vol.Optional(CONF_TEMP_COMMAND_TOPIC): mqtt.valid_publish_topic,
+        vol.Optional(CONF_TEMP_COMMAND_TOPIC): valid_publish_topic,
         vol.Optional(CONF_TEMP_HIGH_COMMAND_TEMPLATE): cv.template,
-        vol.Optional(CONF_TEMP_HIGH_COMMAND_TOPIC): mqtt.valid_publish_topic,
-        vol.Optional(CONF_TEMP_HIGH_STATE_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Optional(CONF_TEMP_HIGH_COMMAND_TOPIC): valid_publish_topic,
+        vol.Optional(CONF_TEMP_HIGH_STATE_TOPIC): valid_subscribe_topic,
         vol.Optional(CONF_TEMP_HIGH_STATE_TEMPLATE): cv.template,
         vol.Optional(CONF_TEMP_LOW_COMMAND_TEMPLATE): cv.template,
-        vol.Optional(CONF_TEMP_LOW_COMMAND_TOPIC): mqtt.valid_publish_topic,
+        vol.Optional(CONF_TEMP_LOW_COMMAND_TOPIC): valid_publish_topic,
         vol.Optional(CONF_TEMP_LOW_STATE_TEMPLATE): cv.template,
-        vol.Optional(CONF_TEMP_LOW_STATE_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Optional(CONF_TEMP_LOW_STATE_TOPIC): valid_subscribe_topic,
         vol.Optional(CONF_TEMP_STATE_TEMPLATE): cv.template,
-        vol.Optional(CONF_TEMP_STATE_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Optional(CONF_TEMP_STATE_TOPIC): valid_subscribe_topic,
         vol.Optional(CONF_TEMPERATURE_UNIT): cv.temperature_unit,
         vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
     }
@@ -375,7 +376,11 @@ async def async_setup_platform(
     """Set up MQTT climate configured under the climate platform key (deprecated)."""
     # The use of PLATFORM_SCHEMA is deprecated in HA Core 2022.6
     await async_setup_platform_helper(
-        hass, climate.DOMAIN, config, async_add_entities, _async_setup_entity
+        hass,
+        climate.DOMAIN,
+        discovery_info or config,
+        async_add_entities,
+        _async_setup_entity,
     )
@@ -386,12 +391,9 @@ async def async_setup_entry(
 ) -> None:
     """Set up MQTT climate device through configuration.yaml and dynamically through MQTT discovery."""
     # load and initialize platform config from configuration.yaml
-    await asyncio.gather(
-        *(
-            _async_setup_entity(hass, async_add_entities, config, config_entry)
-            for config in await async_get_platform_config_from_yaml(
-                hass, climate.DOMAIN, PLATFORM_SCHEMA_MODERN
-            )
-        )
+    config_entry.async_on_unload(
+        await async_setup_platform_discovery(
+            hass, climate.DOMAIN, PLATFORM_SCHEMA_MODERN
+        )
     )
     # setup for discovery
homeassistant/components/mqtt/config.py (new file, 148 additions)
@@ -0,0 +1,148 @@
"""Support for MQTT message handling."""
from __future__ import annotations

import voluptuous as vol

from homeassistant.const import (
CONF_CLIENT_ID,
CONF_DISCOVERY,
CONF_PASSWORD,
CONF_PORT,
CONF_PROTOCOL,
CONF_USERNAME,
CONF_VALUE_TEMPLATE,
)
from homeassistant.helpers import config_validation as cv

from .const import (
ATTR_PAYLOAD,
ATTR_QOS,
ATTR_RETAIN,
ATTR_TOPIC,
CONF_BIRTH_MESSAGE,
CONF_BROKER,
CONF_CERTIFICATE,
CONF_CLIENT_CERT,
CONF_CLIENT_KEY,
CONF_COMMAND_TOPIC,
CONF_DISCOVERY_PREFIX,
CONF_ENCODING,
CONF_KEEPALIVE,
CONF_QOS,
CONF_RETAIN,
CONF_STATE_TOPIC,
CONF_TLS_INSECURE,
CONF_TLS_VERSION,
CONF_WILL_MESSAGE,
DEFAULT_BIRTH,
DEFAULT_DISCOVERY,
DEFAULT_ENCODING,
DEFAULT_PREFIX,
DEFAULT_QOS,
DEFAULT_RETAIN,
DEFAULT_WILL,
PLATFORMS,
PROTOCOL_31,
PROTOCOL_311,
)
from .util import _VALID_QOS_SCHEMA, valid_publish_topic, valid_subscribe_topic

DEFAULT_PORT = 1883
DEFAULT_KEEPALIVE = 60
DEFAULT_PROTOCOL = PROTOCOL_311
DEFAULT_TLS_PROTOCOL = "auto"

DEFAULT_VALUES = {
CONF_BIRTH_MESSAGE: DEFAULT_BIRTH,
CONF_DISCOVERY: DEFAULT_DISCOVERY,
CONF_PORT: DEFAULT_PORT,
CONF_TLS_VERSION: DEFAULT_TLS_PROTOCOL,
CONF_WILL_MESSAGE: DEFAULT_WILL,
}

CLIENT_KEY_AUTH_MSG = (
"client_key and client_cert must both be present in "
"the MQTT broker configuration"
)

MQTT_WILL_BIRTH_SCHEMA = vol.Schema(
{
vol.Inclusive(ATTR_TOPIC, "topic_payload"): valid_publish_topic,
vol.Inclusive(ATTR_PAYLOAD, "topic_payload"): cv.string,
vol.Optional(ATTR_QOS, default=DEFAULT_QOS): _VALID_QOS_SCHEMA,
vol.Optional(ATTR_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
},
required=True,
)

PLATFORM_CONFIG_SCHEMA_BASE = vol.Schema(
{vol.Optional(platform.value): cv.ensure_list for platform in PLATFORMS}
)

CONFIG_SCHEMA_BASE = PLATFORM_CONFIG_SCHEMA_BASE.extend(
{
vol.Optional(CONF_CLIENT_ID): cv.string,
vol.Optional(CONF_KEEPALIVE, default=DEFAULT_KEEPALIVE): vol.All(
vol.Coerce(int), vol.Range(min=15)
),
vol.Optional(CONF_BROKER): cv.string,
vol.Optional(CONF_PORT): cv.port,
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_CERTIFICATE): vol.Any("auto", cv.isfile),
vol.Inclusive(
CONF_CLIENT_KEY, "client_key_auth", msg=CLIENT_KEY_AUTH_MSG
): cv.isfile,
vol.Inclusive(
CONF_CLIENT_CERT, "client_key_auth", msg=CLIENT_KEY_AUTH_MSG
): cv.isfile,
vol.Optional(CONF_TLS_INSECURE): cv.boolean,
vol.Optional(CONF_TLS_VERSION): vol.Any("auto", "1.0", "1.1", "1.2"),
vol.Optional(CONF_PROTOCOL, default=DEFAULT_PROTOCOL): vol.All(
cv.string, vol.In([PROTOCOL_31, PROTOCOL_311])
),
vol.Optional(CONF_WILL_MESSAGE): MQTT_WILL_BIRTH_SCHEMA,
vol.Optional(CONF_BIRTH_MESSAGE): MQTT_WILL_BIRTH_SCHEMA,
vol.Optional(CONF_DISCOVERY): cv.boolean,
# discovery_prefix must be a valid publish topic because if no
# state topic is specified, it will be created with the given prefix.
vol.Optional(
CONF_DISCOVERY_PREFIX, default=DEFAULT_PREFIX
): valid_publish_topic,
}
)

DEPRECATED_CONFIG_KEYS = [
CONF_BIRTH_MESSAGE,
CONF_BROKER,
CONF_DISCOVERY,
CONF_PASSWORD,
CONF_PORT,
CONF_TLS_VERSION,
CONF_USERNAME,
CONF_WILL_MESSAGE,
]

SCHEMA_BASE = {
vol.Optional(CONF_QOS, default=DEFAULT_QOS): _VALID_QOS_SCHEMA,
vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): cv.string,
}

MQTT_BASE_SCHEMA = vol.Schema(SCHEMA_BASE)

# Sensor type platforms subscribe to MQTT events
MQTT_RO_SCHEMA = MQTT_BASE_SCHEMA.extend(
{
vol.Required(CONF_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
}
)

# Switch type platforms publish to MQTT and may subscribe
MQTT_RW_SCHEMA = MQTT_BASE_SCHEMA.extend(
{
vol.Required(CONF_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic,
}
)
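For orientation, a minimal sketch of what the relocated read/write schema accepts (topic values here are made up; qos, encoding and retain defaults come from SCHEMA_BASE and DEFAULT_RETAIN above):

from homeassistant.components.mqtt.config import MQTT_RW_SCHEMA

# Validate a minimal switch-style platform config; defaults are filled in.
conf = MQTT_RW_SCHEMA(
    {
        "command_topic": "home/kitchen/switch/set",  # required publish topic
        "state_topic": "home/kitchen/switch/state",  # optional subscribe topic
    }
)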
@@ -17,7 +17,7 @@ from homeassistant.const import (
)
from homeassistant.data_entry_flow import FlowResult

from . import MqttClientSetup
from .client import MqttClientSetup
from .const import (
ATTR_PAYLOAD,
ATTR_QOS,

@@ -1,5 +1,5 @@
"""Constants used by multiple MQTT modules."""
from homeassistant.const import CONF_PAYLOAD
from homeassistant.const import CONF_PAYLOAD, Platform

ATTR_DISCOVERY_HASH = "discovery_hash"
ATTR_DISCOVERY_PAYLOAD = "discovery_payload"
@@ -14,7 +14,9 @@ CONF_BROKER = "broker"
CONF_BIRTH_MESSAGE = "birth_message"
CONF_COMMAND_TEMPLATE = "command_template"
CONF_COMMAND_TOPIC = "command_topic"
CONF_DISCOVERY_PREFIX = "discovery_prefix"
CONF_ENCODING = "encoding"
CONF_KEEPALIVE = "keepalive"
CONF_QOS = ATTR_QOS
CONF_RETAIN = ATTR_RETAIN
CONF_STATE_TOPIC = "state_topic"
@@ -30,8 +32,10 @@ CONF_TLS_VERSION = "tls_version"

CONFIG_ENTRY_IS_SETUP = "mqtt_config_entry_is_setup"
DATA_CONFIG_ENTRY_LOCK = "mqtt_config_entry_lock"
DATA_MQTT = "mqtt"
DATA_MQTT_CONFIG = "mqtt_config"
DATA_MQTT_RELOAD_NEEDED = "mqtt_reload_needed"
DATA_MQTT_UPDATED_CONFIG = "mqtt_updated_config"

DEFAULT_PREFIX = "homeassistant"
DEFAULT_BIRTH_WILL_TOPIC = DEFAULT_PREFIX + "/status"
@@ -60,9 +64,31 @@ DOMAIN = "mqtt"

MQTT_CONNECTED = "mqtt_connected"
MQTT_DISCONNECTED = "mqtt_disconnected"
MQTT_RELOADED = "mqtt_reloaded"

PAYLOAD_EMPTY_JSON = "{}"
PAYLOAD_NONE = "None"

PROTOCOL_31 = "3.1"
PROTOCOL_311 = "3.1.1"

PLATFORMS = [
Platform.ALARM_CONTROL_PANEL,
Platform.BINARY_SENSOR,
Platform.BUTTON,
Platform.CAMERA,
Platform.CLIMATE,
Platform.DEVICE_TRACKER,
Platform.COVER,
Platform.FAN,
Platform.HUMIDIFIER,
Platform.LIGHT,
Platform.LOCK,
Platform.NUMBER,
Platform.SELECT,
Platform.SCENE,
Platform.SENSOR,
Platform.SIREN,
Platform.SWITCH,
Platform.VACUUM,
]

@@ -1,7 +1,6 @@
"""Support for MQTT cover devices."""
from __future__ import annotations

import asyncio
import functools
from json import JSONDecodeError, loads as json_loads
import logging
@@ -26,15 +25,14 @@ from homeassistant.const import (
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import MqttCommandTemplate, MqttValueTemplate, subscription
from .. import mqtt
from . import subscription
from .config import MQTT_BASE_SCHEMA
from .const import (
CONF_COMMAND_TOPIC,
CONF_ENCODING,
@@ -46,11 +44,13 @@ from .debug_info import log_messages
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttEntity,
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
warn_for_legacy_schema,
)
from .models import MqttCommandTemplate, MqttValueTemplate
from .util import valid_publish_topic, valid_subscribe_topic

_LOGGER = logging.getLogger(__name__)

@@ -152,11 +152,11 @@ def validate_options(value):
return value

_PLATFORM_SCHEMA_BASE = mqtt.MQTT_BASE_SCHEMA.extend(
_PLATFORM_SCHEMA_BASE = MQTT_BASE_SCHEMA.extend(
{
vol.Optional(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_GET_POSITION_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_GET_POSITION_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_PAYLOAD_CLOSE, default=DEFAULT_PAYLOAD_CLOSE): vol.Any(
@@ -172,24 +172,24 @@ _PLATFORM_SCHEMA_BASE = mqtt.MQTT_BASE_SCHEMA.extend(
vol.Optional(CONF_POSITION_OPEN, default=DEFAULT_POSITION_OPEN): int,
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_SET_POSITION_TEMPLATE): cv.template,
vol.Optional(CONF_SET_POSITION_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_SET_POSITION_TOPIC): valid_publish_topic,
vol.Optional(CONF_STATE_CLOSED, default=STATE_CLOSED): cv.string,
vol.Optional(CONF_STATE_CLOSING, default=STATE_CLOSING): cv.string,
vol.Optional(CONF_STATE_OPEN, default=STATE_OPEN): cv.string,
vol.Optional(CONF_STATE_OPENING, default=STATE_OPENING): cv.string,
vol.Optional(CONF_STATE_STOPPED, default=DEFAULT_STATE_STOPPED): cv.string,
vol.Optional(CONF_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(
CONF_TILT_CLOSED_POSITION, default=DEFAULT_TILT_CLOSED_POSITION
): int,
vol.Optional(CONF_TILT_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_TILT_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_TILT_MAX, default=DEFAULT_TILT_MAX): int,
vol.Optional(CONF_TILT_MIN, default=DEFAULT_TILT_MIN): int,
vol.Optional(CONF_TILT_OPEN_POSITION, default=DEFAULT_TILT_OPEN_POSITION): int,
vol.Optional(
CONF_TILT_STATE_OPTIMISTIC, default=DEFAULT_TILT_OPTIMISTIC
): cv.boolean,
vol.Optional(CONF_TILT_STATUS_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_TILT_STATUS_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_TILT_STATUS_TEMPLATE): cv.template,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_GET_POSITION_TEMPLATE): cv.template,
@@ -225,7 +225,11 @@ async def async_setup_platform(
"""Set up MQTT covers configured under the fan platform key (deprecated)."""
# Deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, cover.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
cover.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)

@@ -236,13 +240,8 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT cover through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, cover.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
config_entry.async_on_unload(
await async_setup_platform_discovery(hass, cover.DOMAIN, PLATFORM_SCHEMA_MODERN)
)
# setup for discovery
setup = functools.partial(
@@ -470,7 +469,6 @@ class MqttCover(MqttEntity, CoverEntity):
}

if self._config.get(CONF_TILT_STATUS_TOPIC) is not None:
self._tilt_value = STATE_UNKNOWN
topics["tilt_status_topic"] = {
"topic": self._config.get(CONF_TILT_STATUS_TOPIC),
"msg_callback": tilt_message_received,

@@ -3,8 +3,10 @@ import functools

import voluptuous as vol

import homeassistant.helpers.config_validation as cv

from . import device_trigger
from .. import mqtt
from .config import MQTT_BASE_SCHEMA
from .mixins import async_setup_entry_helper

AUTOMATION_TYPE_TRIGGER = "trigger"
@@ -12,10 +14,10 @@ AUTOMATION_TYPES = [AUTOMATION_TYPE_TRIGGER]
AUTOMATION_TYPES_SCHEMA = vol.In(AUTOMATION_TYPES)
CONF_AUTOMATION_TYPE = "automation_type"

PLATFORM_SCHEMA = mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend(
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(
{vol.Required(CONF_AUTOMATION_TYPE): AUTOMATION_TYPES_SCHEMA},
extra=vol.ALLOW_EXTRA,
)
).extend(MQTT_BASE_SCHEMA.schema)
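A note on the chained extend above: vol.Schema.extend takes a plain mapping, which is why the base schema's .schema dict is passed in. A standalone sketch of the same pattern (toy keys, not the real config names):

import voluptuous as vol

base = vol.Schema({vol.Optional("qos", default=0): int})
schema = vol.Schema(
    {vol.Required("automation_type"): str}, extra=vol.ALLOW_EXTRA
).extend(base.schema)  # merges the base keys into the platform schema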
async def async_setup_entry(hass, config_entry):

@@ -19,8 +19,8 @@ from homeassistant.const import (
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv

from .. import MqttValueTemplate, subscription
from ... import mqtt
from .. import subscription
from ..config import MQTT_RO_SCHEMA
from ..const import CONF_QOS, CONF_STATE_TOPIC
from ..debug_info import log_messages
from ..mixins import (
@@ -29,12 +29,13 @@ from ..mixins import (
async_get_platform_config_from_yaml,
async_setup_entry_helper,
)
from ..models import MqttValueTemplate

CONF_PAYLOAD_HOME = "payload_home"
CONF_PAYLOAD_NOT_HOME = "payload_not_home"
CONF_SOURCE_TYPE = "source_type"

PLATFORM_SCHEMA_MODERN = mqtt.MQTT_RO_SCHEMA.extend(
PLATFORM_SCHEMA_MODERN = MQTT_RO_SCHEMA.extend(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_PAYLOAD_HOME, default=STATE_HOME): cv.string,

@@ -7,16 +7,18 @@ from homeassistant.const import CONF_DEVICES, STATE_HOME, STATE_NOT_HOME
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv

from ... import mqtt
from ..client import async_subscribe
from ..config import SCHEMA_BASE
from ..const import CONF_QOS
from ..util import valid_subscribe_topic

CONF_PAYLOAD_HOME = "payload_home"
CONF_PAYLOAD_NOT_HOME = "payload_not_home"
CONF_SOURCE_TYPE = "source_type"

PLATFORM_SCHEMA_YAML = PLATFORM_SCHEMA.extend(mqtt.SCHEMA_BASE).extend(
PLATFORM_SCHEMA_YAML = PLATFORM_SCHEMA.extend(SCHEMA_BASE).extend(
{
vol.Required(CONF_DEVICES): {cv.string: mqtt.valid_subscribe_topic},
vol.Required(CONF_DEVICES): {cv.string: valid_subscribe_topic},
vol.Optional(CONF_PAYLOAD_HOME, default=STATE_HOME): cv.string,
vol.Optional(CONF_PAYLOAD_NOT_HOME, default=STATE_NOT_HOME): cv.string,
vol.Optional(CONF_SOURCE_TYPE): vol.In(SOURCE_TYPES),
@@ -50,6 +52,6 @@ async def async_setup_scanner_from_yaml(hass, config, async_see, discovery_info=

hass.async_create_task(async_see(**see_args))

await mqtt.async_subscribe(hass, topic, async_message_received, qos)
await async_subscribe(hass, topic, async_message_received, qos)

return True

@@ -29,8 +29,15 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import ConfigType

from . import debug_info, trigger as mqtt_trigger
from .. import mqtt
from .const import ATTR_DISCOVERY_HASH, CONF_PAYLOAD, CONF_QOS, CONF_TOPIC, DOMAIN
from .config import MQTT_BASE_SCHEMA
from .const import (
ATTR_DISCOVERY_HASH,
CONF_ENCODING,
CONF_PAYLOAD,
CONF_QOS,
CONF_TOPIC,
DOMAIN,
)
from .discovery import MQTT_DISCOVERY_DONE
from .mixins import (
MQTT_ENTITY_DEVICE_INFO_SCHEMA,
@@ -64,7 +71,7 @@ TRIGGER_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend(
}
)

TRIGGER_DISCOVERY_SCHEMA = mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend(
TRIGGER_DISCOVERY_SCHEMA = MQTT_BASE_SCHEMA.extend(
{
vol.Required(CONF_AUTOMATION_TYPE): str,
vol.Required(CONF_DEVICE): MQTT_ENTITY_DEVICE_INFO_SCHEMA,
@@ -94,10 +101,10 @@ class TriggerInstance:
async def async_attach_trigger(self) -> None:
"""Attach MQTT trigger."""
mqtt_config = {
mqtt_trigger.CONF_PLATFORM: mqtt.DOMAIN,
mqtt_trigger.CONF_TOPIC: self.trigger.topic,
mqtt_trigger.CONF_ENCODING: DEFAULT_ENCODING,
mqtt_trigger.CONF_QOS: self.trigger.qos,
CONF_PLATFORM: DOMAIN,
CONF_TOPIC: self.trigger.topic,
CONF_ENCODING: DEFAULT_ENCODING,
CONF_QOS: self.trigger.qos,
}
if self.trigger.payload:
mqtt_config[CONF_PAYLOAD] = self.trigger.payload

@@ -1,7 +1,6 @@
"""Support for MQTT fans."""
from __future__ import annotations

import asyncio
import functools
import logging
import math
@@ -34,8 +33,8 @@ from homeassistant.util.percentage import (
ranged_value_to_percentage,
)

from . import MqttCommandTemplate, MqttValueTemplate, subscription
from .. import mqtt
from . import subscription
from .config import MQTT_RW_SCHEMA
from .const import (
CONF_COMMAND_TEMPLATE,
CONF_COMMAND_TOPIC,
@@ -50,11 +49,13 @@ from .debug_info import log_messages
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttEntity,
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
warn_for_legacy_schema,
)
from .models import MqttCommandTemplate, MqttValueTemplate
from .util import valid_publish_topic, valid_subscribe_topic

CONF_PERCENTAGE_STATE_TOPIC = "percentage_state_topic"
CONF_PERCENTAGE_COMMAND_TOPIC = "percentage_command_topic"
@@ -125,28 +126,28 @@ def valid_preset_mode_configuration(config):
return config

_PLATFORM_SCHEMA_BASE = mqtt.MQTT_RW_SCHEMA.extend(
_PLATFORM_SCHEMA_BASE = MQTT_RW_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_OSCILLATION_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_OSCILLATION_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_OSCILLATION_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_OSCILLATION_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_OSCILLATION_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_OSCILLATION_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_PERCENTAGE_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_PERCENTAGE_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_PERCENTAGE_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_PERCENTAGE_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_PERCENTAGE_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_PERCENTAGE_VALUE_TEMPLATE): cv.template,
# CONF_PRESET_MODE_COMMAND_TOPIC and CONF_PRESET_MODES_LIST must be used together
vol.Inclusive(
CONF_PRESET_MODE_COMMAND_TOPIC, "preset_modes"
): mqtt.valid_publish_topic,
): valid_publish_topic,
vol.Inclusive(
CONF_PRESET_MODES_LIST, "preset_modes", default=[]
): cv.ensure_list,
vol.Optional(CONF_PRESET_MODE_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_PRESET_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_PRESET_MODE_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_PRESET_MODE_VALUE_TEMPLATE): cv.template,
vol.Optional(
CONF_SPEED_RANGE_MIN, default=DEFAULT_SPEED_RANGE_MIN
@@ -168,8 +169,8 @@ _PLATFORM_SCHEMA_BASE = mqtt.MQTT_RW_SCHEMA.extend(
vol.Optional(
CONF_PAYLOAD_OSCILLATION_ON, default=OSCILLATE_ON_PAYLOAD
): cv.string,
vol.Optional(CONF_SPEED_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_SPEED_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_SPEED_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_SPEED_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_SPEED_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_STATE_VALUE_TEMPLATE): cv.template,
}
@@ -215,7 +216,11 @@ async def async_setup_platform(
"""Set up MQTT fans configured under the fan platform key (deprecated)."""
# Deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, fan.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
fan.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)

@@ -226,13 +231,8 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT fan through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, fan.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
config_entry.async_on_unload(
await async_setup_platform_discovery(hass, fan.DOMAIN, PLATFORM_SCHEMA_MODERN)
)
# setup for discovery
setup = functools.partial(

@@ -1,7 +1,6 @@
"""Support for MQTT humidifiers."""
from __future__ import annotations

import asyncio
import functools
import logging

@@ -30,8 +29,8 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import MqttCommandTemplate, MqttValueTemplate, subscription
from .. import mqtt
from . import subscription
from .config import MQTT_RW_SCHEMA
from .const import (
CONF_COMMAND_TEMPLATE,
CONF_COMMAND_TOPIC,
@@ -46,11 +45,13 @@ from .debug_info import log_messages
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttEntity,
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
warn_for_legacy_schema,
)
from .models import MqttCommandTemplate, MqttValueTemplate
from .util import valid_publish_topic, valid_subscribe_topic

CONF_AVAILABLE_MODES_LIST = "modes"
CONF_DEVICE_CLASS = "device_class"
@@ -103,15 +104,13 @@ def valid_humidity_range_configuration(config):
return config

_PLATFORM_SCHEMA_BASE = mqtt.MQTT_RW_SCHEMA.extend(
_PLATFORM_SCHEMA_BASE = MQTT_RW_SCHEMA.extend(
{
# CONF_AVAIALABLE_MODES_LIST and CONF_MODE_COMMAND_TOPIC must be used together
vol.Inclusive(
CONF_AVAILABLE_MODES_LIST, "available_modes", default=[]
): cv.ensure_list,
vol.Inclusive(
CONF_MODE_COMMAND_TOPIC, "available_modes"
): mqtt.valid_publish_topic,
vol.Inclusive(CONF_MODE_COMMAND_TOPIC, "available_modes"): valid_publish_topic,
vol.Optional(CONF_COMMAND_TEMPLATE): cv.template,
vol.Optional(
CONF_DEVICE_CLASS, default=HumidifierDeviceClass.HUMIDIFIER
@@ -119,14 +118,14 @@ _PLATFORM_SCHEMA_BASE = mqtt.MQTT_RW_SCHEMA.extend(
[HumidifierDeviceClass.HUMIDIFIER, HumidifierDeviceClass.DEHUMIDIFIER]
),
vol.Optional(CONF_MODE_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_MODE_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_MODE_STATE_TEMPLATE): cv.template,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_PAYLOAD_OFF, default=DEFAULT_PAYLOAD_OFF): cv.string,
vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string,
vol.Optional(CONF_STATE_VALUE_TEMPLATE): cv.template,
vol.Required(CONF_TARGET_HUMIDITY_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Required(CONF_TARGET_HUMIDITY_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_TARGET_HUMIDITY_COMMAND_TEMPLATE): cv.template,
vol.Optional(
CONF_TARGET_HUMIDITY_MAX, default=DEFAULT_MAX_HUMIDITY
@@ -135,7 +134,7 @@ _PLATFORM_SCHEMA_BASE = mqtt.MQTT_RW_SCHEMA.extend(
CONF_TARGET_HUMIDITY_MIN, default=DEFAULT_MIN_HUMIDITY
): cv.positive_int,
vol.Optional(CONF_TARGET_HUMIDITY_STATE_TEMPLATE): cv.template,
vol.Optional(CONF_TARGET_HUMIDITY_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_TARGET_HUMIDITY_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(
CONF_PAYLOAD_RESET_HUMIDITY, default=DEFAULT_PAYLOAD_RESET
): cv.string,
@@ -173,7 +172,11 @@ async def async_setup_platform(
"""Set up MQTT humidifier configured under the fan platform key (deprecated)."""
# Deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, humidifier.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
humidifier.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)

@@ -184,14 +187,12 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT humidifier through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, humidifier.DOMAIN, PLATFORM_SCHEMA_MODERN
)
config_entry.async_on_unload(
await async_setup_platform_discovery(
hass, humidifier.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
# setup for discovery
setup = functools.partial(
_async_setup_entity, hass, async_add_entities, config_entry=config_entry
)

@@ -1,7 +1,6 @@
"""Support for MQTT lights."""
from __future__ import annotations

import asyncio
import functools

import voluptuous as vol
@@ -14,8 +13,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from ..mixins import (
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
warn_for_legacy_schema,
)
@@ -97,7 +96,11 @@ async def async_setup_platform(
"""Set up MQTT light through configuration.yaml (deprecated)."""
# Deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, light.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
light.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)

@@ -108,13 +111,8 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT lights configured under the light platform key (deprecated)."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, light.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
config_entry.async_on_unload(
await async_setup_platform_discovery(hass, light.DOMAIN, PLATFORM_SCHEMA_MODERN)
)
# setup for discovery
setup = functools.partial(

@@ -42,8 +42,8 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
import homeassistant.util.color as color_util

from .. import MqttCommandTemplate, MqttValueTemplate, subscription
from ... import mqtt
from .. import subscription
from ..config import MQTT_RW_SCHEMA
from ..const import (
CONF_COMMAND_TOPIC,
CONF_ENCODING,
@@ -55,6 +55,8 @@ from ..const import (
)
from ..debug_info import log_messages
from ..mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity
from ..models import MqttCommandTemplate, MqttValueTemplate
from ..util import valid_publish_topic, valid_subscribe_topic
from .schema import MQTT_LIGHT_SCHEMA_SCHEMA

_LOGGER = logging.getLogger(__name__)
@@ -156,28 +158,28 @@ VALUE_TEMPLATE_KEYS = [
]

_PLATFORM_SCHEMA_BASE = (
mqtt.MQTT_RW_SCHEMA.extend(
MQTT_RW_SCHEMA.extend(
{
vol.Optional(CONF_BRIGHTNESS_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_BRIGHTNESS_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_BRIGHTNESS_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(
CONF_BRIGHTNESS_SCALE, default=DEFAULT_BRIGHTNESS_SCALE
): vol.All(vol.Coerce(int), vol.Range(min=1)),
vol.Optional(CONF_BRIGHTNESS_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_BRIGHTNESS_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_BRIGHTNESS_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_COLOR_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_COLOR_MODE_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_COLOR_MODE_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_COLOR_TEMP_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_COLOR_TEMP_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_COLOR_TEMP_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_COLOR_TEMP_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_COLOR_TEMP_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_COLOR_TEMP_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_EFFECT_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_EFFECT_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_EFFECT_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_EFFECT_LIST): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_EFFECT_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_EFFECT_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_EFFECT_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_HS_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_HS_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_HS_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_HS_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_HS_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_MAX_MIREDS): cv.positive_int,
vol.Optional(CONF_MIN_MIREDS): cv.positive_int,
@@ -189,30 +191,30 @@ _PLATFORM_SCHEMA_BASE = (
vol.Optional(CONF_PAYLOAD_OFF, default=DEFAULT_PAYLOAD_OFF): cv.string,
vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string,
vol.Optional(CONF_RGB_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_RGB_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_RGB_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_RGB_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_RGB_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_RGB_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_RGBW_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_RGBW_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_RGBW_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_RGBW_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_RGBW_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_RGBW_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_RGBWW_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_RGBWW_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_RGBWW_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_RGBWW_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_RGBWW_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_RGBWW_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_STATE_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_WHITE_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_WHITE_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_WHITE_SCALE, default=DEFAULT_WHITE_SCALE): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
vol.Optional(CONF_WHITE_VALUE_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_WHITE_VALUE_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(
CONF_WHITE_VALUE_SCALE, default=DEFAULT_WHITE_VALUE_SCALE
): vol.All(vol.Coerce(int), vol.Range(min=1)),
vol.Optional(CONF_WHITE_VALUE_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_WHITE_VALUE_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_WHITE_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_XY_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_XY_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_XY_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_XY_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_XY_VALUE_TEMPLATE): cv.template,
},
)

@@ -51,7 +51,7 @@ from homeassistant.helpers.typing import ConfigType
import homeassistant.util.color as color_util

from .. import subscription
from ... import mqtt
from ..config import DEFAULT_QOS, DEFAULT_RETAIN, MQTT_RW_SCHEMA
from ..const import (
CONF_COMMAND_TOPIC,
CONF_ENCODING,
@@ -61,6 +61,7 @@ from ..const import (
)
from ..debug_info import log_messages
from ..mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity
from ..util import valid_subscribe_topic
from .schema import MQTT_LIGHT_SCHEMA_SCHEMA
from .schema_basic import CONF_BRIGHTNESS_SCALE, MQTT_LIGHT_ATTRIBUTES_BLOCKED

@@ -103,7 +104,7 @@ def valid_color_configuration(config):

_PLATFORM_SCHEMA_BASE = (
mqtt.MQTT_RW_SCHEMA.extend(
MQTT_RW_SCHEMA.extend(
{
vol.Optional(CONF_BRIGHTNESS, default=DEFAULT_BRIGHTNESS): cv.boolean,
vol.Optional(
@@ -126,12 +127,12 @@ _PLATFORM_SCHEMA_BASE = (
vol.Optional(CONF_MIN_MIREDS): cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_QOS, default=mqtt.DEFAULT_QOS): vol.All(
vol.Optional(CONF_QOS, default=DEFAULT_QOS): vol.All(
vol.Coerce(int), vol.In([0, 1, 2])
),
vol.Optional(CONF_RETAIN, default=mqtt.DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_RGB, default=DEFAULT_RGB): cv.boolean,
vol.Optional(CONF_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic,
vol.Inclusive(CONF_SUPPORTED_COLOR_MODES, "color_mode"): vol.All(
cv.ensure_list,
[vol.In(VALID_COLOR_MODES)],

@@ -31,8 +31,8 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
import homeassistant.util.color as color_util

from .. import MqttValueTemplate, subscription
from ... import mqtt
from .. import subscription
from ..config import MQTT_RW_SCHEMA
from ..const import (
CONF_COMMAND_TOPIC,
CONF_ENCODING,
@@ -43,6 +43,7 @@ from ..const import (
)
from ..debug_info import log_messages
from ..mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity
from ..models import MqttValueTemplate
from .schema import MQTT_LIGHT_SCHEMA_SCHEMA
from .schema_basic import MQTT_LIGHT_ATTRIBUTES_BLOCKED

@@ -67,7 +68,7 @@ CONF_RED_TEMPLATE = "red_template"
CONF_WHITE_VALUE_TEMPLATE = "white_value_template"

_PLATFORM_SCHEMA_BASE = (
mqtt.MQTT_RW_SCHEMA.extend(
MQTT_RW_SCHEMA.extend(
{
vol.Optional(CONF_BLUE_TEMPLATE): cv.template,
vol.Optional(CONF_BRIGHTNESS_TEMPLATE): cv.template,

@@ -1,7 +1,6 @@
"""Support for MQTT locks."""
from __future__ import annotations

import asyncio
import functools

import voluptuous as vol
@@ -15,8 +14,8 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import MqttValueTemplate, subscription
from .. import mqtt
from . import subscription
from .config import MQTT_RW_SCHEMA
from .const import (
CONF_COMMAND_TOPIC,
CONF_ENCODING,
@@ -28,11 +27,12 @@ from .debug_info import log_messages
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttEntity,
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
warn_for_legacy_schema,
)
from .models import MqttValueTemplate

CONF_PAYLOAD_LOCK = "payload_lock"
CONF_PAYLOAD_UNLOCK = "payload_unlock"
@@ -56,7 +56,7 @@ MQTT_LOCK_ATTRIBUTES_BLOCKED = frozenset(
}
)

PLATFORM_SCHEMA_MODERN = mqtt.MQTT_RW_SCHEMA.extend(
PLATFORM_SCHEMA_MODERN = MQTT_RW_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
@@ -87,7 +87,11 @@ async def async_setup_platform(
"""Set up MQTT locks configured under the lock platform key (deprecated)."""
# Deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, lock.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
lock.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)

@@ -98,13 +102,8 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT lock through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, lock.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
config_entry.async_on_unload(
await async_setup_platform_discovery(hass, lock.DOMAIN, PLATFORM_SCHEMA_MODERN)
)
# setup for discovery
setup = functools.partial(

@@ -2,6 +2,7 @@
from __future__ import annotations

from abc import abstractmethod
import asyncio
from collections.abc import Callable
import json
import logging
@@ -27,10 +28,11 @@ from homeassistant.const import (
CONF_UNIQUE_ID,
CONF_VALUE_TEMPLATE,
)
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback
from homeassistant.helpers import (
config_validation as cv,
device_registry as dr,
discovery,
entity_registry as er,
)
from homeassistant.helpers.device_registry import EVENT_DEVICE_REGISTRY_UPDATED
@@ -46,17 +48,10 @@ from homeassistant.helpers.entity import (
async_generate_entity_id,
)
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import (
DATA_MQTT,
PLATFORMS,
MqttValueTemplate,
async_publish,
debug_info,
subscription,
)
from . import debug_info, subscription
from .client import async_publish
from .const import (
ATTR_DISCOVERY_HASH,
ATTR_DISCOVERY_PAYLOAD,
@@ -65,14 +60,17 @@ from .const import (
CONF_ENCODING,
CONF_QOS,
CONF_TOPIC,
DATA_MQTT,
DATA_MQTT_CONFIG,
DATA_MQTT_RELOAD_NEEDED,
DATA_MQTT_UPDATED_CONFIG,
DEFAULT_ENCODING,
DEFAULT_PAYLOAD_AVAILABLE,
DEFAULT_PAYLOAD_NOT_AVAILABLE,
DOMAIN,
MQTT_CONNECTED,
MQTT_DISCONNECTED,
MQTT_RELOADED,
)
from .debug_info import log_message, log_messages
from .discovery import (
@@ -82,7 +80,7 @@ from .discovery import (
clear_discovery_hash,
set_discovery_hash,
)
from .models import PublishPayloadType, ReceiveMessage
from .models import MqttValueTemplate, PublishPayloadType, ReceiveMessage
from .subscription import (
async_prepare_subscribe_topics,
async_subscribe_topics,
@@ -264,8 +262,41 @@ class SetupEntity(Protocol):
"""Define setup_entities type."""

async def async_setup_platform_discovery(
hass: HomeAssistant, platform_domain: str, schema: vol.Schema
) -> CALLBACK_TYPE:
"""Set up platform discovery for manual config."""

async def _async_discover_entities() -> None:
"""Discover entities for a platform."""
if DATA_MQTT_UPDATED_CONFIG in hass.data:
# The platform has been reloaded
config_yaml = hass.data[DATA_MQTT_UPDATED_CONFIG]
else:
config_yaml = hass.data.get(DATA_MQTT_CONFIG, {})
if not config_yaml:
return
if platform_domain not in config_yaml:
return
await asyncio.gather(
*(
discovery.async_load_platform(hass, platform_domain, DOMAIN, config, {})
for config in await async_get_platform_config_from_yaml(
hass, platform_domain, schema, config_yaml
)
)
)

unsub = async_dispatcher_connect(hass, MQTT_RELOADED, _async_discover_entities)
await _async_discover_entities()
return unsub
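The helper above returns the dispatcher unsubscribe callback, so callers can tie rediscovery to the config entry's lifetime; this is the pattern each platform's async_setup_entry in this diff now follows (climate stands in for any domain):

config_entry.async_on_unload(
    await async_setup_platform_discovery(
        hass, climate.DOMAIN, PLATFORM_SCHEMA_MODERN
    )
)
# On reload, MQTT_RELOADED is dispatched and _async_discover_entities
# re-reads the updated YAML config to re-create the platform's entities.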

async def async_get_platform_config_from_yaml(
hass: HomeAssistant, domain: str, schema: vol.Schema
hass: HomeAssistant,
platform_domain: str,
schema: vol.Schema,
config_yaml: ConfigType = None,
) -> list[ConfigType]:
"""Return a list of validated configurations for the domain."""

@@ -279,12 +310,15 @@ async def async_get_platform_config_from_yaml(
try:
validated_config.append(schema(config_item))
except vol.MultipleInvalid as err:
async_log_exception(err, domain, config_item, hass)
async_log_exception(err, platform_domain, config_item, hass)

return validated_config

config_yaml: ConfigType = hass.data.get(DATA_MQTT_CONFIG, {})
if not (platform_configs := config_yaml.get(domain)):
if config_yaml is None:
config_yaml = hass.data.get(DATA_MQTT_CONFIG)
if not config_yaml:
return []
if not (platform_configs := config_yaml.get(platform_domain)):
return []
return async_validate_config(hass, platform_configs)

@@ -314,12 +348,11 @@ async def async_setup_entry_helper(hass, domain, async_setup, schema):
async def async_setup_platform_helper(
hass: HomeAssistant,
platform_domain: str,
config: ConfigType,
config: ConfigType | DiscoveryInfoType,
async_add_entities: AddEntitiesCallback,
async_setup_entities: SetupEntity,
) -> None:
"""Return true if platform setup should be aborted."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
if not bool(hass.config_entries.async_entries(DOMAIN)):
hass.data[DATA_MQTT_RELOAD_NEEDED] = None
_LOGGER.warning(

@@ -1,12 +1,21 @@
"""Models used by multiple MQTT modules."""
from __future__ import annotations

from ast import literal_eval
from collections.abc import Awaitable, Callable
import datetime as dt
from typing import Union
from typing import Any, Union

import attr

from homeassistant.const import ATTR_ENTITY_ID, ATTR_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import template
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import TemplateVarsType

_SENTINEL = object()

PublishPayloadType = Union[str, bytes, int, float, None]
ReceivePayloadType = Union[str, bytes]

@@ -35,3 +44,118 @@ class ReceiveMessage:

AsyncMessageCallbackType = Callable[[ReceiveMessage], Awaitable[None]]
MessageCallbackType = Callable[[ReceiveMessage], None]

class MqttCommandTemplate:
"""Class for rendering MQTT payload with command templates."""

def __init__(
self,
command_template: template.Template | None,
*,
hass: HomeAssistant | None = None,
entity: Entity | None = None,
) -> None:
"""Instantiate a command template."""
self._attr_command_template = command_template
if command_template is None:
return

self._entity = entity

command_template.hass = hass

if entity:
command_template.hass = entity.hass

@callback
def async_render(
self,
value: PublishPayloadType = None,
variables: TemplateVarsType = None,
) -> PublishPayloadType:
"""Render or convert the command template with given value or variables."""

def _convert_outgoing_payload(
payload: PublishPayloadType,
) -> PublishPayloadType:
"""Ensure correct raw MQTT payload is passed as bytes for publishing."""
if isinstance(payload, str):
try:
native_object = literal_eval(payload)
if isinstance(native_object, bytes):
return native_object

except (ValueError, TypeError, SyntaxError, MemoryError):
pass

return payload

if self._attr_command_template is None:
return value

values = {"value": value}
if self._entity:
values[ATTR_ENTITY_ID] = self._entity.entity_id
values[ATTR_NAME] = self._entity.name
if variables is not None:
values.update(variables)
return _convert_outgoing_payload(
self._attr_command_template.async_render(values, parse_result=False)
)
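A short usage sketch for the relocated command template (assumes a hass instance is in scope; the template text is illustrative):

from homeassistant.components.mqtt.models import MqttCommandTemplate
from homeassistant.helpers import template

command_tpl = MqttCommandTemplate(
    template.Template("{{ value | round(1) }}"), hass=hass
)
payload = command_tpl.async_render(value=21.666)  # renders to "21.7"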
class MqttValueTemplate:
"""Class for rendering MQTT value template with possible json values."""

def __init__(
self,
value_template: template.Template | None,
*,
hass: HomeAssistant | None = None,
entity: Entity | None = None,
config_attributes: TemplateVarsType = None,
) -> None:
"""Instantiate a value template."""
self._value_template = value_template
self._config_attributes = config_attributes
if value_template is None:
return

value_template.hass = hass
self._entity = entity

if entity:
value_template.hass = entity.hass

@callback
def async_render_with_possible_json_value(
self,
payload: ReceivePayloadType,
default: ReceivePayloadType | object = _SENTINEL,
variables: TemplateVarsType = None,
) -> ReceivePayloadType:
"""Render with possible json value or pass-though a received MQTT value."""
if self._value_template is None:
return payload

values: dict[str, Any] = {}

if variables is not None:
values.update(variables)

if self._config_attributes is not None:
values.update(self._config_attributes)

if self._entity:
values[ATTR_ENTITY_ID] = self._entity.entity_id
values[ATTR_NAME] = self._entity.name

if default == _SENTINEL:
return self._value_template.async_render_with_possible_json_value(
payload, variables=values
)

return self._value_template.async_render_with_possible_json_value(
payload, default, variables=values
)
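And the matching sketch for the value template, which also handles JSON payloads (again assuming hass is in scope):

from homeassistant.components.mqtt.models import MqttValueTemplate
from homeassistant.helpers import template

value_tpl = MqttValueTemplate(
    template.Template("{{ value_json.temperature }}"), hass=hass
)
state = value_tpl.async_render_with_possible_json_value(
    '{"temperature": 21.5}'
)  # renders to "21.5"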
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Configure number in a device through MQTT topic."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import functools
|
||||
import logging
|
||||
|
||||
@@ -27,8 +26,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import MqttCommandTemplate, MqttValueTemplate, subscription
|
||||
from .. import mqtt
|
||||
from . import subscription
|
||||
from .config import MQTT_RW_SCHEMA
|
||||
from .const import (
|
||||
CONF_COMMAND_TEMPLATE,
|
||||
CONF_COMMAND_TOPIC,
|
||||
@@ -41,11 +40,12 @@ from .debug_info import log_messages
|
||||
from .mixins import (
|
||||
MQTT_ENTITY_COMMON_SCHEMA,
|
||||
MqttEntity,
|
||||
async_get_platform_config_from_yaml,
|
||||
async_setup_entry_helper,
|
||||
async_setup_platform_discovery,
|
||||
async_setup_platform_helper,
|
||||
warn_for_legacy_schema,
|
||||
)
|
||||
from .models import MqttCommandTemplate, MqttValueTemplate
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -75,7 +75,7 @@ def validate_config(config):
|
||||
return config
|
||||
|
||||
|
||||
_PLATFORM_SCHEMA_BASE = mqtt.MQTT_RW_SCHEMA.extend(
|
||||
_PLATFORM_SCHEMA_BASE = MQTT_RW_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_COMMAND_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_MAX, default=DEFAULT_MAX_VALUE): vol.Coerce(float),
|
||||
@@ -118,7 +118,11 @@ async def async_setup_platform(
|
||||
"""Set up MQTT number configured under the number platform key (deprecated)."""
|
||||
# Deprecated in HA Core 2022.6
|
||||
await async_setup_platform_helper(
|
||||
hass, number.DOMAIN, config, async_add_entities, _async_setup_entity
|
||||
hass,
|
||||
number.DOMAIN,
|
||||
discovery_info or config,
|
||||
async_add_entities,
|
||||
_async_setup_entity,
|
||||
)
|
||||
|
||||
|
||||
@@ -129,12 +133,9 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up MQTT number through configuration.yaml and dynamically through MQTT discovery."""
|
||||
# load and initialize platform config from configuration.yaml
|
||||
await asyncio.gather(
|
||||
*(
|
||||
_async_setup_entity(hass, async_add_entities, config, config_entry)
|
||||
for config in await async_get_platform_config_from_yaml(
|
||||
hass, number.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
config_entry.async_on_unload(
|
||||
await async_setup_platform_discovery(
|
||||
hass, number.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
)
|
||||
# setup for discovery
|
||||
|
||||
@@ -1,7 +1,6 @@
"""Support for MQTT scenes."""
from __future__ import annotations

import asyncio
import functools

import voluptuous as vol
@@ -15,25 +14,27 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .. import mqtt
from .client import async_publish
from .config import MQTT_BASE_SCHEMA
from .const import CONF_COMMAND_TOPIC, CONF_ENCODING, CONF_QOS, CONF_RETAIN
from .mixins import (
CONF_ENABLED_BY_DEFAULT,
CONF_OBJECT_ID,
MQTT_AVAILABILITY_SCHEMA,
MqttEntity,
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
warn_for_legacy_schema,
)
from .util import valid_publish_topic

DEFAULT_NAME = "MQTT Scene"
DEFAULT_RETAIN = False

PLATFORM_SCHEMA_MODERN = mqtt.MQTT_BASE_SCHEMA.extend(
PLATFORM_SCHEMA_MODERN = MQTT_BASE_SCHEMA.extend(
{
vol.Required(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Required(CONF_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PAYLOAD_ON): cv.string,
@@ -63,7 +64,11 @@ async def async_setup_platform(
"""Set up MQTT scene configured under the scene platform key (deprecated)."""
# Deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, scene.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
scene.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)


@@ -74,13 +79,8 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT scene through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, scene.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
config_entry.async_on_unload(
await async_setup_platform_discovery(hass, scene.DOMAIN, PLATFORM_SCHEMA_MODERN)
)
# setup for discovery
setup = functools.partial(
@@ -128,7 +128,7 @@ class MqttScene(

This method is a coroutine.
"""
await mqtt.async_publish(
await async_publish(
self.hass,
self._config[CONF_COMMAND_TOPIC],
self._config[CONF_PAYLOAD_ON],

@@ -1,7 +1,6 @@
"""Configure select in a device through MQTT topic."""
from __future__ import annotations

import asyncio
import functools
import logging

@@ -17,8 +16,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import MqttCommandTemplate, MqttValueTemplate, subscription
from .. import mqtt
from . import subscription
from .config import MQTT_RW_SCHEMA
from .const import (
CONF_COMMAND_TEMPLATE,
CONF_COMMAND_TOPIC,
@@ -31,11 +30,12 @@ from .debug_info import log_messages
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttEntity,
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
warn_for_legacy_schema,
)
from .models import MqttCommandTemplate, MqttValueTemplate

_LOGGER = logging.getLogger(__name__)

@@ -51,7 +51,7 @@ MQTT_SELECT_ATTRIBUTES_BLOCKED = frozenset(
)


PLATFORM_SCHEMA_MODERN = mqtt.MQTT_RW_SCHEMA.extend(
PLATFORM_SCHEMA_MODERN = MQTT_RW_SCHEMA.extend(
{
vol.Optional(CONF_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
@@ -79,7 +79,11 @@ async def async_setup_platform(
"""Set up MQTT select configured under the select platform key (deprecated)."""
# Deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, select.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
select.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)


@@ -90,12 +94,9 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT select through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, select.DOMAIN, PLATFORM_SCHEMA_MODERN
)
config_entry.async_on_unload(
await async_setup_platform_discovery(
hass, select.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
# setup for discovery

@@ -1,7 +1,6 @@
"""Support for MQTT sensors."""
from __future__ import annotations

import asyncio
from datetime import timedelta
import functools
import logging
@@ -34,19 +33,21 @@ from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util

from . import MqttValueTemplate, subscription
from .. import mqtt
from . import subscription
from .config import MQTT_RO_SCHEMA
from .const import CONF_ENCODING, CONF_QOS, CONF_STATE_TOPIC
from .debug_info import log_messages
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttAvailability,
MqttEntity,
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
warn_for_legacy_schema,
)
from .models import MqttValueTemplate
from .util import valid_subscribe_topic

_LOGGER = logging.getLogger(__name__)

@@ -89,12 +90,12 @@ def validate_options(conf):
return conf


_PLATFORM_SCHEMA_BASE = mqtt.MQTT_RO_SCHEMA.extend(
_PLATFORM_SCHEMA_BASE = MQTT_RO_SCHEMA.extend(
{
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_EXPIRE_AFTER): cv.positive_int,
vol.Optional(CONF_FORCE_UPDATE, default=DEFAULT_FORCE_UPDATE): cv.boolean,
vol.Optional(CONF_LAST_RESET_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_LAST_RESET_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_LAST_RESET_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_STATE_CLASS): STATE_CLASSES_SCHEMA,
@@ -131,7 +132,11 @@ async def async_setup_platform(
"""Set up MQTT sensors configured under the fan platform key (deprecated)."""
# Deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, sensor.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
sensor.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)


@@ -142,12 +147,9 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT sensor through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, sensor.DOMAIN, PLATFORM_SCHEMA_MODERN
)
config_entry.async_on_unload(
await async_setup_platform_discovery(
hass, sensor.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
# setup for discovery

@@ -1,7 +1,6 @@
"""Support for MQTT sirens."""
from __future__ import annotations

import asyncio
import copy
import functools
import json
@@ -35,8 +34,8 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import MqttCommandTemplate, MqttValueTemplate, subscription
from .. import mqtt
from . import subscription
from .config import MQTT_RW_SCHEMA
from .const import (
CONF_COMMAND_TEMPLATE,
CONF_COMMAND_TOPIC,
@@ -52,11 +51,12 @@ from .debug_info import log_messages
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttEntity,
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
warn_for_legacy_schema,
)
from .models import MqttCommandTemplate, MqttValueTemplate

DEFAULT_NAME = "MQTT Siren"
DEFAULT_PAYLOAD_ON = "ON"
@@ -74,7 +74,7 @@ CONF_SUPPORT_VOLUME_SET = "support_volume_set"

STATE = "state"

PLATFORM_SCHEMA_MODERN = mqtt.MQTT_RW_SCHEMA.extend(
PLATFORM_SCHEMA_MODERN = MQTT_RW_SCHEMA.extend(
{
vol.Optional(CONF_AVAILABLE_TONES): cv.ensure_list,
vol.Optional(CONF_COMMAND_TEMPLATE): cv.template,
@@ -128,7 +128,11 @@ async def async_setup_platform(
"""Set up MQTT sirens configured under the fan platform key (deprecated)."""
# Deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, siren.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
siren.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)


@@ -139,13 +143,8 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT siren through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, siren.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
config_entry.async_on_unload(
await async_setup_platform_discovery(hass, siren.DOMAIN, PLATFORM_SCHEMA_MODERN)
)
# setup for discovery
setup = functools.partial(

@@ -1,7 +1,6 @@
"""Support for MQTT switches."""
from __future__ import annotations

import asyncio
import functools

import voluptuous as vol
@@ -24,8 +23,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import MqttValueTemplate, subscription
from .. import mqtt
from . import subscription
from .config import MQTT_RW_SCHEMA
from .const import (
CONF_COMMAND_TOPIC,
CONF_ENCODING,
@@ -38,11 +37,12 @@ from .debug_info import log_messages
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttEntity,
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
warn_for_legacy_schema,
)
from .models import MqttValueTemplate

DEFAULT_NAME = "MQTT Switch"
DEFAULT_PAYLOAD_ON = "ON"
@@ -51,7 +51,7 @@ DEFAULT_OPTIMISTIC = False
CONF_STATE_ON = "state_on"
CONF_STATE_OFF = "state_off"

PLATFORM_SCHEMA_MODERN = mqtt.MQTT_RW_SCHEMA.extend(
PLATFORM_SCHEMA_MODERN = MQTT_RW_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
@@ -82,7 +82,11 @@ async def async_setup_platform(
"""Set up MQTT switch configured under the fan platform key (deprecated)."""
# Deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, switch.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
switch.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)


@@ -93,12 +97,9 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT switch through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, switch.DOMAIN, PLATFORM_SCHEMA_MODERN
)
config_entry.async_on_unload(
await async_setup_platform_discovery(
hass, switch.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
# setup for discovery

@@ -11,8 +11,8 @@ from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType

from . import MqttValueTemplate, subscription
from .. import mqtt
from . import subscription
from .config import MQTT_BASE_SCHEMA
from .const import ATTR_DISCOVERY_HASH, CONF_QOS, CONF_TOPIC
from .mixins import (
MQTT_ENTITY_DEVICE_INFO_SCHEMA,
@@ -21,7 +21,7 @@ from .mixins import (
send_discovery_done,
update_device,
)
from .models import ReceiveMessage
from .models import MqttValueTemplate, ReceiveMessage
from .subscription import EntitySubscription
from .util import valid_subscribe_topic

@@ -30,7 +30,7 @@ LOG_NAME = "Tag"
TAG = "tag"
TAGS = "mqtt_tags"

PLATFORM_SCHEMA = mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend(
PLATFORM_SCHEMA = MQTT_BASE_SCHEMA.extend(
{
vol.Optional(CONF_DEVICE): MQTT_ENTITY_DEVICE_INFO_SCHEMA,
vol.Optional(CONF_PLATFORM): "mqtt",

@@ -1,7 +1,6 @@
"""Support for MQTT vacuums."""
from __future__ import annotations

import asyncio
import functools

import voluptuous as vol
@@ -13,8 +12,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from ..mixins import (
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
)
from .schema import CONF_SCHEMA, LEGACY, MQTT_VACUUM_SCHEMA, STATE
@@ -77,7 +76,11 @@ async def async_setup_platform(
"""Set up MQTT vacuum through configuration.yaml."""
# Deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, vacuum.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
vacuum.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)


@@ -88,12 +91,9 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT vacuum through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, vacuum.DOMAIN, PLATFORM_SCHEMA_MODERN
)
config_entry.async_on_unload(
await async_setup_platform_discovery(
hass, vacuum.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
# setup for discovery

@@ -15,11 +15,13 @@ from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.icon import icon_for_battery_level

from .. import MqttValueTemplate, subscription
from ... import mqtt
from .. import subscription
from ..config import MQTT_BASE_SCHEMA
from ..const import CONF_COMMAND_TOPIC, CONF_ENCODING, CONF_QOS, CONF_RETAIN
from ..debug_info import log_messages
from ..mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity, warn_for_legacy_schema
from ..models import MqttValueTemplate
from ..util import valid_publish_topic
from .const import MQTT_VACUUM_ATTRIBUTES_BLOCKED
from .schema import MQTT_VACUUM_SCHEMA, services_to_strings, strings_to_services

@@ -96,25 +98,23 @@ MQTT_LEGACY_VACUUM_ATTRIBUTES_BLOCKED = MQTT_VACUUM_ATTRIBUTES_BLOCKED | frozens
)

PLATFORM_SCHEMA_LEGACY_MODERN = (
mqtt.MQTT_BASE_SCHEMA.extend(
MQTT_BASE_SCHEMA.extend(
{
vol.Inclusive(CONF_BATTERY_LEVEL_TEMPLATE, "battery"): cv.template,
vol.Inclusive(
CONF_BATTERY_LEVEL_TOPIC, "battery"
): mqtt.valid_publish_topic,
vol.Inclusive(CONF_BATTERY_LEVEL_TOPIC, "battery"): valid_publish_topic,
vol.Inclusive(CONF_CHARGING_TEMPLATE, "charging"): cv.template,
vol.Inclusive(CONF_CHARGING_TOPIC, "charging"): mqtt.valid_publish_topic,
vol.Inclusive(CONF_CHARGING_TOPIC, "charging"): valid_publish_topic,
vol.Inclusive(CONF_CLEANING_TEMPLATE, "cleaning"): cv.template,
vol.Inclusive(CONF_CLEANING_TOPIC, "cleaning"): mqtt.valid_publish_topic,
vol.Inclusive(CONF_CLEANING_TOPIC, "cleaning"): valid_publish_topic,
vol.Inclusive(CONF_DOCKED_TEMPLATE, "docked"): cv.template,
vol.Inclusive(CONF_DOCKED_TOPIC, "docked"): mqtt.valid_publish_topic,
vol.Inclusive(CONF_DOCKED_TOPIC, "docked"): valid_publish_topic,
vol.Inclusive(CONF_ERROR_TEMPLATE, "error"): cv.template,
vol.Inclusive(CONF_ERROR_TOPIC, "error"): mqtt.valid_publish_topic,
vol.Inclusive(CONF_ERROR_TOPIC, "error"): valid_publish_topic,
vol.Optional(CONF_FAN_SPEED_LIST, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Inclusive(CONF_FAN_SPEED_TEMPLATE, "fan_speed"): cv.template,
vol.Inclusive(CONF_FAN_SPEED_TOPIC, "fan_speed"): mqtt.valid_publish_topic,
vol.Inclusive(CONF_FAN_SPEED_TOPIC, "fan_speed"): valid_publish_topic,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(
CONF_PAYLOAD_CLEAN_SPOT, default=DEFAULT_PAYLOAD_CLEAN_SPOT
@@ -135,12 +135,12 @@ PLATFORM_SCHEMA_LEGACY_MODERN = (
vol.Optional(
CONF_PAYLOAD_TURN_ON, default=DEFAULT_PAYLOAD_TURN_ON
): cv.string,
vol.Optional(CONF_SEND_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_SET_FAN_SPEED_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_SEND_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_SET_FAN_SPEED_TOPIC): valid_publish_topic,
vol.Optional(
CONF_SUPPORTED_FEATURES, default=DEFAULT_SERVICE_STRINGS
): vol.All(cv.ensure_list, [vol.In(STRING_TO_SERVICE.keys())]),
vol.Optional(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
}
)

@@ -23,7 +23,7 @@ from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv

from .. import subscription
from ... import mqtt
from ..config import MQTT_BASE_SCHEMA
from ..const import (
CONF_COMMAND_TOPIC,
CONF_ENCODING,
@@ -33,6 +33,7 @@ from ..const import (
)
from ..debug_info import log_messages
from ..mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity, warn_for_legacy_schema
from ..util import valid_publish_topic
from .const import MQTT_VACUUM_ATTRIBUTES_BLOCKED
from .schema import MQTT_VACUUM_SCHEMA, services_to_strings, strings_to_services

@@ -105,7 +106,7 @@ DEFAULT_PAYLOAD_START = "start"
DEFAULT_PAYLOAD_PAUSE = "pause"

PLATFORM_SCHEMA_STATE_MODERN = (
mqtt.MQTT_BASE_SCHEMA.extend(
MQTT_BASE_SCHEMA.extend(
{
vol.Optional(CONF_FAN_SPEED_LIST, default=[]): vol.All(
cv.ensure_list, [cv.string]
@@ -123,13 +124,13 @@ PLATFORM_SCHEMA_STATE_MODERN = (
vol.Optional(CONF_PAYLOAD_START, default=DEFAULT_PAYLOAD_START): cv.string,
vol.Optional(CONF_PAYLOAD_PAUSE, default=DEFAULT_PAYLOAD_PAUSE): cv.string,
vol.Optional(CONF_PAYLOAD_STOP, default=DEFAULT_PAYLOAD_STOP): cv.string,
vol.Optional(CONF_SEND_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_SET_FAN_SPEED_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_STATE_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_SEND_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_SET_FAN_SPEED_TOPIC): valid_publish_topic,
vol.Optional(CONF_STATE_TOPIC): valid_publish_topic,
vol.Optional(
CONF_SUPPORTED_FEATURES, default=DEFAULT_SERVICE_STRINGS
): vol.All(cv.ensure_list, [vol.In(STRING_TO_SERVICE.keys())]),
vol.Optional(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
}
)
@@ -178,7 +179,7 @@ class MqttStateVacuum(MqttEntity, StateVacuumEntity):
supported_feature_strings, STRING_TO_SERVICE
)
self._fan_speed_list = config[CONF_FAN_SPEED_LIST]
self._command_topic = config.get(mqtt.CONF_COMMAND_TOPIC)
self._command_topic = config.get(CONF_COMMAND_TOPIC)
self._set_fan_speed_topic = config.get(CONF_SET_FAN_SPEED_TOPIC)
self._send_command_topic = config.get(CONF_SEND_COMMAND_TOPIC)


@@ -35,7 +35,7 @@ GPS_JSON_PAYLOAD_SCHEMA = vol.Schema(
extra=vol.ALLOW_EXTRA,
)

PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend(mqtt.SCHEMA_BASE).extend(
PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend(mqtt.config.SCHEMA_BASE).extend(
{vol.Required(CONF_DEVICES): {cv.string: mqtt.valid_subscribe_topic}}
)


@@ -43,7 +43,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
vol.Optional(CONF_AWAY_TIMEOUT, default=DEFAULT_AWAY_TIMEOUT): cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
).extend(mqtt.MQTT_RO_PLATFORM_SCHEMA.schema)
).extend(mqtt.config.MQTT_RO_SCHEMA.schema)

MQTT_PAYLOAD = vol.Schema(
vol.All(

@@ -18,7 +18,6 @@ from .const import (
KEY_COORDINATOR_SPEED,
KEY_COORDINATOR_TRAFFIC,
KEY_ROUTER,
MODE_ROUTER,
PLATFORMS,
)
from .errors import CannotLoginException
@@ -72,7 +71,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

async def async_update_devices() -> bool:
"""Fetch data from the router."""
if router.mode == MODE_ROUTER:
if router.track_devices:
return await router.async_update_device_trackers()
return False

@@ -107,7 +106,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
update_interval=SPEED_TEST_INTERVAL,
)

if router.mode == MODE_ROUTER:
if router.track_devices:
await coordinator.async_config_entry_first_refresh()
await coordinator_traffic_meter.async_config_entry_first_refresh()

@@ -134,7 +133,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
if not hass.data[DOMAIN]:
hass.data.pop(DOMAIN)

if router.mode != MODE_ROUTER:
if not router.track_devices:
router_id = None
# Remove devices that are no longer tracked
device_registry = dr.async_get(hass)

@@ -2,7 +2,7 @@
"domain": "netgear",
"name": "NETGEAR",
"documentation": "https://www.home-assistant.io/integrations/netgear",
"requirements": ["pynetgear==0.10.0"],
"requirements": ["pynetgear==0.10.4"],
"codeowners": ["@hacf-fr", "@Quentame", "@starkillerOG"],
"iot_class": "local_polling",
"config_flow": true,

@@ -80,6 +80,7 @@ class NetgearRouter:
self.hardware_version = ""
self.serial_number = ""

self.track_devices = True
self.method_version = 1
consider_home_int = entry.options.get(
CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME.total_seconds()
@@ -112,11 +113,23 @@ class NetgearRouter:
self.serial_number = self._info["SerialNumber"]
self.mode = self._info.get("DeviceMode", MODE_ROUTER)

enabled_entries = [
entry
for entry in self.hass.config_entries.async_entries(DOMAIN)
if entry.disabled_by is None
]
self.track_devices = self.mode == MODE_ROUTER or len(enabled_entries) == 1
_LOGGER.debug(
"Netgear track_devices = '%s', device mode '%s'",
self.track_devices,
self.mode,
)

for model in MODELS_V2:
if self.model.startswith(model):
self.method_version = 2

if self.method_version == 2 and self.mode == MODE_ROUTER:
if self.method_version == 2 and self.track_devices:
if not self._api.get_attached_devices_2():
_LOGGER.error(
"Netgear Model '%s' in MODELS_V2 list, but failed to get attached devices using V2",
@@ -133,7 +146,7 @@ class NetgearRouter:
return False

# set already known devices to away instead of unavailable
if self.mode == MODE_ROUTER:
if self.track_devices:
device_registry = dr.async_get(self.hass)
devices = dr.async_entries_for_config_entry(device_registry, self.entry_id)
for device_entry in devices:

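The new track_devices flag collapses the earlier mode checks into one decision: device trackers stay enabled when the unit runs in router mode, or when it is the only enabled NETGEAR config entry (for example a lone access point). A standalone restatement of that rule; the flat function and parameters here are illustrative, not part of the integration's API, and the MODE_ROUTER value is an assumption taken from the integration's const module:

# Sketch of the decision rule in the hunk above; names simplified.
MODE_ROUTER = "0"  # assumed value of the const-module sentinel

def should_track_devices(mode: str, enabled_entry_count: int) -> bool:
    """Track attached devices in router mode, or when this is the only
    enabled NETGEAR entry (a single AP still owns the tracking)."""
    return mode == MODE_ROUTER or enabled_entry_count == 1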
@@ -5,6 +5,7 @@ from collections.abc import Callable
from dataclasses import dataclass
from datetime import date, datetime
from decimal import Decimal
import logging

from homeassistant.components.sensor import (
RestoreSensor,
@@ -34,6 +35,8 @@ from .const import (
)
from .router import NetgearDeviceEntity, NetgearRouter, NetgearRouterEntity

_LOGGER = logging.getLogger(__name__)

SENSOR_TYPES = {
"type": SensorEntityDescription(
key="type",
@@ -114,7 +117,7 @@ SENSOR_TRAFFIC_TYPES = [
native_unit_of_measurement=DATA_MEGABYTES,
icon="mdi:upload",
index=0,
value=lambda data: data[0] if data is not None else None,
value=lambda data: data[0],
),
NetgearSensorEntityDescription(
key="NewWeekUpload",
@@ -123,7 +126,7 @@ SENSOR_TRAFFIC_TYPES = [
native_unit_of_measurement=DATA_MEGABYTES,
icon="mdi:upload",
index=1,
value=lambda data: data[1] if data is not None else None,
value=lambda data: data[1],
),
NetgearSensorEntityDescription(
key="NewWeekDownload",
@@ -132,7 +135,7 @@ SENSOR_TRAFFIC_TYPES = [
native_unit_of_measurement=DATA_MEGABYTES,
icon="mdi:download",
index=0,
value=lambda data: data[0] if data is not None else None,
value=lambda data: data[0],
),
NetgearSensorEntityDescription(
key="NewWeekDownload",
@@ -141,7 +144,7 @@ SENSOR_TRAFFIC_TYPES = [
native_unit_of_measurement=DATA_MEGABYTES,
icon="mdi:download",
index=1,
value=lambda data: data[1] if data is not None else None,
value=lambda data: data[1],
),
NetgearSensorEntityDescription(
key="NewMonthUpload",
@@ -150,7 +153,7 @@ SENSOR_TRAFFIC_TYPES = [
native_unit_of_measurement=DATA_MEGABYTES,
icon="mdi:upload",
index=0,
value=lambda data: data[0] if data is not None else None,
value=lambda data: data[0],
),
NetgearSensorEntityDescription(
key="NewMonthUpload",
@@ -159,7 +162,7 @@ SENSOR_TRAFFIC_TYPES = [
native_unit_of_measurement=DATA_MEGABYTES,
icon="mdi:upload",
index=1,
value=lambda data: data[1] if data is not None else None,
value=lambda data: data[1],
),
NetgearSensorEntityDescription(
key="NewMonthDownload",
@@ -168,7 +171,7 @@ SENSOR_TRAFFIC_TYPES = [
native_unit_of_measurement=DATA_MEGABYTES,
icon="mdi:download",
index=0,
value=lambda data: data[0] if data is not None else None,
value=lambda data: data[0],
),
NetgearSensorEntityDescription(
key="NewMonthDownload",
@@ -177,7 +180,7 @@ SENSOR_TRAFFIC_TYPES = [
native_unit_of_measurement=DATA_MEGABYTES,
icon="mdi:download",
index=1,
value=lambda data: data[1] if data is not None else None,
value=lambda data: data[1],
),
NetgearSensorEntityDescription(
key="NewLastMonthUpload",
@@ -186,7 +189,7 @@ SENSOR_TRAFFIC_TYPES = [
native_unit_of_measurement=DATA_MEGABYTES,
icon="mdi:upload",
index=0,
value=lambda data: data[0] if data is not None else None,
value=lambda data: data[0],
),
NetgearSensorEntityDescription(
key="NewLastMonthUpload",
@@ -195,7 +198,7 @@ SENSOR_TRAFFIC_TYPES = [
native_unit_of_measurement=DATA_MEGABYTES,
icon="mdi:upload",
index=1,
value=lambda data: data[1] if data is not None else None,
value=lambda data: data[1],
),
NetgearSensorEntityDescription(
key="NewLastMonthDownload",
@@ -204,7 +207,7 @@ SENSOR_TRAFFIC_TYPES = [
native_unit_of_measurement=DATA_MEGABYTES,
icon="mdi:download",
index=0,
value=lambda data: data[0] if data is not None else None,
value=lambda data: data[0],
),
NetgearSensorEntityDescription(
key="NewLastMonthDownload",
@@ -213,7 +216,7 @@ SENSOR_TRAFFIC_TYPES = [
native_unit_of_measurement=DATA_MEGABYTES,
icon="mdi:download",
index=1,
value=lambda data: data[1] if data is not None else None,
value=lambda data: data[1],
),
]

@@ -372,6 +375,17 @@ class NetgearRouterSensorEntity(NetgearRouterEntity, RestoreSensor):
@callback
def async_update_device(self) -> None:
"""Update the Netgear device."""
if self.coordinator.data is not None:
data = self.coordinator.data.get(self.entity_description.key)
self._value = self.entity_description.value(data)
if self.coordinator.data is None:
return

data = self.coordinator.data.get(self.entity_description.key)
if data is None:
self._value = None
_LOGGER.debug(
"key '%s' not in Netgear router response '%s'",
self.entity_description.key,
data,
)
return

self._value = self.entity_description.value(data)

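The async_update_device rewrite is what lets every traffic lambda above drop its `if data is not None` check: guard clauses now return before the value lambda ever sees a missing payload or missing key. The new control flow, restated as a compact sketch (names simplified from the hunk above):

# Sketch of the guard-clause flow from the hunk above.
def async_update_device(self) -> None:
    if self.coordinator.data is None:
        return  # no payload yet: leave the restored value untouched

    data = self.coordinator.data.get(self.entity_description.key)
    if data is None:
        self._value = None  # key missing from the response: clear and log
        return

    # Only reached with real data, so the per-sensor value lambdas no
    # longer need their own None checks.
    self._value = self.entity_description.value(data)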
@@ -139,8 +139,11 @@ async def async_setup_entry(
entry, coordinator, controller, description
)
for description in BINARY_SENSOR_DESCRIPTIONS
if (coordinator := coordinators[description.api_category]) is not None
and key_exists(coordinator.data, description.data_key)
if (
(coordinator := coordinators[description.api_category]) is not None
and coordinator.data
and key_exists(coordinator.data, description.data_key)
)
]
)


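The extra `and coordinator.data` term matters because key_exists would otherwise be handed None (or an empty payload) when a coordinator exists but has not fetched yet; the walrus assignment keeps the dictionary lookup to a single evaluation. The same guard is added to the sensor platform below. A runnable toy illustration of the truthiness chain, with invented data shapes and a simplified key_exists stand-in:

# Toy illustration of the guard added above; shapes are invented.
def key_exists(data: dict, key: str) -> bool:
    return key in data

coordinator_data = {"zones": None, "provision": {"system": "ok"}}

usable = [
    name
    for name in ("zones", "provision")
    if (data := coordinator_data[name]) is not None
    and data                      # rejects an empty payload too, not just None
    and key_exists(data, "system")
]
assert usable == ["provision"]  # the None payload never reaches key_exists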
@@ -3,7 +3,7 @@
"name": "RainMachine",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/rainmachine",
"requirements": ["regenmaschine==2022.05.1"],
"requirements": ["regenmaschine==2022.06.1"],
"codeowners": ["@bachya"],
"iot_class": "local_polling",
"homekit": {

@@ -4,6 +4,8 @@ from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime, timedelta

from regenmaschine.controller import Controller

from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
@@ -13,8 +15,9 @@ from homeassistant.components.sensor import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import TEMP_CELSIUS, VOLUME_CUBIC_METERS
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity import EntityCategory
from homeassistant.helpers.entity import EntityCategory, EntityDescription
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.util.dt import utcnow

from . import RainMachineEntity
@@ -133,8 +136,11 @@ async def async_setup_entry(
entry, coordinator, controller, description
)
for description in SENSOR_DESCRIPTIONS
if (coordinator := coordinators[description.api_category]) is not None
and key_exists(coordinator.data, description.data_key)
if (
(coordinator := coordinators[description.api_category]) is not None
and coordinator.data
and key_exists(coordinator.data, description.data_key)
)
]

zone_coordinator = coordinators[DATA_ZONES]
@@ -202,16 +208,33 @@ class ZoneTimeRemainingSensor(RainMachineEntity, SensorEntity):

entity_description: RainMachineSensorDescriptionUid

def __init__(
self,
entry: ConfigEntry,
coordinator: DataUpdateCoordinator,
controller: Controller,
description: EntityDescription,
) -> None:
"""Initialize."""
super().__init__(entry, coordinator, controller, description)

self._running_or_queued: bool = False

@callback
def update_from_latest_data(self) -> None:
"""Update the state."""
data = self.coordinator.data[self.entity_description.uid]
now = utcnow()

if RUN_STATE_MAP.get(data["state"]) != RunStates.RUNNING:
# If the zone isn't actively running, return immediately:
if RUN_STATE_MAP.get(data["state"]) == RunStates.NOT_RUNNING:
if self._running_or_queued:
# If we go from running to not running, update the state to be right
# now (i.e., the time the zone stopped running):
self._attr_native_value = now
self._running_or_queued = False
return

self._running_or_queued = True
new_timestamp = now + timedelta(seconds=data["remaining"])

if self._attr_native_value:

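The rewritten update_from_latest_data is a small two-state machine: while a zone is running or queued the sensor projects a completion timestamp of now + remaining, and on the first update after the zone stops it pins the value to the stop time rather than leaving a stale future estimate. A compact restatement; the state strings stand in for RUN_STATE_MAP lookups and are assumptions, with the datetime plumbing simplified:

# Sketch of the state machine in the hunk above; the plain state strings
# are stand-ins for the integration's RunStates enum, not verified source.
from datetime import datetime, timedelta, timezone

def next_value(state: str, remaining: int, was_running: bool,
               prev: datetime | None) -> tuple[datetime | None, bool]:
    now = datetime.now(timezone.utc)
    if state == "not_running":
        if was_running:
            return now, False   # zone just stopped: pin value to the stop time
        return prev, False      # still idle: keep whatever value we had
    # running or queued: project the finish time from the remaining seconds
    return now + timedelta(seconds=remaining), True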
@@ -3,6 +3,7 @@ from __future__ import annotations

import asyncio
from collections.abc import Callable, Iterable
from concurrent.futures import CancelledError
import contextlib
from datetime import datetime, timedelta
import logging
@@ -518,9 +519,16 @@ class Recorder(threading.Thread):

def _wait_startup_or_shutdown(self) -> object | None:
"""Wait for startup or shutdown before starting."""
return asyncio.run_coroutine_threadsafe(
self._async_wait_for_started(), self.hass.loop
).result()
try:
return asyncio.run_coroutine_threadsafe(
self._async_wait_for_started(), self.hass.loop
).result()
except CancelledError as ex:
_LOGGER.warning(
"Recorder startup was externally canceled before it could complete: %s",
ex,
)
return SHUTDOWN_TASK

def run(self) -> None:
"""Start processing events to save."""

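Note which CancelledError this catches: concurrent.futures.CancelledError, raised by Future.result() on the recorder thread when the coroutine scheduled on the event loop is cancelled from outside (for example during a failing startup). Returning SHUTDOWN_TASK routes run() into its orderly shutdown path instead of letting the thread die on an unhandled exception. The pattern in isolation, as a generic sketch:

# Generic sketch of the pattern above: a worker thread waiting on the
# event loop must expect its cross-thread future to be cancelled.
import asyncio
from concurrent.futures import CancelledError

def wait_for_loop(coro_factory, loop, shutdown_sentinel):
    try:
        return asyncio.run_coroutine_threadsafe(coro_factory(), loop).result()
    except CancelledError:
        # The awaited coroutine was cancelled out from under us; degrade
        # to an orderly shutdown instead of crashing the worker thread.
        return shutdown_sentinel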
@@ -5,7 +5,7 @@ from collections.abc import Callable, Iterable
import json
from typing import Any

from sqlalchemy import JSON, Column, Text, cast, not_, or_
from sqlalchemy import Column, Text, cast, not_, or_
from sqlalchemy.sql.elements import ClauseList

from homeassistant.const import CONF_DOMAINS, CONF_ENTITIES, CONF_EXCLUDE, CONF_INCLUDE
@@ -16,6 +16,7 @@ from .models import ENTITY_ID_IN_EVENT, OLD_ENTITY_ID_IN_EVENT, States

DOMAIN = "history"
HISTORY_FILTERS = "history_filters"
JSON_NULL = json.dumps(None)

GLOB_TO_SQL_CHARS = {
ord("*"): "%",
@@ -36,7 +37,7 @@ def extract_include_exclude_filter_conf(conf: ConfigType) -> dict[str, Any]:
"""
return {
filter_type: {
matcher: set(conf.get(filter_type, {}).get(matcher, []))
matcher: set(conf.get(filter_type, {}).get(matcher) or [])
for matcher in FITLER_MATCHERS
}
for filter_type in FILTER_TYPES
@@ -88,14 +89,32 @@ class Filters:
self.included_domains: Iterable[str] = []
self.included_entity_globs: Iterable[str] = []

def __repr__(self) -> str:
"""Return human readable excludes/includes."""
return (
f"<Filters excluded_entities={self.excluded_entities} excluded_domains={self.excluded_domains} "
f"excluded_entity_globs={self.excluded_entity_globs} "
f"included_entities={self.included_entities} included_domains={self.included_domains} "
f"included_entity_globs={self.included_entity_globs}>"
)

@property
def has_config(self) -> bool:
"""Determine if there is any filter configuration."""
return bool(self._have_exclude or self._have_include)

@property
def _have_exclude(self) -> bool:
return bool(
self.excluded_entities
or self.excluded_domains
or self.excluded_entity_globs
or self.included_entities
)

@property
def _have_include(self) -> bool:
return bool(
self.included_entities
or self.included_domains
or self.included_entity_globs
)
@@ -103,36 +122,67 @@ class Filters:
def _generate_filter_for_columns(
self, columns: Iterable[Column], encoder: Callable[[Any], Any]
) -> ClauseList:
includes = []
if self.included_domains:
includes.append(_domain_matcher(self.included_domains, columns, encoder))
if self.included_entities:
includes.append(_entity_matcher(self.included_entities, columns, encoder))
if self.included_entity_globs:
includes.append(
_globs_to_like(self.included_entity_globs, columns, encoder)
)
"""Generate a filter from pre-comuted sets and pattern lists.

excludes = []
if self.excluded_domains:
excludes.append(_domain_matcher(self.excluded_domains, columns, encoder))
if self.excluded_entities:
excludes.append(_entity_matcher(self.excluded_entities, columns, encoder))
if self.excluded_entity_globs:
excludes.append(
_globs_to_like(self.excluded_entity_globs, columns, encoder)
)
This must match exactly how homeassistant.helpers.entityfilter works.
"""
i_domains = _domain_matcher(self.included_domains, columns, encoder)
i_entities = _entity_matcher(self.included_entities, columns, encoder)
i_entity_globs = _globs_to_like(self.included_entity_globs, columns, encoder)
includes = [i_domains, i_entities, i_entity_globs]

if not includes and not excludes:
e_domains = _domain_matcher(self.excluded_domains, columns, encoder)
e_entities = _entity_matcher(self.excluded_entities, columns, encoder)
e_entity_globs = _globs_to_like(self.excluded_entity_globs, columns, encoder)
excludes = [e_domains, e_entities, e_entity_globs]

have_exclude = self._have_exclude
have_include = self._have_include

# Case 1 - no includes or excludes - pass all entities
if not have_include and not have_exclude:
return None

if includes and not excludes:
# Case 2 - includes, no excludes - only include specified entities
if have_include and not have_exclude:
return or_(*includes).self_group()

if not includes and excludes:
# Case 3 - excludes, no includes - only exclude specified entities
if not have_include and have_exclude:
return not_(or_(*excludes).self_group())

return or_(*includes).self_group() & not_(or_(*excludes).self_group())
# Case 4 - both includes and excludes specified
# Case 4a - include domain or glob specified
#  - if domain is included, pass if entity not excluded
#  - if glob is included, pass if entity and domain not excluded
#  - if domain and glob are not included, pass if entity is included
# note: if both include domain matches then exclude domains ignored.
#   If glob matches then exclude domains and glob checked
if self.included_domains or self.included_entity_globs:
return or_(
(i_domains & ~(e_entities | e_entity_globs)),
(
~i_domains
& or_(
(i_entity_globs & ~(or_(*excludes))),
(~i_entity_globs & i_entities),
)
),
).self_group()

# Case 4b - exclude domain or glob specified, include has no domain or glob
# In this one case the traditional include logic is inverted. Even though an
# include is specified since its only a list of entity IDs its used only to
# expose specific entities excluded by domain or glob. Any entities not
# excluded are then presumed included. Logic is as follows
#  - if domain or glob is excluded, pass if entity is included
#  - if domain is not excluded, pass if entity not excluded by ID
if self.excluded_domains or self.excluded_entity_globs:
return (not_(or_(*excludes)) | i_entities).self_group()

# Case 4c - neither include or exclude domain specified
#  - Only pass if entity is included. Ignore entity excludes.
return i_entities

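A quick worked example helps check the cases against homeassistant.helpers.entityfilter. With include = {domains: {"light"}, entities: {"switch.porch"}} and exclude = {entities: {"light.noisy"}}, Case 4a applies (an include domain is present): light.kitchen passes via the included domain, light.noisy is knocked out by the entity exclude, and switch.porch passes only through the include-entity arm. A pure-Python analogue of that branch with glob handling elided; sets stand in for the SQL clauses, and the function name is hypothetical:

# Pure-Python analogue of Case 4a above (globs elided), for reasoning only;
# the real implementation builds SQLAlchemy clauses, not booleans.
def case_4a_passes(entity_id: str, inc_domains: set[str],
                   inc_entities: set[str], exc_entities: set[str]) -> bool:
    domain = entity_id.split(".", 1)[0]
    if domain in inc_domains:
        return entity_id not in exc_entities  # included domain, minus excludes
    return entity_id in inc_entities          # otherwise entity must be listed

assert case_4a_passes("light.kitchen", {"light"}, {"switch.porch"}, {"light.noisy"})
assert not case_4a_passes("light.noisy", {"light"}, {"switch.porch"}, {"light.noisy"})
assert case_4a_passes("switch.porch", {"light"}, {"switch.porch"}, {"light.noisy"})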
def states_entity_filter(self) -> ClauseList:
"""Generate the entity filter query."""
@@ -147,7 +197,17 @@ class Filters:
"""Generate the entity filter query."""
_encoder = json.dumps
return or_(
(ENTITY_ID_IN_EVENT == JSON.NULL) & (OLD_ENTITY_ID_IN_EVENT == JSON.NULL),
# sqlalchemy's SQLite json implementation always
# wraps everything with JSON_QUOTE so it resolves to 'null'
# when its empty
#
# For MySQL and PostgreSQL it will resolve to a literal
# NULL when its empty
#
((ENTITY_ID_IN_EVENT == JSON_NULL) | ENTITY_ID_IN_EVENT.is_(None))
& (
(OLD_ENTITY_ID_IN_EVENT == JSON_NULL) | OLD_ENTITY_ID_IN_EVENT.is_(None)
),
self._generate_filter_for_columns(
(ENTITY_ID_IN_EVENT, OLD_ENTITY_ID_IN_EVENT), _encoder
).self_group(),
@@ -158,29 +218,43 @@ def _globs_to_like(
glob_strs: Iterable[str], columns: Iterable[Column], encoder: Callable[[Any], Any]
) -> ClauseList:
"""Translate glob to sql."""
return or_(
cast(column, Text()).like(
encoder(glob_str).translate(GLOB_TO_SQL_CHARS), escape="\\"
matchers = [
(
column.is_not(None)
& cast(column, Text()).like(
encoder(glob_str).translate(GLOB_TO_SQL_CHARS), escape="\\"
)
)
for glob_str in glob_strs
for column in columns
)
]
return or_(*matchers) if matchers else or_(False)


def _entity_matcher(
entity_ids: Iterable[str], columns: Iterable[Column], encoder: Callable[[Any], Any]
) -> ClauseList:
return or_(
cast(column, Text()).in_([encoder(entity_id) for entity_id in entity_ids])
matchers = [
(
column.is_not(None)
& cast(column, Text()).in_([encoder(entity_id) for entity_id in entity_ids])
)
for column in columns
)
]
return or_(*matchers) if matchers else or_(False)


def _domain_matcher(
domains: Iterable[str], columns: Iterable[Column], encoder: Callable[[Any], Any]
) -> ClauseList:
return or_(
cast(column, Text()).like(encoder(f"{domain}.%"))
for domain in domains
matchers = [
(column.is_not(None) & cast(column, Text()).like(encoder(domain_matcher)))
for domain_matcher in like_domain_matchers(domains)
for column in columns
)
]
return or_(*matchers) if matchers else or_(False)


def like_domain_matchers(domains: Iterable[str]) -> list[str]:
"""Convert a list of domains to sql LIKE matchers."""
return [f"{domain}.%" for domain in domains]

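Two things changed in all three matcher helpers: each column test is now prefixed with column.is_not(None), since a NULL entity_id column would make LIKE/IN evaluate to SQL NULL rather than false and could leak rows through a surrounding not_() wrapper, and an empty matcher list now collapses to or_(False) so the composed filter still receives a real boolean clause instead of an empty OR. The extracted helper is also reusable on its own; a small usage sketch restating the function from the diff:

# Usage sketch for the extracted helper above (pattern list only; building
# the actual SQLAlchemy clauses is unchanged from the diff).
def like_domain_matchers(domains):
    """Convert a list of domains to sql LIKE matchers."""
    return [f"{domain}.%" for domain in domains]

assert like_domain_matchers(["light", "switch"]) == ["light.%", "switch.%"]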
@@ -9,12 +9,11 @@ import logging
import time
from typing import Any, cast

from sqlalchemy import Column, Text, and_, func, lambda_stmt, or_, select
from sqlalchemy import Column, Text, and_, func, or_, select
from sqlalchemy.engine.row import Row
from sqlalchemy.orm.query import Query
from sqlalchemy.orm.session import Session
from sqlalchemy.sql.expression import literal
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.sql.selectable import Select, Subquery

from homeassistant.components import recorder
from homeassistant.components.websocket_api.const import (
@@ -35,7 +34,7 @@ from .models import (
process_timestamp_to_utc_isoformat,
row_to_compressed_state,
)
from .util import execute_stmt_lambda_element, session_scope
from .util import execute_stmt, session_scope

# mypy: allow-untyped-defs, no-check-untyped-defs

@@ -115,22 +114,18 @@ def _schema_version(hass: HomeAssistant) -> int:
return recorder.get_instance(hass).schema_version


def lambda_stmt_and_join_attributes(
def stmt_and_join_attributes(
schema_version: int, no_attributes: bool, include_last_changed: bool = True
) -> tuple[StatementLambdaElement, bool]:
"""Return the lambda_stmt and if StateAttributes should be joined.

Because these are lambda_stmt the values inside the lambdas need
to be explicitly written out to avoid caching the wrong values.
"""
) -> tuple[Select, bool]:
"""Return the stmt and if StateAttributes should be joined."""
# If no_attributes was requested we do the query
# without the attributes fields and do not join the
# state_attributes table
if no_attributes:
if include_last_changed:
return lambda_stmt(lambda: select(*QUERY_STATE_NO_ATTR)), False
return select(*QUERY_STATE_NO_ATTR), False
return (
lambda_stmt(lambda: select(*QUERY_STATE_NO_ATTR_NO_LAST_CHANGED)),
select(*QUERY_STATE_NO_ATTR_NO_LAST_CHANGED),
False,
)
# If we in the process of migrating schema we do
@@ -139,19 +134,19 @@ def lambda_stmt_and_join_attributes(
if schema_version < 25:
if include_last_changed:
return (
lambda_stmt(lambda: select(*QUERY_STATES_PRE_SCHEMA_25)),
select(*QUERY_STATES_PRE_SCHEMA_25),
False,
)
return (
lambda_stmt(lambda: select(*QUERY_STATES_PRE_SCHEMA_25_NO_LAST_CHANGED)),
select(*QUERY_STATES_PRE_SCHEMA_25_NO_LAST_CHANGED),
False,
)
# Finally if no migration is in progress and no_attributes
# was not requested, we query both attributes columns and
# join state_attributes
if include_last_changed:
return lambda_stmt(lambda: select(*QUERY_STATES)), True
return lambda_stmt(lambda: select(*QUERY_STATES_NO_LAST_CHANGED)), True
return select(*QUERY_STATES), True
return select(*QUERY_STATES_NO_LAST_CHANGED), True


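The removed docstring names the motivation for the rest of this file's changes: lambda_stmt caches the compiled SQL keyed on the lambda's code object, so any per-call variation has to be spelled out inside the lambda or a wrong cached statement can be reused. Switching to plain select() trades that caching for straightforward, composable statements. The difference in miniature, as a sketch assuming a SQLAlchemy 1.4+ environment and a toy table standing in for the recorder's States model:

# Minimal contrast of the two statement styles; toy model, not HA's schema.
from sqlalchemy import Column, Integer, String, lambda_stmt, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class States(Base):
    __tablename__ = "states"
    state_id = Column(Integer, primary_key=True)
    entity_id = Column(String)

# Old style: a cached lambda statement, extended with further lambdas.
# Values referenced inside each lambda participate in the statement cache.
stmt = lambda_stmt(lambda: select(States))
stmt += lambda q: q.where(States.entity_id == "light.kitchen")

# New style: a plain Select, extended by ordinary reassignment -- nothing
# is cached behind the scenes, so per-call filters cannot go stale.
plain = select(States).where(States.entity_id == "light.kitchen")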
def get_significant_states(
|
||||
@@ -183,7 +178,7 @@ def get_significant_states(
|
||||
)
|
||||
|
||||
|
||||
def _ignore_domains_filter(query: Query) -> Query:
|
||||
def _ignore_domains_filter(query: Select) -> Select:
|
||||
"""Add a filter to ignore domains we do not fetch history for."""
|
||||
return query.filter(
|
||||
and_(
|
||||
@@ -203,9 +198,9 @@ def _significant_states_stmt(
|
||||
filters: Filters | None,
|
||||
significant_changes_only: bool,
|
||||
no_attributes: bool,
|
||||
) -> StatementLambdaElement:
|
||||
) -> Select:
|
||||
"""Query the database for significant state changes."""
|
||||
stmt, join_attributes = lambda_stmt_and_join_attributes(
|
||||
stmt, join_attributes = stmt_and_join_attributes(
|
||||
schema_version, no_attributes, include_last_changed=not significant_changes_only
|
||||
)
|
||||
if (
|
||||
@@ -214,11 +209,11 @@ def _significant_states_stmt(
|
||||
and significant_changes_only
|
||||
and split_entity_id(entity_ids[0])[0] not in SIGNIFICANT_DOMAINS
|
||||
):
|
||||
stmt += lambda q: q.filter(
|
||||
stmt = stmt.filter(
|
||||
(States.last_changed == States.last_updated) | States.last_changed.is_(None)
|
||||
)
|
||||
elif significant_changes_only:
|
||||
stmt += lambda q: q.filter(
|
||||
stmt = stmt.filter(
|
||||
or_(
|
||||
*[
|
||||
States.entity_id.like(entity_domain)
|
||||
@@ -232,23 +227,22 @@ def _significant_states_stmt(
|
||||
)
|
||||
|
||||
if entity_ids:
|
||||
stmt += lambda q: q.filter(States.entity_id.in_(entity_ids))
|
||||
stmt = stmt.filter(States.entity_id.in_(entity_ids))
|
||||
else:
|
||||
stmt += _ignore_domains_filter
|
||||
stmt = _ignore_domains_filter(stmt)
|
||||
if filters and filters.has_config:
|
||||
entity_filter = filters.states_entity_filter()
|
||||
stmt += lambda q: q.filter(entity_filter)
|
||||
stmt = stmt.filter(entity_filter)
|
||||
|
||||
stmt += lambda q: q.filter(States.last_updated > start_time)
|
||||
stmt = stmt.filter(States.last_updated > start_time)
|
||||
if end_time:
|
||||
stmt += lambda q: q.filter(States.last_updated < end_time)
|
||||
stmt = stmt.filter(States.last_updated < end_time)
|
||||
|
||||
if join_attributes:
|
||||
stmt += lambda q: q.outerjoin(
|
||||
stmt = stmt.outerjoin(
|
||||
StateAttributes, States.attributes_id == StateAttributes.attributes_id
|
||||
)
|
||||
stmt += lambda q: q.order_by(States.entity_id, States.last_updated)
|
||||
return stmt
|
||||
return stmt.order_by(States.entity_id, States.last_updated)
|
||||
|
||||
|
||||
def get_significant_states_with_session(
|
||||
@@ -285,9 +279,7 @@ def get_significant_states_with_session(
|
||||
significant_changes_only,
|
||||
no_attributes,
|
||||
)
|
||||
states = execute_stmt_lambda_element(
|
||||
session, stmt, None if entity_ids else start_time, end_time
|
||||
)
|
||||
states = execute_stmt(session, stmt, None if entity_ids else start_time, end_time)
|
||||
return _sorted_states_to_dict(
|
||||
hass,
|
||||
session,
|
||||
@@ -339,27 +331,28 @@ def _state_changed_during_period_stmt(
|
||||
no_attributes: bool,
|
||||
descending: bool,
|
||||
limit: int | None,
|
||||
) -> StatementLambdaElement:
|
||||
stmt, join_attributes = lambda_stmt_and_join_attributes(
|
||||
) -> Select:
|
||||
stmt, join_attributes = stmt_and_join_attributes(
|
||||
schema_version, no_attributes, include_last_changed=False
|
||||
)
|
||||
stmt += lambda q: q.filter(
|
||||
stmt = stmt.filter(
|
||||
((States.last_changed == States.last_updated) | States.last_changed.is_(None))
|
||||
& (States.last_updated > start_time)
|
||||
)
|
||||
if end_time:
|
||||
stmt += lambda q: q.filter(States.last_updated < end_time)
|
||||
stmt += lambda q: q.filter(States.entity_id == entity_id)
|
||||
stmt = stmt.filter(States.last_updated < end_time)
|
||||
if entity_id:
|
||||
stmt = stmt.filter(States.entity_id == entity_id)
|
||||
if join_attributes:
|
||||
stmt += lambda q: q.outerjoin(
|
||||
stmt = stmt.outerjoin(
|
||||
StateAttributes, States.attributes_id == StateAttributes.attributes_id
|
||||
)
|
||||
if descending:
|
||||
stmt += lambda q: q.order_by(States.entity_id, States.last_updated.desc())
|
||||
stmt = stmt.order_by(States.entity_id, States.last_updated.desc())
|
||||
else:
|
||||
stmt += lambda q: q.order_by(States.entity_id, States.last_updated)
|
||||
stmt = stmt.order_by(States.entity_id, States.last_updated)
|
||||
if limit:
|
||||
stmt += lambda q: q.limit(limit)
|
||||
stmt = stmt.limit(limit)
|
||||
return stmt
|
||||
|
||||
|
||||
@@ -375,6 +368,7 @@ def state_changes_during_period(
|
||||
) -> MutableMapping[str, list[State]]:
|
||||
"""Return states changes during UTC period start_time - end_time."""
|
||||
entity_id = entity_id.lower() if entity_id is not None else None
|
||||
entity_ids = [entity_id] if entity_id is not None else None
|
||||
|
||||
with session_scope(hass=hass) as session:
|
||||
stmt = _state_changed_during_period_stmt(
|
||||
@@ -386,11 +380,9 @@ def state_changes_during_period(
|
||||
descending,
|
||||
limit,
|
||||
)
|
||||
states = execute_stmt_lambda_element(
|
||||
states = execute_stmt(
|
||||
session, stmt, None if entity_id else start_time, end_time
|
||||
)
|
||||
entity_ids = [entity_id] if entity_id is not None else None
|
||||
|
||||
return cast(
|
||||
MutableMapping[str, list[State]],
|
||||
_sorted_states_to_dict(
|
||||
@@ -405,38 +397,38 @@ def state_changes_during_period(
|
||||
|
||||
|
||||
def _get_last_state_changes_stmt(
|
||||
schema_version: int, number_of_states: int, entity_id: str
|
||||
) -> StatementLambdaElement:
|
||||
stmt, join_attributes = lambda_stmt_and_join_attributes(
|
||||
schema_version: int, number_of_states: int, entity_id: str | None
|
||||
) -> Select:
|
||||
stmt, join_attributes = stmt_and_join_attributes(
|
||||
schema_version, False, include_last_changed=False
|
||||
)
|
||||
stmt += lambda q: q.filter(
|
||||
stmt = stmt.filter(
|
||||
(States.last_changed == States.last_updated) | States.last_changed.is_(None)
|
||||
).filter(States.entity_id == entity_id)
|
||||
)
|
||||
if entity_id:
|
||||
stmt = stmt.filter(States.entity_id == entity_id)
|
||||
if join_attributes:
|
||||
stmt += lambda q: q.outerjoin(
|
||||
stmt = stmt.outerjoin(
|
||||
StateAttributes, States.attributes_id == StateAttributes.attributes_id
|
||||
)
|
||||
stmt += lambda q: q.order_by(States.entity_id, States.last_updated.desc()).limit(
|
||||
return stmt.order_by(States.entity_id, States.last_updated.desc()).limit(
|
||||
number_of_states
|
||||
)
|
||||
return stmt
|
||||
|
||||
|
||||
def get_last_state_changes(
|
||||
hass: HomeAssistant, number_of_states: int, entity_id: str
|
||||
hass: HomeAssistant, number_of_states: int, entity_id: str | None
|
||||
) -> MutableMapping[str, list[State]]:
|
||||
"""Return the last number_of_states."""
|
||||
start_time = dt_util.utcnow()
|
||||
entity_id = entity_id.lower() if entity_id is not None else None
|
||||
entity_ids = [entity_id] if entity_id is not None else None
|
||||
|
||||
with session_scope(hass=hass) as session:
|
||||
stmt = _get_last_state_changes_stmt(
|
||||
_schema_version(hass), number_of_states, entity_id
|
||||
)
|
||||
states = list(execute_stmt_lambda_element(session, stmt))
|
||||
entity_ids = [entity_id] if entity_id is not None else None
|
||||
|
||||
states = list(execute_stmt(session, stmt))
|
||||
return cast(
|
||||
MutableMapping[str, list[State]],
|
||||
_sorted_states_to_dict(
|
||||
@@ -456,14 +448,14 @@ def _get_states_for_entites_stmt(
|
||||
utc_point_in_time: datetime,
|
||||
entity_ids: list[str],
|
||||
no_attributes: bool,
|
||||
) -> StatementLambdaElement:
|
||||
) -> Select:
|
||||
"""Baked query to get states for specific entities."""
|
||||
stmt, join_attributes = lambda_stmt_and_join_attributes(
|
||||
stmt, join_attributes = stmt_and_join_attributes(
|
||||
schema_version, no_attributes, include_last_changed=True
|
||||
)
|
||||
# We got an include-list of entities, accelerate the query by filtering already
|
||||
# in the inner query.
|
||||
stmt += lambda q: q.where(
|
||||
stmt = stmt.where(
|
||||
States.state_id
|
||||
== (
|
||||
select(func.max(States.state_id).label("max_state_id"))
|
||||
@@ -477,28 +469,18 @@ def _get_states_for_entites_stmt(
|
||||
).c.max_state_id
|
||||
)
|
||||
if join_attributes:
|
||||
stmt += lambda q: q.outerjoin(
|
||||
stmt = stmt.outerjoin(
|
||||
StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
|
||||
)
|
||||
return stmt
|
||||
|
||||
|
||||
def _get_states_for_all_stmt(
schema_version: int,
def _generate_most_recent_states_by_date(
run_start: datetime,
utc_point_in_time: datetime,
filters: Filters | None,
no_attributes: bool,
) -> StatementLambdaElement:
"""Baked query to get states for all entities."""
stmt, join_attributes = lambda_stmt_and_join_attributes(
schema_version, no_attributes, include_last_changed=True
)
# We did not get an include-list of entities, query all states in the inner
# query, then filter out unwanted domains as well as applying the custom filter.
# This filtering can't be done in the inner query because the domain column is
# not indexed and we can't control what's in the custom filter.
most_recent_states_by_date = (
) -> Subquery:
"""Generate the subquery for the most recent states by date."""
return (
select(
States.entity_id.label("max_entity_id"),
func.max(States.last_updated).label("max_last_updated"),
@@ -510,7 +492,27 @@ def _get_states_for_all_stmt(
.group_by(States.entity_id)
.subquery()
)
stmt += lambda q: q.where(


def _get_states_for_all_stmt(
schema_version: int,
run_start: datetime,
utc_point_in_time: datetime,
filters: Filters | None,
no_attributes: bool,
) -> Select:
"""Baked query to get states for all entities."""
stmt, join_attributes = stmt_and_join_attributes(
schema_version, no_attributes, include_last_changed=True
)
# We did not get an include-list of entities, query all states in the inner
# query, then filter out unwanted domains as well as applying the custom filter.
# This filtering can't be done in the inner query because the domain column is
# not indexed and we can't control what's in the custom filter.
most_recent_states_by_date = _generate_most_recent_states_by_date(
run_start, utc_point_in_time
)
stmt = stmt.where(
States.state_id
== (
select(func.max(States.state_id).label("max_state_id"))
@@ -526,12 +528,12 @@ def _get_states_for_all_stmt(
.subquery()
).c.max_state_id,
)
stmt += _ignore_domains_filter
stmt = _ignore_domains_filter(stmt)
if filters and filters.has_config:
entity_filter = filters.states_entity_filter()
stmt += lambda q: q.filter(entity_filter)
stmt = stmt.filter(entity_filter)
if join_attributes:
stmt += lambda q: q.outerjoin(
stmt = stmt.outerjoin(
StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
)
return stmt
@@ -549,7 +551,7 @@ def _get_rows_with_session(
"""Return the states at a specific point in time."""
schema_version = _schema_version(hass)
if entity_ids and len(entity_ids) == 1:
return execute_stmt_lambda_element(
return execute_stmt(
session,
_get_single_entity_states_stmt(
schema_version, utc_point_in_time, entity_ids[0], no_attributes
@@ -574,7 +576,7 @@ def _get_rows_with_session(
schema_version, run.start, utc_point_in_time, filters, no_attributes
)

return execute_stmt_lambda_element(session, stmt)
return execute_stmt(session, stmt)


def _get_single_entity_states_stmt(
@@ -582,14 +584,14 @@ def _get_single_entity_states_stmt(
utc_point_in_time: datetime,
entity_id: str,
no_attributes: bool = False,
) -> StatementLambdaElement:
) -> Select:
# Use an entirely different (and extremely fast) query if we only
# have a single entity id
stmt, join_attributes = lambda_stmt_and_join_attributes(
stmt, join_attributes = stmt_and_join_attributes(
schema_version, no_attributes, include_last_changed=True
)
stmt += (
lambda q: q.filter(
stmt = (
stmt.filter(
States.last_updated < utc_point_in_time,
States.entity_id == entity_id,
)
@@ -597,7 +599,7 @@ def _get_single_entity_states_stmt(
.limit(1)
)
if join_attributes:
stmt += lambda q: q.outerjoin(
stmt = stmt.outerjoin(
StateAttributes, States.attributes_id == StateAttributes.attributes_id
)
return stmt

@@ -715,14 +715,13 @@ def _apply_update(  # noqa: C901
if engine.dialect.name == SupportedDialect.MYSQL:
# Ensure the row format is dynamic or the index
# unique will be too large
with session_scope(session=session_maker()) as session:
connection = session.connection()
# This is safe to run multiple times and fast since the table is small
connection.execute(
text(
"ALTER TABLE statistics_meta ENGINE=InnoDB, ROW_FORMAT=DYNAMIC"
with contextlib.suppress(SQLAlchemyError):
with session_scope(session=session_maker()) as session:
connection = session.connection()
# This is safe to run multiple times and fast since the table is small
connection.execute(
text("ALTER TABLE statistics_meta ROW_FORMAT=DYNAMIC")
)
)
try:
_create_index(
session_maker, "statistics_meta", "ix_statistics_meta_statistic_id"

@@ -93,6 +93,8 @@ TABLES_TO_CHECK = [

LAST_UPDATED_INDEX = "ix_states_last_updated"
ENTITY_ID_LAST_UPDATED_INDEX = "ix_states_entity_id_last_updated"
EVENTS_CONTEXT_ID_INDEX = "ix_events_context_id"
STATES_CONTEXT_ID_INDEX = "ix_states_context_id"

EMPTY_JSON_OBJECT = "{}"


@@ -14,12 +14,12 @@ import re
from statistics import mean
from typing import TYPE_CHECKING, Any, Literal, overload

from sqlalchemy import bindparam, func, lambda_stmt, select
from sqlalchemy import bindparam, func, select
from sqlalchemy.engine.row import Row
from sqlalchemy.exc import SQLAlchemyError, StatementError
from sqlalchemy.orm.session import Session
from sqlalchemy.sql.expression import literal_column, true
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.sql.selectable import Select, Subquery
import voluptuous as vol

from homeassistant.const import (
@@ -52,12 +52,7 @@ from .models import (
process_timestamp,
process_timestamp_to_utc_isoformat,
)
from .util import (
execute,
execute_stmt_lambda_element,
retryable_database_job,
session_scope,
)
from .util import execute, execute_stmt, retryable_database_job, session_scope

if TYPE_CHECKING:
from . import Recorder
@@ -482,16 +477,15 @@ def delete_statistics_meta_duplicates(session: Session) -> None:

def _compile_hourly_statistics_summary_mean_stmt(
start_time: datetime, end_time: datetime
) -> StatementLambdaElement:
) -> Select:
"""Generate the summary mean statement for hourly statistics."""
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SUMMARY_MEAN))
stmt += (
lambda q: q.filter(StatisticsShortTerm.start >= start_time)
return (
select(*QUERY_STATISTICS_SUMMARY_MEAN)
.filter(StatisticsShortTerm.start >= start_time)
.filter(StatisticsShortTerm.start < end_time)
.group_by(StatisticsShortTerm.metadata_id)
.order_by(StatisticsShortTerm.metadata_id)
)
return stmt


def compile_hourly_statistics(
@@ -509,7 +503,7 @@ def compile_hourly_statistics(
# Compute last hour's average, min, max
summary: dict[str, StatisticData] = {}
stmt = _compile_hourly_statistics_summary_mean_stmt(start_time, end_time)
stats = execute_stmt_lambda_element(session, stmt)
stats = execute_stmt(session, stmt)

if stats:
for stat in stats:
@@ -691,17 +685,17 @@ def _generate_get_metadata_stmt(
statistic_ids: list[str] | tuple[str] | None = None,
statistic_type: Literal["mean"] | Literal["sum"] | None = None,
statistic_source: str | None = None,
) -> StatementLambdaElement:
) -> Select:
"""Generate a statement to fetch metadata."""
stmt = lambda_stmt(lambda: select(*QUERY_STATISTIC_META))
stmt = select(*QUERY_STATISTIC_META)
if statistic_ids is not None:
stmt += lambda q: q.where(StatisticsMeta.statistic_id.in_(statistic_ids))
stmt = stmt.where(StatisticsMeta.statistic_id.in_(statistic_ids))
if statistic_source is not None:
stmt += lambda q: q.where(StatisticsMeta.source == statistic_source)
stmt = stmt.where(StatisticsMeta.source == statistic_source)
if statistic_type == "mean":
stmt += lambda q: q.where(StatisticsMeta.has_mean == true())
stmt = stmt.where(StatisticsMeta.has_mean == true())
elif statistic_type == "sum":
stmt += lambda q: q.where(StatisticsMeta.has_sum == true())
stmt = stmt.where(StatisticsMeta.has_sum == true())
return stmt


@@ -723,7 +717,7 @@ def get_metadata_with_session(

# Fetch metadata from the database
stmt = _generate_get_metadata_stmt(statistic_ids, statistic_type, statistic_source)
result = execute_stmt_lambda_element(session, stmt)
result = execute_stmt(session, stmt)
if not result:
return {}

@@ -984,29 +978,31 @@ def _reduce_statistics_per_month(
def _statistics_during_period_stmt(
start_time: datetime,
end_time: datetime | None,
statistic_ids: list[str] | None,
metadata_ids: list[int] | None,
table: type[Statistics | StatisticsShortTerm],
) -> StatementLambdaElement:
"""Prepare a database query for statistics during a given period.

This prepares a lambda_stmt query, so we don't insert the parameters yet.
"""
if table == StatisticsShortTerm:
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SHORT_TERM))
else:
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS))

stmt += lambda q: q.filter(table.start >= start_time)

) -> Select:
"""Prepare a database query for statistics during a given period."""
stmt = select(*QUERY_STATISTICS).filter(Statistics.start >= start_time)
if end_time is not None:
stmt += lambda q: q.filter(table.start < end_time)
stmt = stmt.filter(Statistics.start < end_time)
if metadata_ids:
stmt = stmt.filter(Statistics.metadata_id.in_(metadata_ids))
return stmt.order_by(Statistics.metadata_id, Statistics.start)

if statistic_ids is not None:
stmt += lambda q: q.filter(table.metadata_id.in_(metadata_ids))

stmt += lambda q: q.order_by(table.metadata_id, table.start)
return stmt
def _statistics_during_period_stmt_short_term(
start_time: datetime,
end_time: datetime | None,
metadata_ids: list[int] | None,
) -> Select:
"""Prepare a database query for short term statistics during a given period."""
stmt = select(*QUERY_STATISTICS_SHORT_TERM).filter(
StatisticsShortTerm.start >= start_time
)
if end_time is not None:
stmt = stmt.filter(StatisticsShortTerm.start < end_time)
if metadata_ids:
stmt = stmt.filter(StatisticsShortTerm.metadata_id.in_(metadata_ids))
return stmt.order_by(StatisticsShortTerm.metadata_id, StatisticsShortTerm.start)


def statistics_during_period(
@@ -1035,13 +1031,13 @@ def statistics_during_period(

if period == "5minute":
table = StatisticsShortTerm
stmt = _statistics_during_period_stmt_short_term(
start_time, end_time, metadata_ids
)
else:
table = Statistics

stmt = _statistics_during_period_stmt(
start_time, end_time, statistic_ids, metadata_ids, table
)
stats = execute_stmt_lambda_element(session, stmt)
stmt = _statistics_during_period_stmt(start_time, end_time, metadata_ids)
stats = execute_stmt(session, stmt)

if not stats:
return {}
@@ -1072,19 +1068,27 @@ def statistics_during_period(
def _get_last_statistics_stmt(
metadata_id: int,
number_of_stats: int,
table: type[Statistics | StatisticsShortTerm],
) -> StatementLambdaElement:
) -> Select:
"""Generate a statement for number_of_stats statistics for a given statistic_id."""
if table == StatisticsShortTerm:
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SHORT_TERM))
else:
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS))
stmt += (
lambda q: q.filter_by(metadata_id=metadata_id)
.order_by(table.metadata_id, table.start.desc())
return (
select(*QUERY_STATISTICS)
.filter_by(metadata_id=metadata_id)
.order_by(Statistics.metadata_id, Statistics.start.desc())
.limit(number_of_stats)
)


def _get_last_statistics_short_term_stmt(
metadata_id: int,
number_of_stats: int,
) -> Select:
"""Generate a statement for number_of_stats short term statistics for a given statistic_id."""
return (
select(*QUERY_STATISTICS_SHORT_TERM)
.filter_by(metadata_id=metadata_id)
.order_by(StatisticsShortTerm.metadata_id, StatisticsShortTerm.start.desc())
.limit(number_of_stats)
)
return stmt


def _get_last_statistics(
@@ -1102,8 +1106,11 @@ def _get_last_statistics(
if not metadata:
return {}
metadata_id = metadata[statistic_id][0]
stmt = _get_last_statistics_stmt(metadata_id, number_of_stats, table)
stats = execute_stmt_lambda_element(session, stmt)
if table == Statistics:
stmt = _get_last_statistics_stmt(metadata_id, number_of_stats)
else:
stmt = _get_last_statistics_short_term_stmt(metadata_id, number_of_stats)
stats = execute_stmt(session, stmt)

if not stats:
return {}
@@ -1139,12 +1146,9 @@ def get_last_short_term_statistics(
)


def _latest_short_term_statistics_stmt(
metadata_ids: list[int],
) -> StatementLambdaElement:
"""Create the statement for finding the latest short term stat rows."""
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SHORT_TERM))
most_recent_statistic_row = (
def _generate_most_recent_statistic_row(metadata_ids: list[int]) -> Subquery:
"""Generate the subquery to find the most recent statistic row."""
return (
select(
StatisticsShortTerm.metadata_id,
func.max(StatisticsShortTerm.start).label("start_max"),
@@ -1152,7 +1156,15 @@ def _latest_short_term_statistics_stmt(
.where(StatisticsShortTerm.metadata_id.in_(metadata_ids))
.group_by(StatisticsShortTerm.metadata_id)
).subquery()
stmt += lambda s: s.join(


def _latest_short_term_statistics_stmt(
metadata_ids: list[int],
) -> Select:
"""Create the statement for finding the latest short term stat rows."""
stmt = select(*QUERY_STATISTICS_SHORT_TERM)
most_recent_statistic_row = _generate_most_recent_statistic_row(metadata_ids)
return stmt.join(
most_recent_statistic_row,
(
StatisticsShortTerm.metadata_id  # pylint: disable=comparison-with-callable
@@ -1160,7 +1172,6 @@ def _latest_short_term_statistics_stmt(
)
& (StatisticsShortTerm.start == most_recent_statistic_row.c.start_max),
)
return stmt


def get_latest_short_term_statistics(
@@ -1183,7 +1194,7 @@ def get_latest_short_term_statistics(
if statistic_id in metadata
]
stmt = _latest_short_term_statistics_stmt(metadata_ids)
stats = execute_stmt_lambda_element(session, stmt)
stats = execute_stmt(session, stmt)
if not stats:
return {}


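Editor's note: _generate_most_recent_statistic_row above is an instance of the classic greatest-row-per-group pattern: a grouped subquery finds the newest start per metadata_id, then a join recovers the full rows. A runnable sketch with a toy table (names are illustrative, not the recorder's schema):

from sqlalchemy import Column, Integer, create_engine, func, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class ToyStat(Base):
    __tablename__ = "toy_stats"
    id = Column(Integer, primary_key=True)
    metadata_id = Column(Integer)
    start = Column(Integer)


engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all(
        [
            ToyStat(metadata_id=1, start=10),
            ToyStat(metadata_id=1, start=20),
            ToyStat(metadata_id=2, start=5),
        ]
    )
    session.commit()

    # Subquery: newest start per metadata_id (the "most recent row" marker).
    most_recent = (
        select(ToyStat.metadata_id, func.max(ToyStat.start).label("start_max"))
        .group_by(ToyStat.metadata_id)
        .subquery()
    )
    # Join back to recover the full rows matching those (metadata_id, start) pairs.
    stmt = select(ToyStat).join(
        most_recent,
        (ToyStat.metadata_id == most_recent.c.metadata_id)
        & (ToyStat.start == most_recent.c.start_max),
    )
    for row in session.scalars(stmt):
        print(row.metadata_id, row.start)  # prints 1 20 and 2 5
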
@@ -5,15 +5,18 @@ from sqlalchemy import text
from sqlalchemy.orm.session import Session


def db_size_bytes(session: Session, database_name: str) -> float:
def db_size_bytes(session: Session, database_name: str) -> float | None:
"""Get the mysql database size."""
return float(
session.execute(
text(
"SELECT ROUND(SUM(DATA_LENGTH + INDEX_LENGTH), 2) "
"FROM information_schema.TABLES WHERE "
"TABLE_SCHEMA=:database_name"
),
{"database_name": database_name},
).first()[0]
)
size = session.execute(
text(
"SELECT ROUND(SUM(DATA_LENGTH + INDEX_LENGTH), 2) "
"FROM information_schema.TABLES WHERE "
"TABLE_SCHEMA=:database_name"
),
{"database_name": database_name},
).first()[0]

if size is None:
return None

return float(size)

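Editor's note: the reason db_size_bytes can now return None is plain SQL semantics: SUM() over zero rows yields NULL, which the driver hands back as Python None, and float(None) raises TypeError. A tiny sqlite illustration of the failure mode the new guard avoids:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE t (n INTEGER)")

# SUM() over zero rows is SQL NULL, surfaced to Python as None.
row = conn.execute("SELECT SUM(n) FROM t").fetchone()
print(row[0])  # None

# float(row[0]) would raise TypeError here; guard first, as the new code does.
size = float(row[0]) if row[0] is not None else None
print(size)  # None
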
@@ -22,7 +22,6 @@ from sqlalchemy.engine.row import Row
from sqlalchemy.exc import OperationalError, SQLAlchemyError
from sqlalchemy.orm.query import Query
from sqlalchemy.orm.session import Session
from sqlalchemy.sql.lambdas import StatementLambdaElement
from typing_extensions import Concatenate, ParamSpec

from homeassistant.core import HomeAssistant
@@ -167,9 +166,9 @@ def execute(
assert False  # unreachable # pragma: no cover


def execute_stmt_lambda_element(
def execute_stmt(
session: Session,
stmt: StatementLambdaElement,
query: Query,
start_time: datetime | None = None,
end_time: datetime | None = None,
yield_per: int | None = DEFAULT_YIELD_STATES_ROWS,
@@ -185,11 +184,12 @@ def execute_stmt_lambda_element(
specific entities) since they are usually faster
with .all().
"""
executed = session.execute(stmt)
use_all = not start_time or ((end_time or dt_util.utcnow()) - start_time).days <= 1
for tryno in range(0, RETRIES):
try:
return executed.all() if use_all else executed.yield_per(yield_per)  # type: ignore[no-any-return]
if use_all:
return session.execute(query).all()  # type: ignore[no-any-return]
return session.execute(query).yield_per(yield_per)  # type: ignore[no-any-return]
except SQLAlchemyError as err:
_LOGGER.error("Error executing query: %s", err)
if tryno == RETRIES - 1:

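Editor's note: the key behavioral fix in the hunk above is that session.execute() moves inside the retry loop, so a failed attempt is actually re-executed; previously the statement was executed once before the loop ever ran. A condensed, runnable sketch of the resulting shape (RETRIES and error handling simplified):

from sqlalchemy import create_engine, text
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import Session

RETRIES = 3


def execute_with_retry(session, query, use_all=True, yield_per=1000):
    """Re-execute the statement inside the loop so a retry is a real retry."""
    for tryno in range(RETRIES):
        try:
            result = session.execute(query)
            # Small time windows fetch eagerly; large ones stream in chunks.
            return result.all() if use_all else result.yield_per(yield_per)
        except SQLAlchemyError as err:
            print("Error executing query:", err)
            if tryno == RETRIES - 1:
                raise


engine = create_engine("sqlite:///:memory:")
with Session(engine) as session:
    print(execute_with_retry(session, text("SELECT 1")))  # [(1,)]
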
@@ -7,7 +7,7 @@
"samsungctl[websocket]==0.7.1",
"samsungtvws[async,encrypted]==2.5.0",
"wakeonlan==2.0.1",
"async-upnp-client==0.30.1"
"async-upnp-client==0.31.1"
],
"ssdp": [
{

@@ -3,7 +3,7 @@
"name": "SimpliSafe",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/simplisafe",
"requirements": ["simplisafe-python==2022.05.2"],
"requirements": ["simplisafe-python==2022.06.0"],
"codeowners": ["@bachya"],
"iot_class": "cloud_polling",
"dhcp": [

@@ -205,13 +205,15 @@ class SonosMedia:
self, position_info: dict[str, int], force_update: bool = False
) -> None:
"""Update state when playing music tracks."""
if (duration := position_info.get(DURATION_SECONDS)) == 0:
duration = position_info.get(DURATION_SECONDS)
current_position = position_info.get(POSITION_SECONDS)

if not (duration or current_position):
self.clear_position()
return

should_update = force_update
self.duration = duration
current_position = position_info.get(POSITION_SECONDS)

# player started reporting position?
if current_position is not None and self.position is None:

@@ -25,7 +25,6 @@ from homeassistant.components.media_player import (
)
from homeassistant.components.media_player.const import (
ATTR_INPUT_SOURCE,
ATTR_MEDIA_ANNOUNCE,
ATTR_MEDIA_ENQUEUE,
MEDIA_TYPE_ALBUM,
MEDIA_TYPE_ARTIST,
@@ -544,9 +543,6 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
"""
# Use 'replace' as the default enqueue option
enqueue = kwargs.get(ATTR_MEDIA_ENQUEUE, MediaPlayerEnqueue.REPLACE)
if kwargs.get(ATTR_MEDIA_ANNOUNCE):
# Temporary workaround until announce support is added
enqueue = MediaPlayerEnqueue.PLAY

if spotify.is_spotify_media_type(media_type):
media_type = spotify.resolve_spotify_media_type(media_type)
@@ -755,17 +751,23 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
media_content_type,
)

def join_players(self, group_members):
async def async_join_players(self, group_members):
"""Join `group_members` as a player group with the current player."""
speakers = []
for entity_id in group_members:
if speaker := self.hass.data[DATA_SONOS].entity_id_mappings.get(entity_id):
speakers.append(speaker)
else:
raise HomeAssistantError(f"Not a known Sonos entity_id: {entity_id}")
async with self.hass.data[DATA_SONOS].topology_condition:
speakers = []
for entity_id in group_members:
if speaker := self.hass.data[DATA_SONOS].entity_id_mappings.get(
entity_id
):
speakers.append(speaker)
else:
raise HomeAssistantError(
f"Not a known Sonos entity_id: {entity_id}"
)

self.speaker.join(speakers)
await self.hass.async_add_executor_job(self.speaker.join, speakers)

def unjoin_player(self):
async def async_unjoin_player(self):
"""Remove this player from any group."""
self.speaker.unjoin()
async with self.hass.data[DATA_SONOS].topology_condition:
await self.hass.async_add_executor_job(self.speaker.unjoin)

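Editor's note: join_players and unjoin_player become coroutines above, but the underlying soco calls stay blocking, so they are pushed onto the executor. hass.async_add_executor_job is essentially loop.run_in_executor plus Home Assistant bookkeeping; a self-contained sketch of the pattern with stand-in names:

import asyncio
import time


def blocking_join(members):
    time.sleep(0.1)  # stands in for soco's synchronous network round-trip
    return f"joined {len(members)} players"


async def async_join(members):
    loop = asyncio.get_running_loop()
    # Same idea as hass.async_add_executor_job(self.speaker.join, speakers):
    # run the blocking call in a thread pool and await the result.
    return await loop.run_in_executor(None, blocking_join, members)


print(asyncio.run(async_join(["media_player.kitchen", "media_player.den"])))
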
@@ -2,7 +2,7 @@
"domain": "ssdp",
"name": "Simple Service Discovery Protocol (SSDP)",
"documentation": "https://www.home-assistant.io/integrations/ssdp",
"requirements": ["async-upnp-client==0.30.1"],
"requirements": ["async-upnp-client==0.31.1"],
"dependencies": ["network"],
"after_dependencies": ["zeroconf"],
"codeowners": [],

@@ -1,6 +1,7 @@
"""Support for balance data via the Starling Bank API."""
from __future__ import annotations

from datetime import timedelta
import logging

import requests
@@ -26,6 +27,7 @@ DEFAULT_SANDBOX = False
DEFAULT_ACCOUNT_NAME = "Starling"

ICON = "mdi:currency-gbp"
SCAN_INTERVAL = timedelta(seconds=180)

ACCOUNT_SCHEMA = vol.Schema(
{

@@ -2,7 +2,6 @@
from __future__ import annotations

from collections.abc import Callable
from datetime import timedelta
import logging

from synology_dsm import SynologyDSM
@@ -98,7 +97,7 @@ class SynoApi:
self._async_setup_api_requests()

await self._hass.async_add_executor_job(self._fetch_device_configuration)
await self.async_update()
await self.async_update(first_setup=True)

@callback
def subscribe(self, api_key: str, unique_id: str) -> Callable[[], None]:
@@ -251,7 +250,7 @@ class SynoApi:
# ignore API errors during logout
pass

async def async_update(self, now: timedelta | None = None) -> None:
async def async_update(self, first_setup: bool = False) -> None:
"""Update function for updating API information."""
LOGGER.debug("Start data update for '%s'", self._entry.unique_id)
self._async_setup_api_requests()
@@ -259,14 +258,22 @@ class SynoApi:
await self._hass.async_add_executor_job(
self.dsm.update, self._with_information
)
except (SynologyDSMLoginFailedException, SynologyDSMRequestException) as err:
LOGGER.warning(
"Connection error during update, fallback by reloading the entry"
)
except (
SynologyDSMLoginFailedException,
SynologyDSMRequestException,
SynologyDSMAPIErrorException,
) as err:
LOGGER.debug(
"Connection error during update of '%s' with exception: %s",
self._entry.unique_id,
err,
)

if first_setup:
raise err

LOGGER.warning(
"Connection error during update, fallback by reloading the entry"
)
await self._hass.config_entries.async_reload(self._entry.entry_id)
return

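Editor's note: the new first_setup flag changes the failure contract: during initial setup the exception propagates so config entry setup fails fast and can be retried, while on later refreshes the integration degrades by reloading the entry. A stripped-down, runnable sketch of that contract (names are illustrative, not the integration's API):

import asyncio


async def fetch_from_dsm():
    raise ConnectionError("DSM unreachable")  # simulated failing update


def schedule_entry_reload():
    print("reloading config entry")


async def update(first_setup=False):
    try:
        await fetch_from_dsm()
    except ConnectionError as err:
        if first_setup:
            raise err  # fail fast so config entry setup sees the error
        schedule_entry_reload()  # degrade gracefully on periodic refreshes


asyncio.run(update())  # prints "reloading config entry"
# asyncio.run(update(first_setup=True)) would raise ConnectionError instead
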
@@ -198,13 +198,16 @@ class TomorrowioWeatherEntity(TomorrowioEntity, WeatherEntity):
max_forecasts = MAX_FORECASTS[self.forecast_type]
forecast_count = 0

# Convert utcnow to local to be compatible with tests
today = dt_util.as_local(dt_util.utcnow()).date()

# Set default values (in cases where keys don't exist), None will be
# returned. Override properties per forecast type as needed
for forecast in raw_forecasts:
forecast_dt = dt_util.parse_datetime(forecast[TMRW_ATTR_TIMESTAMP])

# Throw out past data
if forecast_dt.date() < dt_util.utcnow().date():
if dt_util.as_local(forecast_dt).date() < today:
continue

values = forecast["values"]

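Editor's note: the Tomorrowio fix matters near midnight: a forecast stamped late in the UTC day can still be "today" in local time, so both sides of the comparison are converted before taking .date(). A runnable illustration, with a fixed offset standing in for the configured time zone:

from datetime import datetime, timedelta, timezone

local_tz = timezone(timedelta(hours=-7))  # stand-in for the configured zone

utc_now = datetime(2022, 6, 2, 1, 30, tzinfo=timezone.utc)
forecast_dt = datetime(2022, 6, 1, 23, 0, tzinfo=timezone.utc)

# Old check compared raw UTC dates and discarded a forecast that is still
# "today" locally:
print(forecast_dt.date() < utc_now.date())  # True -> wrongly thrown out

# New check converts both sides to local time before comparing dates:
today = utc_now.astimezone(local_tz).date()
print(forecast_dt.astimezone(local_tz).date() < today)  # False -> kept
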
Some files were not shown because too many files have changed in this diff.