Mirror of https://github.com/home-assistant/core.git
Synced 2026-01-07 08:07:16 +01:00

Compare commits (52 commits)
| SHA1 |
|---|
| 0d31d94532 |
| 4678466560 |
| a886c6110d |
| 33f282af46 |
| 2f3232f087 |
| 54ff6ddd41 |
| eef79e2912 |
| 93aad108a7 |
| 792ebbb600 |
| c47774e273 |
| 22bdeab1e7 |
| ca05cde6ba |
| 1e59ce19f5 |
| 7bdada7898 |
| 06a2fe94d3 |
| 854b0dbb2d |
| bd8424d184 |
| b50e3d5ce7 |
| 39c6a57c35 |
| c0482bdbfd |
| d9a41d10ff |
| b401f16583 |
| 373634cc50 |
| 10fb3035d6 |
| f3136c811c |
| 085eee88c9 |
| 6a3b74adf6 |
| 5c512ad5cb |
| 65cb82765b |
| 9f8fe7fca6 |
| 73536c07d7 |
| e0ca5bafda |
| ddc8c0a3b7 |
| ff687a8248 |
| 0d9330c39e |
| 69e8f5bb98 |
| 6a8a97b57c |
| f5e0363117 |
| f1bcfedf84 |
| 0e985284c9 |
| 12e6f143a4 |
| 2b77db2597 |
| a4297c0411 |
| 54b94c4826 |
| b28b204b86 |
| 8558ea2f9a |
| 01b3da1554 |
| 880590da64 |
| b74bd1aa0a |
| caa79d8462 |
| 9295cc4df9 |
| 24e148ab8e |
@@ -26,7 +26,7 @@ async def async_setup_entry(
 ) -> None:
     """Set up BAF fan auto comfort."""
     data: BAFData = hass.data[DOMAIN][entry.entry_id]
-    if data.device.has_fan:
+    if data.device.has_fan and data.device.has_auto_comfort:
         async_add_entities(
             [BAFAutoComfort(data.device, f"{data.device.name} Auto Comfort")]
         )

@@ -3,7 +3,7 @@
   "name": "Big Ass Fans",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/baf",
-  "requirements": ["aiobafi6==0.3.0"],
+  "requirements": ["aiobafi6==0.5.0"],
   "codeowners": ["@bdraco", "@jfroy"],
   "iot_class": "local_push",
   "zeroconf": [
@@ -36,27 +36,7 @@ class BAFNumberDescription(NumberEntityDescription, BAFNumberDescriptionMixin):
     """Class describing BAF sensor entities."""
 
 
-FAN_NUMBER_DESCRIPTIONS = (
-    BAFNumberDescription(
-        key="return_to_auto_timeout",
-        name="Return to Auto Timeout",
-        min_value=ONE_MIN_SECS,
-        max_value=HALF_DAY_SECS,
-        entity_category=EntityCategory.CONFIG,
-        unit_of_measurement=TIME_SECONDS,
-        value_fn=lambda device: cast(Optional[int], device.return_to_auto_timeout),
-        mode=NumberMode.SLIDER,
-    ),
-    BAFNumberDescription(
-        key="motion_sense_timeout",
-        name="Motion Sense Timeout",
-        min_value=ONE_MIN_SECS,
-        max_value=ONE_DAY_SECS,
-        entity_category=EntityCategory.CONFIG,
-        unit_of_measurement=TIME_SECONDS,
-        value_fn=lambda device: cast(Optional[int], device.motion_sense_timeout),
-        mode=NumberMode.SLIDER,
-    ),
+AUTO_COMFORT_NUMBER_DESCRIPTIONS = (
     BAFNumberDescription(
         key="comfort_min_speed",
         name="Auto Comfort Minimum Speed",
@@ -86,6 +66,29 @@ FAN_NUMBER_DESCRIPTIONS = (
     ),
 )
 
+FAN_NUMBER_DESCRIPTIONS = (
+    BAFNumberDescription(
+        key="return_to_auto_timeout",
+        name="Return to Auto Timeout",
+        min_value=ONE_MIN_SECS,
+        max_value=HALF_DAY_SECS,
+        entity_category=EntityCategory.CONFIG,
+        unit_of_measurement=TIME_SECONDS,
+        value_fn=lambda device: cast(Optional[int], device.return_to_auto_timeout),
+        mode=NumberMode.SLIDER,
+    ),
+    BAFNumberDescription(
+        key="motion_sense_timeout",
+        name="Motion Sense Timeout",
+        min_value=ONE_MIN_SECS,
+        max_value=ONE_DAY_SECS,
+        entity_category=EntityCategory.CONFIG,
+        unit_of_measurement=TIME_SECONDS,
+        value_fn=lambda device: cast(Optional[int], device.motion_sense_timeout),
+        mode=NumberMode.SLIDER,
+    ),
+)
+
 LIGHT_NUMBER_DESCRIPTIONS = (
     BAFNumberDescription(
         key="light_return_to_auto_timeout",
@@ -125,6 +128,8 @@ async def async_setup_entry(
         descriptions.extend(FAN_NUMBER_DESCRIPTIONS)
     if device.has_light:
         descriptions.extend(LIGHT_NUMBER_DESCRIPTIONS)
+    if device.has_auto_comfort:
+        descriptions.extend(AUTO_COMFORT_NUMBER_DESCRIPTIONS)
     async_add_entities(BAFNumber(device, description) for description in descriptions)

@@ -39,7 +39,7 @@ class BAFSensorDescription(
     """Class describing BAF sensor entities."""
 
 
-BASE_SENSORS = (
+AUTO_COMFORT_SENSORS = (
     BAFSensorDescription(
         key="temperature",
         name="Temperature",
@@ -103,10 +103,12 @@ async def async_setup_entry(
     """Set up BAF fan sensors."""
     data: BAFData = hass.data[DOMAIN][entry.entry_id]
     device = data.device
-    sensors_descriptions = list(BASE_SENSORS)
+    sensors_descriptions: list[BAFSensorDescription] = []
     for description in DEFINED_ONLY_SENSORS:
         if getattr(device, description.key):
             sensors_descriptions.append(description)
+    if device.has_auto_comfort:
+        sensors_descriptions.extend(AUTO_COMFORT_SENSORS)
     if device.has_fan:
         sensors_descriptions.extend(FAN_SENSORS)
     async_add_entities(
@@ -48,13 +48,16 @@ BASE_SWITCHES = [
     ),
 ]
 
-FAN_SWITCHES = [
+AUTO_COMFORT_SWITCHES = [
     BAFSwitchDescription(
         key="comfort_heat_assist_enable",
         name="Auto Comfort Heat Assist",
         entity_category=EntityCategory.CONFIG,
         value_fn=lambda device: cast(Optional[bool], device.comfort_heat_assist_enable),
     ),
+]
+
+FAN_SWITCHES = [
     BAFSwitchDescription(
         key="fan_beep_enable",
         name="Beep",

@@ -120,6 +123,8 @@ async def async_setup_entry(
         descriptions.extend(FAN_SWITCHES)
     if device.has_light:
         descriptions.extend(LIGHT_SWITCHES)
+    if device.has_auto_comfort:
+        descriptions.extend(AUTO_COMFORT_SWITCHES)
     async_add_entities(BAFSwitch(device, description) for description in descriptions)
@@ -2,7 +2,7 @@
   "domain": "bmw_connected_drive",
   "name": "BMW Connected Drive",
   "documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive",
-  "requirements": ["bimmer_connected==0.9.3"],
+  "requirements": ["bimmer_connected==0.9.4"],
   "codeowners": ["@gerard33", "@rikroe"],
   "config_flow": true,
   "iot_class": "cloud_polling",
@@ -1,7 +1,6 @@
 """Support for WebDav Calendar."""
 from __future__ import annotations
 
-import copy
 from datetime import datetime, timedelta
 import logging
 import re

@@ -143,15 +142,13 @@ class WebDavCalendarEntity(CalendarEntity):
     def update(self):
         """Update event data."""
         self.data.update()
-        event = copy.deepcopy(self.data.event)
-        if event is None:
-            self._event = event
-            return
-        (summary, offset) = extract_offset(event.summary, OFFSET)
-        event.summary = summary
-        self._event = event
+        self._event = self.data.event
         self._attr_extra_state_attributes = {
-            "offset_reached": is_offset_reached(event.start_datetime_local, offset)
+            "offset_reached": is_offset_reached(
+                self._event.start_datetime_local, self.data.offset
+            )
+            if self._event
+            else False
         }
@@ -165,6 +162,7 @@ class WebDavCalendarData:
         self.include_all_day = include_all_day
         self.search = search
         self.event = None
+        self.offset = None
 
     async def async_get_events(
         self, hass: HomeAssistant, start_date: datetime, end_date: datetime

@@ -264,13 +262,15 @@ class WebDavCalendarData:
             return
 
         # Populate the entity attributes with the event values
+        (summary, offset) = extract_offset(vevent.summary.value, OFFSET)
         self.event = CalendarEvent(
-            summary=vevent.summary.value,
+            summary=summary,
             start=vevent.dtstart.value,
            end=self.get_end_date(vevent),
             location=self.get_attr_value(vevent, "location"),
             description=self.get_attr_value(vevent, "description"),
         )
+        self.offset = offset
 
     @staticmethod
     def is_matching(vevent, search):
@@ -39,7 +39,6 @@ class CloudGoogleConfig(AbstractConfig):
         self._cur_entity_prefs = self._prefs.google_entity_configs
         self._cur_default_expose = self._prefs.google_default_expose
         self._sync_entities_lock = asyncio.Lock()
-        self._sync_on_started = False
 
     @property
     def enabled(self):

@@ -224,7 +223,7 @@ class CloudGoogleConfig(AbstractConfig):
         self._cur_entity_prefs = prefs.google_entity_configs
         self._cur_default_expose = prefs.google_default_expose
 
-        if sync_entities:
+        if sync_entities and self.hass.is_running:
             await self.async_sync_entities_all()
 
     @callback
@@ -2,6 +2,7 @@
 from __future__ import annotations
 
 import asyncio
+from enum import Enum
 import logging
 import re
 from types import MappingProxyType

@@ -481,7 +482,10 @@ class ElkEntity(Entity):
     @property
     def extra_state_attributes(self) -> dict[str, Any]:
         """Return the default attributes of the element."""
-        return {**self._element.as_dict(), **self.initial_attrs()}
+        dict_as_str = {}
+        for key, val in self._element.as_dict().items():
+            dict_as_str[key] = val.value if isinstance(val, Enum) else val
+        return {**dict_as_str, **self.initial_attrs()}
 
     @property
     def available(self) -> bool:
@@ -46,6 +46,8 @@ class FibaroCover(FibaroDevice, CoverEntity):
         self._attr_supported_features = (
             CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
         )
+        if "stop" in self.fibaro_device.actions:
+            self._attr_supported_features |= CoverEntityFeature.STOP
 
     @staticmethod
     def bound(position):
@@ -9,7 +9,7 @@ import logging
 from bleak import BleakScanner
 from bleak.backends.device import BLEDevice
 from bleak.backends.scanner import AdvertisementData
-from fjaraskupan import DEVICE_NAME, Device, State, device_filter
+from fjaraskupan import Device, State, device_filter
 
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import Platform

@@ -90,7 +90,7 @@ class EntryState:
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up Fjäråskupan from a config entry."""
 
-    scanner = BleakScanner(filters={"Pattern": DEVICE_NAME, "DuplicateData": True})
+    scanner = BleakScanner(filters={"DuplicateData": True})
 
     state = EntryState(scanner, {})
     hass.data.setdefault(DOMAIN, {})
@@ -7,7 +7,7 @@ import async_timeout
 from bleak import BleakScanner
 from bleak.backends.device import BLEDevice
 from bleak.backends.scanner import AdvertisementData
-from fjaraskupan import DEVICE_NAME, device_filter
+from fjaraskupan import device_filter
 
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.config_entry_flow import register_discovery_flow

@@ -28,7 +28,7 @@ async def _async_has_devices(hass: HomeAssistant) -> bool:
 
     async with BleakScanner(
         detection_callback=detection,
-        filters={"Pattern": DEVICE_NAME, "DuplicateData": True},
+        filters={"DuplicateData": True},
     ):
         try:
             async with async_timeout.timeout(CONST_WAIT_TIME):
@@ -2,7 +2,7 @@
   "domain": "frontend",
   "name": "Home Assistant Frontend",
   "documentation": "https://www.home-assistant.io/integrations/frontend",
-  "requirements": ["home-assistant-frontend==20220531.0"],
+  "requirements": ["home-assistant-frontend==20220601.0"],
   "dependencies": [
     "api",
     "auth",
@@ -5,7 +5,6 @@ from __future__ import annotations
 from collections.abc import Awaitable, Callable
 import datetime
 import logging
-import time
 from typing import Any, cast
 
 import aiohttp

@@ -50,12 +49,16 @@ class DeviceAuth(AuthImplementation):
     async def async_resolve_external_data(self, external_data: Any) -> dict:
         """Resolve a Google API Credentials object to Home Assistant token."""
         creds: Credentials = external_data[DEVICE_AUTH_CREDS]
+        delta = creds.token_expiry.replace(tzinfo=datetime.timezone.utc) - dt.utcnow()
+        _LOGGER.debug(
+            "Token expires at %s (in %s)", creds.token_expiry, delta.total_seconds()
+        )
         return {
             "access_token": creds.access_token,
             "refresh_token": creds.refresh_token,
             "scope": " ".join(creds.scopes),
             "token_type": "Bearer",
-            "expires_in": creds.token_expiry.timestamp() - time.time(),
+            "expires_in": delta.total_seconds(),
         }
@@ -67,13 +67,15 @@ class HistoryStats:
         current_period_end_timestamp = floored_timestamp(current_period_end)
         previous_period_start_timestamp = floored_timestamp(previous_period_start)
         previous_period_end_timestamp = floored_timestamp(previous_period_end)
-        now_timestamp = floored_timestamp(datetime.datetime.now())
+        utc_now = dt_util.utcnow()
+        now_timestamp = floored_timestamp(utc_now)
 
-        if now_timestamp < current_period_start_timestamp:
+        if current_period_start > utc_now:
             # History cannot tell the future
             self._history_current_period = []
             self._previous_run_before_start = True
 
             self._state = HistoryStatsState(None, None, self._period)
             return self._state
         #
         # We avoid querying the database if the below did NOT happen:
         #

@@ -82,7 +84,7 @@ class HistoryStats:
         # - The period shrank in size
         # - The previous period ended before now
         #
-        elif (
+        if (
             not self._previous_run_before_start
             and current_period_start_timestamp == previous_period_start_timestamp
             and (

@@ -117,10 +119,6 @@ class HistoryStats:
             )
         self._previous_run_before_start = False
 
-        if not self._history_current_period:
-            self._state = HistoryStatsState(None, None, self._period)
-            return self._state
-
         hours_matched, match_count = self._async_compute_hours_and_changes(
             now_timestamp,
             current_period_start_timestamp,
@@ -75,14 +75,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up Hive from a config entry."""
 
-    websession = aiohttp_client.async_get_clientsession(hass)
+    web_session = aiohttp_client.async_get_clientsession(hass)
     hive_config = dict(entry.data)
-    hive = Hive(
-        websession,
-        deviceGroupKey=hive_config["device_data"][0],
-        deviceKey=hive_config["device_data"][1],
-        devicePassword=hive_config["device_data"][2],
-    )
+    hive = Hive(web_session)
 
     hive_config["options"] = {}
     hive_config["options"].update(

@@ -102,6 +102,7 @@ class HiveFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
             raise UnknownHiveError
 
         # Setup the config entry
         await self.hive_auth.device_registration("Home Assistant")
         self.data["tokens"] = self.tokens
         self.data["device_data"] = await self.hive_auth.getDeviceData()
         if self.context["source"] == config_entries.SOURCE_REAUTH:

@@ -3,7 +3,7 @@
   "name": "Hive",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/hive",
-  "requirements": ["pyhiveapi==0.5.4"],
+  "requirements": ["pyhiveapi==0.5.5"],
   "codeowners": ["@Rendili", "@KJonline"],
   "iot_class": "cloud_polling",
   "loggers": ["apyhiveapi"]
@@ -2,7 +2,7 @@
   "domain": "ialarm_xr",
   "name": "Antifurto365 iAlarmXR",
   "documentation": "https://www.home-assistant.io/integrations/ialarm_xr",
-  "requirements": ["pyialarmxr==1.0.18"],
+  "requirements": ["pyialarmxr-homeassistant==1.0.18"],
   "codeowners": ["@bigmoby"],
   "config_flow": true,
   "iot_class": "cloud_polling",
@@ -117,7 +117,7 @@ class ISYThermostatEntity(ISYNodeEntity, ClimateEntity):
         """Return the current humidity."""
         if not (humidity := self._node.aux_properties.get(PROP_HUMIDITY)):
             return None
-        if humidity == ISY_VALUE_UNKNOWN:
+        if humidity.value == ISY_VALUE_UNKNOWN:
             return None
         return int(humidity.value)
@@ -636,11 +636,6 @@ class KodiEntity(MediaPlayerEntity):
 
         return None
 
-    @property
-    def available(self):
-        """Return True if entity is available."""
-        return not self._connect_error
-
     async def async_turn_on(self):
         """Turn the media player on."""
         _LOGGER.debug("Firing event to turn on device")
@@ -3,7 +3,7 @@
   "name": "LCN",
   "config_flow": false,
   "documentation": "https://www.home-assistant.io/integrations/lcn",
-  "requirements": ["pypck==0.7.14"],
+  "requirements": ["pypck==0.7.15"],
   "codeowners": ["@alengwenus"],
   "iot_class": "local_push",
   "loggers": ["pypck"]
@@ -2,9 +2,18 @@
 from __future__ import annotations
 
 from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED
+from homeassistant.components.counter import DOMAIN as COUNTER_DOMAIN
+from homeassistant.components.proximity import DOMAIN as PROXIMITY_DOMAIN
 from homeassistant.components.script import EVENT_SCRIPT_STARTED
+from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
 from homeassistant.const import EVENT_CALL_SERVICE, EVENT_LOGBOOK_ENTRY
 
+# Domains that are always continuous
+ALWAYS_CONTINUOUS_DOMAINS = {COUNTER_DOMAIN, PROXIMITY_DOMAIN}
+
+# Domains that are continuous if there is a UOM set on the entity
+CONDITIONALLY_CONTINUOUS_DOMAINS = {SENSOR_DOMAIN}
+
 ATTR_MESSAGE = "message"
 
 DOMAIN = "logbook"

@@ -30,13 +39,11 @@ LOGBOOK_ENTRY_NAME = "name"
 LOGBOOK_ENTRY_STATE = "state"
 LOGBOOK_ENTRY_WHEN = "when"
 
-ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED = {EVENT_LOGBOOK_ENTRY, EVENT_CALL_SERVICE}
-ENTITY_EVENTS_WITHOUT_CONFIG_ENTRY = {
-    EVENT_LOGBOOK_ENTRY,
-    EVENT_AUTOMATION_TRIGGERED,
-    EVENT_SCRIPT_STARTED,
-}
+# Automation events that can affect an entity_id or device_id
+AUTOMATION_EVENTS = {EVENT_AUTOMATION_TRIGGERED, EVENT_SCRIPT_STARTED}
+
+# Events that are built-in to the logbook or core
+BUILT_IN_EVENTS = {EVENT_LOGBOOK_ENTRY, EVENT_CALL_SERVICE}
 
 LOGBOOK_FILTERS = "logbook_filters"
 LOGBOOK_ENTITIES_FILTER = "entities_filter"
@@ -7,6 +7,7 @@ from typing import Any
 from homeassistant.components.sensor import ATTR_STATE_CLASS
 from homeassistant.const import (
     ATTR_DEVICE_ID,
+    ATTR_DOMAIN,
     ATTR_ENTITY_ID,
     ATTR_UNIT_OF_MEASUREMENT,
     EVENT_LOGBOOK_ENTRY,

@@ -19,15 +20,13 @@ from homeassistant.core import (
     State,
     callback,
+    is_callback,
     split_entity_id,
 )
 from homeassistant.helpers import device_registry as dr, entity_registry as er
+from homeassistant.helpers.entityfilter import EntityFilter
 from homeassistant.helpers.event import async_track_state_change_event
 
-from .const import (
-    ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED,
-    DOMAIN,
-    ENTITY_EVENTS_WITHOUT_CONFIG_ENTRY,
-)
+from .const import ALWAYS_CONTINUOUS_DOMAINS, AUTOMATION_EVENTS, BUILT_IN_EVENTS, DOMAIN
 from .models import LazyEventPartialState

@@ -41,6 +40,25 @@ def async_filter_entities(hass: HomeAssistant, entity_ids: list[str]) -> list[st
     ]
 
 
+@callback
+def _async_config_entries_for_ids(
+    hass: HomeAssistant, entity_ids: list[str] | None, device_ids: list[str] | None
+) -> set[str]:
+    """Find the config entry ids for a set of entities or devices."""
+    config_entry_ids: set[str] = set()
+    if entity_ids:
+        eng_reg = er.async_get(hass)
+        for entity_id in entity_ids:
+            if (entry := eng_reg.async_get(entity_id)) and entry.config_entry_id:
+                config_entry_ids.add(entry.config_entry_id)
+    if device_ids:
+        dev_reg = dr.async_get(hass)
+        for device_id in device_ids:
+            if (device := dev_reg.async_get(device_id)) and device.config_entries:
+                config_entry_ids |= device.config_entries
+    return config_entry_ids
+
+
 def async_determine_event_types(
     hass: HomeAssistant, entity_ids: list[str] | None, device_ids: list[str] | None
 ) -> tuple[str, ...]:
@@ -49,42 +67,91 @@ def async_determine_event_types(
         str, tuple[str, Callable[[LazyEventPartialState], dict[str, Any]]]
     ] = hass.data.get(DOMAIN, {})
     if not entity_ids and not device_ids:
-        return (*ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED, *external_events)
-    config_entry_ids: set[str] = set()
-    intrested_event_types: set[str] = set()
+        return (*BUILT_IN_EVENTS, *external_events)
+
+    interested_domains: set[str] = set()
+    for entry_id in _async_config_entries_for_ids(hass, entity_ids, device_ids):
+        if entry := hass.config_entries.async_get_entry(entry_id):
+            interested_domains.add(entry.domain)
+
+    #
+    # automations and scripts can refer to entities or devices
+    # but they do not have a config entry so we need
+    # to add them since we have historically included
+    # them when matching only on entities
+    #
+    intrested_event_types: set[str] = {
+        external_event
+        for external_event, domain_call in external_events.items()
+        if domain_call[0] in interested_domains
+    } | AUTOMATION_EVENTS
     if entity_ids:
-        #
-        # Home Assistant doesn't allow firing events from
-        # entities so we have a limited list to check
-        #
-        # automations and scripts can refer to entities
-        # but they do not have a config entry so we need
-        # to add them.
-        #
-        # We also allow entity_ids to be recorded via
-        # manual logbook entries.
-        #
-        intrested_event_types |= ENTITY_EVENTS_WITHOUT_CONFIG_ENTRY
+        # We also allow entity_ids to be recorded via manual logbook entries.
+        intrested_event_types.add(EVENT_LOGBOOK_ENTRY)
 
-    if device_ids:
-        dev_reg = dr.async_get(hass)
-        for device_id in device_ids:
-            if (device := dev_reg.async_get(device_id)) and device.config_entries:
-                config_entry_ids |= device.config_entries
-    interested_domains: set[str] = set()
-    for entry_id in config_entry_ids:
-        if entry := hass.config_entries.async_get_entry(entry_id):
-            interested_domains.add(entry.domain)
-    for external_event, domain_call in external_events.items():
-        if domain_call[0] in interested_domains:
-            intrested_event_types.add(external_event)
-    return tuple(intrested_event_types)
+    return tuple(
+        event_type
+        for event_type in (EVENT_LOGBOOK_ENTRY, *external_events)
+        if event_type in intrested_event_types
+    )
+@callback
+def extract_attr(source: dict[str, Any], attr: str) -> list[str]:
+    """Extract an attribute as a list or string."""
+    if (value := source.get(attr)) is None:
+        return []
+    if isinstance(value, list):
+        return value
+    return str(value).split(",")
+
+
+@callback
+def event_forwarder_filtered(
+    target: Callable[[Event], None],
+    entities_filter: EntityFilter | None,
+    entity_ids: list[str] | None,
+    device_ids: list[str] | None,
+) -> Callable[[Event], None]:
+    """Make a callable to filter events."""
+    if not entities_filter and not entity_ids and not device_ids:
+        # No filter
+        # - Script Trace (context ids)
+        # - Automation Trace (context ids)
+        return target
+
+    if entities_filter:
+        # We have an entity filter:
+        # - Logbook panel
+
+        @callback
+        def _forward_events_filtered_by_entities_filter(event: Event) -> None:
+            assert entities_filter is not None
+            event_data = event.data
+            entity_ids = extract_attr(event_data, ATTR_ENTITY_ID)
+            if entity_ids and not any(
+                entities_filter(entity_id) for entity_id in entity_ids
+            ):
+                return
+            domain = event_data.get(ATTR_DOMAIN)
+            if domain and not entities_filter(f"{domain}._"):
+                return
+            target(event)
+
+        return _forward_events_filtered_by_entities_filter
+
+    # We are filtering on entity_ids and/or device_ids:
+    # - Areas
+    # - Devices
+    # - Logbook Card
+    entity_ids_set = set(entity_ids) if entity_ids else set()
+    device_ids_set = set(device_ids) if device_ids else set()
+
+    @callback
+    def _forward_events_filtered_by_device_entity_ids(event: Event) -> None:
+        event_data = event.data
+        if entity_ids_set.intersection(
+            extract_attr(event_data, ATTR_ENTITY_ID)
+        ) or device_ids_set.intersection(extract_attr(event_data, ATTR_DEVICE_ID)):
+            target(event)
+
+    return _forward_events_filtered_by_device_entity_ids
+
+
 @callback
@@ -93,6 +160,7 @@ def async_subscribe_events(
     subscriptions: list[CALLBACK_TYPE],
     target: Callable[[Event], None],
     event_types: tuple[str, ...],
+    entities_filter: EntityFilter | None,
     entity_ids: list[str] | None,
     device_ids: list[str] | None,
 ) -> None:

@@ -103,41 +171,31 @@ def async_subscribe_events(
     """
     ent_reg = er.async_get(hass)
+    assert is_callback(target), "target must be a callback"
-    event_forwarder = target
-
-    if entity_ids or device_ids:
-        entity_ids_set = set(entity_ids) if entity_ids else set()
-        device_ids_set = set(device_ids) if device_ids else set()
-
-        @callback
-        def _forward_events_filtered(event: Event) -> None:
-            event_data = event.data
-            if (
-                entity_ids_set and event_data.get(ATTR_ENTITY_ID) in entity_ids_set
-            ) or (device_ids_set and event_data.get(ATTR_DEVICE_ID) in device_ids_set):
-                target(event)
-
-        event_forwarder = _forward_events_filtered
-
+    event_forwarder = event_forwarder_filtered(
+        target, entities_filter, entity_ids, device_ids
+    )
     for event_type in event_types:
         subscriptions.append(
             hass.bus.async_listen(event_type, event_forwarder, run_immediately=True)
         )
 
-    @callback
-    def _forward_state_events_filtered(event: Event) -> None:
-        if event.data.get("old_state") is None or event.data.get("new_state") is None:
-            return
-        state: State = event.data["new_state"]
-        if not _is_state_filtered(ent_reg, state):
-            target(event)
+    if device_ids and not entity_ids:
+        # No entities to subscribe to but we are filtering
+        # on device ids so we do not want to get any state
+        # changed events
+        return
+
+    @callback
+    def _forward_state_events_filtered(event: Event) -> None:
+        if event.data.get("old_state") is None or event.data.get("new_state") is None:
+            return
+        state: State = event.data["new_state"]
+        if _is_state_filtered(ent_reg, state) or (
+            entities_filter and not entities_filter(state.entity_id)
+        ):
+            return
+        target(event)
 
     if entity_ids:
         subscriptions.append(
             async_track_state_change_event(
@@ -178,7 +236,8 @@ def _is_state_filtered(ent_reg: er.EntityRegistry, state: State) -> bool:
     we only get significant changes (state.last_changed != state.last_updated)
     """
     return bool(
-        state.last_changed != state.last_updated
+        split_entity_id(state.entity_id)[0] in ALWAYS_CONTINUOUS_DOMAINS
+        or state.last_changed != state.last_updated
         or ATTR_UNIT_OF_MEASUREMENT in state.attributes
        or is_sensor_continuous(ent_reg, state.entity_id)
     )

@@ -193,7 +252,8 @@ def _is_entity_id_filtered(
     from the database when a list of entities is requested.
     """
     return bool(
-        (state := hass.states.get(entity_id))
+        split_entity_id(entity_id)[0] in ALWAYS_CONTINUOUS_DOMAINS
+        or (state := hass.states.get(entity_id))
         and (ATTR_UNIT_OF_MEASUREMENT in state.attributes)
         or is_sensor_continuous(ent_reg, entity_id)
     )
@@ -5,8 +5,6 @@ from collections.abc import Callable, Generator
 from contextlib import suppress
 from dataclasses import dataclass
 from datetime import datetime as dt
-import logging
-import re
 from typing import Any
 
 from sqlalchemy.engine.row import Row

@@ -30,7 +28,6 @@ from homeassistant.const import (
 )
 from homeassistant.core import HomeAssistant, split_entity_id
 from homeassistant.helpers import entity_registry as er
-from homeassistant.helpers.entityfilter import EntityFilter
 import homeassistant.util.dt as dt_util
 
 from .const import (

@@ -46,7 +43,6 @@ from .const import (
     CONTEXT_STATE,
     CONTEXT_USER_ID,
     DOMAIN,
-    LOGBOOK_ENTITIES_FILTER,
     LOGBOOK_ENTRY_DOMAIN,
     LOGBOOK_ENTRY_ENTITY_ID,
     LOGBOOK_ENTRY_ICON,

@@ -62,11 +58,6 @@ from .models import EventAsRow, LazyEventPartialState, async_event_to_row
 from .queries import statement_for_request
 from .queries.common import PSUEDO_EVENT_STATE_CHANGED
 
-_LOGGER = logging.getLogger(__name__)
-
-ENTITY_ID_JSON_EXTRACT = re.compile('"entity_id": ?"([^"]+)"')
-DOMAIN_JSON_EXTRACT = re.compile('"domain": ?"([^"]+)"')
-
 
 @dataclass
 class LogbookRun:

@@ -106,10 +97,6 @@ class EventProcessor:
         self.device_ids = device_ids
         self.context_id = context_id
         self.filters: Filters | None = hass.data[LOGBOOK_FILTERS]
-        if self.limited_select:
-            self.entities_filter: EntityFilter | Callable[[str], bool] | None = None
-        else:
-            self.entities_filter = hass.data[LOGBOOK_ENTITIES_FILTER]
         format_time = (
             _row_time_fired_timestamp if timestamp else _row_time_fired_isoformat
         )
@@ -183,7 +170,6 @@ class EventProcessor:
         return list(
             _humanify(
                 row_generator,
-                self.entities_filter,
                 self.ent_reg,
                 self.logbook_run,
                 self.context_augmenter,

@@ -193,7 +179,6 @@
 
 def _humanify(
     rows: Generator[Row | EventAsRow, None, None],
-    entities_filter: EntityFilter | Callable[[str], bool] | None,
     ent_reg: er.EntityRegistry,
     logbook_run: LogbookRun,
     context_augmenter: ContextAugmenter,

@@ -208,29 +193,13 @@ def _humanify(
     include_entity_name = logbook_run.include_entity_name
     format_time = logbook_run.format_time
 
-    def _keep_row(row: EventAsRow) -> bool:
-        """Check if the entity_filter rejects a row."""
-        assert entities_filter is not None
-        if entity_id := row.entity_id:
-            return entities_filter(entity_id)
-        if entity_id := row.data.get(ATTR_ENTITY_ID):
-            return entities_filter(entity_id)
-        if domain := row.data.get(ATTR_DOMAIN):
-            return entities_filter(f"{domain}._")
-        return True
-
     # Process rows
     for row in rows:
         context_id = context_lookup.memorize(row)
         if row.context_only:
             continue
         event_type = row.event_type
-        if event_type == EVENT_CALL_SERVICE or (
-            entities_filter
-            # We literally mean is EventAsRow not a subclass of EventAsRow
-            and type(row) is EventAsRow  # pylint: disable=unidiomatic-typecheck
-            and not _keep_row(row)
-        ):
+        if event_type == EVENT_CALL_SERVICE:
             continue
         if event_type is PSUEDO_EVENT_STATE_CHANGED:
             entity_id = row.entity_id

@@ -417,12 +386,6 @@ def _rows_match(row: Row | EventAsRow, other_row: Row | EventAsRow) -> bool:
     return False
 
 
-def _row_event_data_extract(row: Row | EventAsRow, extractor: re.Pattern) -> str | None:
-    """Extract from event_data row."""
-    result = extractor.search(row.shared_data or row.event_data or "")
-    return result.group(1) if result else None
-
-
 def _row_time_fired_isoformat(row: Row | EventAsRow) -> str:
     """Convert the row timed_fired to isoformat."""
     return process_timestamp_to_utc_isoformat(row.time_fired or dt_util.utcnow())
@@ -10,25 +10,31 @@ from sqlalchemy.sql.elements import ClauseList
 from sqlalchemy.sql.expression import literal
 from sqlalchemy.sql.selectable import Select
 
-from homeassistant.components.proximity import DOMAIN as PROXIMITY_DOMAIN
+from homeassistant.components.recorder.filters import like_domain_matchers
 from homeassistant.components.recorder.models import (
+    EVENTS_CONTEXT_ID_INDEX,
     OLD_FORMAT_ATTRS_JSON,
     OLD_STATE,
     SHARED_ATTRS_JSON,
+    STATES_CONTEXT_ID_INDEX,
     EventData,
     Events,
     StateAttributes,
     States,
 )
-from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
 
-CONTINUOUS_DOMAINS = {PROXIMITY_DOMAIN, SENSOR_DOMAIN}
-CONTINUOUS_ENTITY_ID_LIKE = [f"{domain}.%" for domain in CONTINUOUS_DOMAINS]
+from ..const import ALWAYS_CONTINUOUS_DOMAINS, CONDITIONALLY_CONTINUOUS_DOMAINS
+
+# Domains that are continuous if there is a UOM set on the entity
+CONDITIONALLY_CONTINUOUS_ENTITY_ID_LIKE = like_domain_matchers(
+    CONDITIONALLY_CONTINUOUS_DOMAINS
+)
+# Domains that are always continuous
+ALWAYS_CONTINUOUS_ENTITY_ID_LIKE = like_domain_matchers(ALWAYS_CONTINUOUS_DOMAINS)
 
 UNIT_OF_MEASUREMENT_JSON = '"unit_of_measurement":'
 UNIT_OF_MEASUREMENT_JSON_LIKE = f"%{UNIT_OF_MEASUREMENT_JSON}%"
 
 
 PSUEDO_EVENT_STATE_CHANGED = None
 # Since we don't store event_types and None
 # and we don't store state_changed in events
@@ -121,9 +127,7 @@ def select_events_context_only() -> Select:
     By marking them as context_only we know they are only for
     linking context ids and we can avoid processing them.
     """
-    return select(*EVENT_ROWS_NO_STATES, CONTEXT_ONLY).outerjoin(
-        EventData, (Events.data_id == EventData.data_id)
-    )
+    return select(*EVENT_ROWS_NO_STATES, CONTEXT_ONLY)
 
 
 def select_states_context_only() -> Select:

@@ -220,29 +224,44 @@ def _missing_state_matcher() -> sqlalchemy.and_:
 def _not_continuous_entity_matcher() -> sqlalchemy.or_:
     """Match non continuous entities."""
     return sqlalchemy.or_(
-        _not_continuous_domain_matcher(),
+        # First exclude domains that may be continuous
+        _not_possible_continuous_domain_matcher(),
+        # But let in the entities in the possible continuous domains
+        # that are not actually continuous sensors because they lack a UOM
         sqlalchemy.and_(
-            _continuous_domain_matcher, _not_uom_attributes_matcher()
+            _conditionally_continuous_domain_matcher, _not_uom_attributes_matcher()
         ).self_group(),
     )
 
 
-def _not_continuous_domain_matcher() -> sqlalchemy.and_:
-    """Match not continuous domains."""
+def _not_possible_continuous_domain_matcher() -> sqlalchemy.and_:
+    """Match not continuous domains.
+
+    This matches domain that are always considered continuous
+    and domains that are conditionally (if they have a UOM)
+    continuous domains.
+    """
     return sqlalchemy.and_(
         *[
             ~States.entity_id.like(entity_domain)
-            for entity_domain in CONTINUOUS_ENTITY_ID_LIKE
+            for entity_domain in (
+                *ALWAYS_CONTINUOUS_ENTITY_ID_LIKE,
+                *CONDITIONALLY_CONTINUOUS_ENTITY_ID_LIKE,
+            )
         ],
     ).self_group()
 
 
-def _continuous_domain_matcher() -> sqlalchemy.or_:
-    """Match continuous domains."""
+def _conditionally_continuous_domain_matcher() -> sqlalchemy.or_:
+    """Match conditionally continuous domains.
+
+    This matches domain that are only considered
+    continuous if a UOM is set.
+    """
     return sqlalchemy.or_(
         *[
             States.entity_id.like(entity_domain)
-            for entity_domain in CONTINUOUS_ENTITY_ID_LIKE
+            for entity_domain in CONDITIONALLY_CONTINUOUS_ENTITY_ID_LIKE
        ],
     ).self_group()

@@ -252,3 +271,17 @@ def _not_uom_attributes_matcher() -> ClauseList:
     return ~StateAttributes.shared_attrs.like(
         UNIT_OF_MEASUREMENT_JSON_LIKE
     ) | ~States.attributes.like(UNIT_OF_MEASUREMENT_JSON_LIKE)
+
+
+def apply_states_context_hints(query: Query) -> Query:
+    """Force mysql to use the right index on large context_id selects."""
+    return query.with_hint(
+        States, f"FORCE INDEX ({STATES_CONTEXT_ID_INDEX})", dialect_name="mysql"
+    )
+
+
+def apply_events_context_hints(query: Query) -> Query:
+    """Force mysql to use the right index on large context_id selects."""
+    return query.with_hint(
+        Events, f"FORCE INDEX ({EVENTS_CONTEXT_ID_INDEX})", dialect_name="mysql"
+    )
@@ -4,15 +4,22 @@ from __future__ import annotations
 from collections.abc import Iterable
 from datetime import datetime as dt
 
-from sqlalchemy import lambda_stmt, select, union_all
+from sqlalchemy import lambda_stmt, select
 from sqlalchemy.orm import Query
 from sqlalchemy.sql.elements import ClauseList
 from sqlalchemy.sql.lambdas import StatementLambdaElement
 from sqlalchemy.sql.selectable import CTE, CompoundSelect
 
-from homeassistant.components.recorder.models import DEVICE_ID_IN_EVENT, Events, States
+from homeassistant.components.recorder.models import (
+    DEVICE_ID_IN_EVENT,
+    EventData,
+    Events,
+    States,
+)
 
 from .common import (
+    apply_events_context_hints,
+    apply_states_context_hints,
     select_events_context_id_subquery,
     select_events_context_only,
     select_events_without_states,

@@ -27,13 +34,10 @@ def _select_device_id_context_ids_sub_query(
     json_quotable_device_ids: list[str],
 ) -> CompoundSelect:
     """Generate a subquery to find context ids for multiple devices."""
-    return select(
-        union_all(
-            select_events_context_id_subquery(start_day, end_day, event_types).where(
-                apply_event_device_id_matchers(json_quotable_device_ids)
-            ),
-        ).c.context_id
+    inner = select_events_context_id_subquery(start_day, end_day, event_types).where(
+        apply_event_device_id_matchers(json_quotable_device_ids)
+    )
+    return select(inner.c.context_id).group_by(inner.c.context_id)
 
 
 def _apply_devices_context_union(

@@ -51,8 +55,16 @@ def _apply_devices_context_union(
         json_quotable_device_ids,
     ).cte()
     return query.union_all(
-        select_events_context_only().where(Events.context_id.in_(devices_cte.select())),
-        select_states_context_only().where(States.context_id.in_(devices_cte.select())),
+        apply_events_context_hints(
+            select_events_context_only()
+            .select_from(devices_cte)
+            .outerjoin(Events, devices_cte.c.context_id == Events.context_id)
+        ).outerjoin(EventData, (Events.data_id == EventData.data_id)),
+        apply_states_context_hints(
+            select_states_context_only()
+            .select_from(devices_cte)
+            .outerjoin(States, devices_cte.c.context_id == States.context_id)
+        ),
     )
@@ -14,11 +14,14 @@ from homeassistant.components.recorder.models import (
     ENTITY_ID_IN_EVENT,
     ENTITY_ID_LAST_UPDATED_INDEX,
     OLD_ENTITY_ID_IN_EVENT,
+    EventData,
     Events,
     States,
 )
 
 from .common import (
+    apply_events_context_hints,
+    apply_states_context_hints,
     apply_states_filters,
     select_events_context_id_subquery,
     select_events_context_only,

@@ -36,16 +39,15 @@ def _select_entities_context_ids_sub_query(
     json_quotable_entity_ids: list[str],
 ) -> CompoundSelect:
     """Generate a subquery to find context ids for multiple entities."""
-    return select(
-        union_all(
-            select_events_context_id_subquery(start_day, end_day, event_types).where(
-                apply_event_entity_id_matchers(json_quotable_entity_ids)
-            ),
-            apply_entities_hints(select(States.context_id))
-            .filter((States.last_updated > start_day) & (States.last_updated < end_day))
-            .where(States.entity_id.in_(entity_ids)),
-        ).c.context_id
+    union = union_all(
+        select_events_context_id_subquery(start_day, end_day, event_types).where(
+            apply_event_entity_id_matchers(json_quotable_entity_ids)
+        ),
+        apply_entities_hints(select(States.context_id))
+        .filter((States.last_updated > start_day) & (States.last_updated < end_day))
+        .where(States.entity_id.in_(entity_ids)),
+    )
+    return select(union.c.context_id).group_by(union.c.context_id)
 
 
 def _apply_entities_context_union(

@@ -64,14 +66,23 @@ def _apply_entities_context_union(
         entity_ids,
         json_quotable_entity_ids,
     ).cte()
+    # We used to optimize this to exclude rows we already in the union with
+    # a States.entity_id.not_in(entity_ids) but that made the
+    # query much slower on MySQL, and since we already filter them away
+    # in the python code anyways since they will have context_only
+    # set on them the impact is minimal.
     return query.union_all(
         states_query_for_entity_ids(start_day, end_day, entity_ids),
-        select_events_context_only().where(
-            Events.context_id.in_(entities_cte.select())
+        apply_events_context_hints(
+            select_events_context_only()
+            .select_from(entities_cte)
+            .outerjoin(Events, entities_cte.c.context_id == Events.context_id)
+        ).outerjoin(EventData, (Events.data_id == EventData.data_id)),
+        apply_states_context_hints(
+            select_states_context_only()
+            .select_from(entities_cte)
+            .outerjoin(States, entities_cte.c.context_id == States.context_id)
+        ),
-        select_states_context_only()
-        .where(States.entity_id.not_in(entity_ids))
-        .where(States.context_id.in_(entities_cte.select())),
     )
@@ -10,9 +10,11 @@ from sqlalchemy.orm import Query
 from sqlalchemy.sql.lambdas import StatementLambdaElement
 from sqlalchemy.sql.selectable import CTE, CompoundSelect
 
-from homeassistant.components.recorder.models import Events, States
+from homeassistant.components.recorder.models import EventData, Events, States
 
 from .common import (
+    apply_events_context_hints,
+    apply_states_context_hints,
     select_events_context_id_subquery,
     select_events_context_only,
     select_events_without_states,

@@ -35,18 +37,17 @@ def _select_entities_device_id_context_ids_sub_query(
     json_quotable_device_ids: list[str],
 ) -> CompoundSelect:
     """Generate a subquery to find context ids for multiple entities and multiple devices."""
-    return select(
-        union_all(
-            select_events_context_id_subquery(start_day, end_day, event_types).where(
-                _apply_event_entity_id_device_id_matchers(
-                    json_quotable_entity_ids, json_quotable_device_ids
-                )
-            ),
-            apply_entities_hints(select(States.context_id))
-            .filter((States.last_updated > start_day) & (States.last_updated < end_day))
-            .where(States.entity_id.in_(entity_ids)),
-        ).c.context_id
+    union = union_all(
+        select_events_context_id_subquery(start_day, end_day, event_types).where(
+            _apply_event_entity_id_device_id_matchers(
+                json_quotable_entity_ids, json_quotable_device_ids
+            )
+        ),
+        apply_entities_hints(select(States.context_id))
+        .filter((States.last_updated > start_day) & (States.last_updated < end_day))
+        .where(States.entity_id.in_(entity_ids)),
+    )
+    return select(union.c.context_id).group_by(union.c.context_id)
 
 
 def _apply_entities_devices_context_union(

@@ -66,14 +67,23 @@ def _apply_entities_devices_context_union(
         json_quotable_entity_ids,
         json_quotable_device_ids,
     ).cte()
+    # We used to optimize this to exclude rows we already in the union with
+    # a States.entity_id.not_in(entity_ids) but that made the
+    # query much slower on MySQL, and since we already filter them away
+    # in the python code anyways since they will have context_only
+    # set on them the impact is minimal.
    return query.union_all(
         states_query_for_entity_ids(start_day, end_day, entity_ids),
-        select_events_context_only().where(
-            Events.context_id.in_(devices_entities_cte.select())
+        apply_events_context_hints(
+            select_events_context_only()
+            .select_from(devices_entities_cte)
+            .outerjoin(Events, devices_entities_cte.c.context_id == Events.context_id)
+        ).outerjoin(EventData, (Events.data_id == EventData.data_id)),
+        apply_states_context_hints(
+            select_states_context_only()
+            .select_from(devices_entities_cte)
+            .outerjoin(States, devices_entities_cte.c.context_id == States.context_id)
+        ),
-        select_states_context_only()
-        .where(States.entity_id.not_in(entity_ids))
-        .where(States.context_id.in_(devices_entities_cte.select())),
     )
@@ -16,9 +16,11 @@ from homeassistant.components.websocket_api import messages
 from homeassistant.components.websocket_api.connection import ActiveConnection
 from homeassistant.components.websocket_api.const import JSON_DUMP
 from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback
+from homeassistant.helpers.entityfilter import EntityFilter
 from homeassistant.helpers.event import async_track_point_in_utc_time
 import homeassistant.util.dt as dt_util
 
+from .const import LOGBOOK_ENTITIES_FILTER
 from .helpers import (
     async_determine_event_types,
     async_filter_entities,

@@ -67,6 +69,23 @@ async def _async_wait_for_recorder_sync(hass: HomeAssistant) -> None:
     )
 
 
+@callback
+def _async_send_empty_response(
+    connection: ActiveConnection, msg_id: int, start_time: dt, end_time: dt | None
+) -> None:
+    """Send an empty response.
+
+    The current case for this is when they ask for entity_ids
+    that will all be filtered away because they have UOMs or
+    state_class.
+    """
+    connection.send_result(msg_id)
+    stream_end_time = end_time or dt_util.utcnow()
+    empty_stream_message = _generate_stream_message([], start_time, stream_end_time)
+    empty_response = messages.event_message(msg_id, empty_stream_message)
+    connection.send_message(JSON_DUMP(empty_response))
+
+
 async def _async_send_historical_events(
     hass: HomeAssistant,
     connection: ActiveConnection,

@@ -171,6 +190,17 @@ async def _async_get_ws_stream_events(
     )
 
 
+def _generate_stream_message(
+    events: list[dict[str, Any]], start_day: dt, end_day: dt
+) -> dict[str, Any]:
+    """Generate a logbook stream message response."""
+    return {
+        "events": events,
+        "start_time": dt_util.utc_to_timestamp(start_day),
+        "end_time": dt_util.utc_to_timestamp(end_day),
+    }
+
+
 def _ws_stream_get_events(
     msg_id: int,
     start_day: dt,
@@ -184,11 +214,7 @@ def _ws_stream_get_events(
     last_time = None
     if events:
         last_time = dt_util.utc_from_timestamp(events[-1]["when"])
-    message = {
-        "events": events,
-        "start_time": dt_util.utc_to_timestamp(start_day),
-        "end_time": dt_util.utc_to_timestamp(end_day),
-    }
+    message = _generate_stream_message(events, start_day, end_day)
     if partial:
         # This is a hint to consumers of the api that
         # we are about to send a another block of historical

@@ -275,6 +301,10 @@ async def ws_event_stream(
     entity_ids = msg.get("entity_ids")
     if entity_ids:
+        entity_ids = async_filter_entities(hass, entity_ids)
+        if not entity_ids:
+            _async_send_empty_response(connection, msg_id, start_time, end_time)
+            return
 
     event_types = async_determine_event_types(hass, entity_ids, device_ids)
     event_processor = EventProcessor(
         hass,

@@ -337,8 +367,18 @@ async def ws_event_stream(
         )
         _unsub()
 
+    entities_filter: EntityFilter | None = None
+    if not event_processor.limited_select:
+        entities_filter = hass.data[LOGBOOK_ENTITIES_FILTER]
+
     async_subscribe_events(
-        hass, subscriptions, _queue_or_cancel, event_types, entity_ids, device_ids
+        hass,
+        subscriptions,
+        _queue_or_cancel,
+        event_types,
+        entities_filter,
+        entity_ids,
+        device_ids,
     )
     subscriptions_setup_complete_time = dt_util.utcnow()
     connection.subscriptions[msg_id] = _unsub
@@ -3,7 +3,7 @@
   "name": "LOOKin",
   "documentation": "https://www.home-assistant.io/integrations/lookin/",
   "codeowners": ["@ANMalko", "@bdraco"],
-  "requirements": ["aiolookin==0.1.0"],
+  "requirements": ["aiolookin==0.1.1"],
   "zeroconf": ["_lookin._tcp.local."],
   "config_flow": true,
   "iot_class": "local_push",
@@ -110,7 +110,7 @@ def _state_schema(state):
 
 PLATFORM_SCHEMA = vol.Schema(
     vol.All(
-        mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend(
+        mqtt.config.MQTT_BASE_SCHEMA.extend(
             {
                 vol.Required(CONF_PLATFORM): "manual_mqtt",
                 vol.Optional(CONF_NAME, default=DEFAULT_ALARM_NAME): cv.string,
File diff suppressed because it is too large
@@ -1,7 +1,6 @@
 """This platform enables the possibility to control a MQTT alarm."""
 from __future__ import annotations
 
-import asyncio
 import functools
 import logging
 import re

@@ -31,8 +30,8 @@ import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 
-from . import MqttCommandTemplate, MqttValueTemplate, subscription
-from .. import mqtt
+from . import subscription
+from .config import DEFAULT_RETAIN, MQTT_BASE_SCHEMA
 from .const import (
     CONF_COMMAND_TEMPLATE,
     CONF_COMMAND_TOPIC,

@@ -45,11 +44,13 @@ from .debug_info import log_messages
 from .mixins import (
     MQTT_ENTITY_COMMON_SCHEMA,
     MqttEntity,
-    async_get_platform_config_from_yaml,
     async_setup_entry_helper,
+    async_setup_platform_discovery,
     async_setup_platform_helper,
     warn_for_legacy_schema,
 )
+from .models import MqttCommandTemplate, MqttValueTemplate
+from .util import valid_publish_topic, valid_subscribe_topic
 
 _LOGGER = logging.getLogger(__name__)

@@ -85,7 +86,7 @@ DEFAULT_NAME = "MQTT Alarm"
 REMOTE_CODE = "REMOTE_CODE"
 REMOTE_CODE_TEXT = "REMOTE_CODE_TEXT"
 
-PLATFORM_SCHEMA_MODERN = mqtt.MQTT_BASE_SCHEMA.extend(
+PLATFORM_SCHEMA_MODERN = MQTT_BASE_SCHEMA.extend(
     {
         vol.Optional(CONF_CODE): cv.string,
         vol.Optional(CONF_CODE_ARM_REQUIRED, default=True): cv.boolean,
@@ -94,7 +95,7 @@ PLATFORM_SCHEMA_MODERN = mqtt.MQTT_BASE_SCHEMA.extend(
         vol.Optional(
             CONF_COMMAND_TEMPLATE, default=DEFAULT_COMMAND_TEMPLATE
         ): cv.template,
-        vol.Required(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
+        vol.Required(CONF_COMMAND_TOPIC): valid_publish_topic,
         vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
         vol.Optional(CONF_PAYLOAD_ARM_AWAY, default=DEFAULT_ARM_AWAY): cv.string,
         vol.Optional(CONF_PAYLOAD_ARM_HOME, default=DEFAULT_ARM_HOME): cv.string,

@@ -107,8 +108,8 @@ PLATFORM_SCHEMA_MODERN = mqtt.MQTT_BASE_SCHEMA.extend(
         ): cv.string,
         vol.Optional(CONF_PAYLOAD_DISARM, default=DEFAULT_DISARM): cv.string,
         vol.Optional(CONF_PAYLOAD_TRIGGER, default=DEFAULT_TRIGGER): cv.string,
-        vol.Optional(CONF_RETAIN, default=mqtt.DEFAULT_RETAIN): cv.boolean,
-        vol.Required(CONF_STATE_TOPIC): mqtt.valid_subscribe_topic,
+        vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
+        vol.Required(CONF_STATE_TOPIC): valid_subscribe_topic,
         vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
     }
 ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema)

@@ -131,7 +132,11 @@ async def async_setup_platform(
     """Set up MQTT alarm control panel configured under the alarm_control_panel key (deprecated)."""
     # Deprecated in HA Core 2022.6
     await async_setup_platform_helper(
-        hass, alarm.DOMAIN, config, async_add_entities, _async_setup_entity
+        hass,
+        alarm.DOMAIN,
+        discovery_info or config,
+        async_add_entities,
+        _async_setup_entity,
     )

@@ -142,13 +147,8 @@ async def async_setup_entry(
 ) -> None:
     """Set up MQTT alarm control panel through configuration.yaml and dynamically through MQTT discovery."""
-    # load and initialize platform config from configuration.yaml
-    await asyncio.gather(
-        *(
-            _async_setup_entity(hass, async_add_entities, config, config_entry)
-            for config in await async_get_platform_config_from_yaml(
-                hass, alarm.DOMAIN, PLATFORM_SCHEMA_MODERN
-            )
-        )
+    config_entry.async_on_unload(
+        await async_setup_platform_discovery(hass, alarm.DOMAIN, PLATFORM_SCHEMA_MODERN)
     )
     # setup for discovery
     setup = functools.partial(
@@ -1,7 +1,6 @@
 """Support for MQTT binary sensors."""
 from __future__ import annotations
 
-import asyncio
 from datetime import timedelta
 import functools
 import logging

@@ -34,19 +33,20 @@ from homeassistant.helpers.restore_state import RestoreEntity
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 from homeassistant.util import dt as dt_util
 
-from . import MqttValueTemplate, subscription
-from .. import mqtt
+from . import subscription
+from .config import MQTT_RO_SCHEMA
 from .const import CONF_ENCODING, CONF_QOS, CONF_STATE_TOPIC, PAYLOAD_NONE
 from .debug_info import log_messages
 from .mixins import (
     MQTT_ENTITY_COMMON_SCHEMA,
     MqttAvailability,
     MqttEntity,
-    async_get_platform_config_from_yaml,
     async_setup_entry_helper,
+    async_setup_platform_discovery,
     async_setup_platform_helper,
     warn_for_legacy_schema,
 )
+from .models import MqttValueTemplate
 
 _LOGGER = logging.getLogger(__name__)

@@ -57,7 +57,7 @@ DEFAULT_PAYLOAD_ON = "ON"
 DEFAULT_FORCE_UPDATE = False
 CONF_EXPIRE_AFTER = "expire_after"
 
-PLATFORM_SCHEMA_MODERN = mqtt.MQTT_RO_SCHEMA.extend(
+PLATFORM_SCHEMA_MODERN = MQTT_RO_SCHEMA.extend(
     {
         vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
         vol.Optional(CONF_EXPIRE_AFTER): cv.positive_int,

@@ -87,7 +87,11 @@ async def async_setup_platform(
     """Set up MQTT binary sensor configured under the fan platform key (deprecated)."""
     # Deprecated in HA Core 2022.6
     await async_setup_platform_helper(
-        hass, binary_sensor.DOMAIN, config, async_add_entities, _async_setup_entity
+        hass,
+        binary_sensor.DOMAIN,
+        discovery_info or config,
+        async_add_entities,
+        _async_setup_entity,
     )
@@ -98,12 +102,9 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up MQTT binary sensor through configuration.yaml and dynamically through MQTT discovery."""
|
||||
# load and initialize platform config from configuration.yaml
|
||||
await asyncio.gather(
|
||||
*(
|
||||
_async_setup_entity(hass, async_add_entities, config, config_entry)
|
||||
for config in await async_get_platform_config_from_yaml(
|
||||
hass, binary_sensor.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
config_entry.async_on_unload(
|
||||
await async_setup_platform_discovery(
|
||||
hass, binary_sensor.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
)
|
||||
# setup for discovery
|
||||
|
||||
@@ -1,7 +1,6 @@
"""Support for MQTT buttons."""
from __future__ import annotations

import asyncio
import functools

import voluptuous as vol
@@ -15,8 +14,7 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import MqttCommandTemplate
from .. import mqtt
from .config import DEFAULT_RETAIN, MQTT_BASE_SCHEMA
from .const import (
CONF_COMMAND_TEMPLATE,
CONF_COMMAND_TOPIC,
@@ -27,24 +25,26 @@ from .const import (
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttEntity,
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
warn_for_legacy_schema,
)
from .models import MqttCommandTemplate
from .util import valid_publish_topic

CONF_PAYLOAD_PRESS = "payload_press"
DEFAULT_NAME = "MQTT Button"
DEFAULT_PAYLOAD_PRESS = "PRESS"

PLATFORM_SCHEMA_MODERN = mqtt.MQTT_BASE_SCHEMA.extend(
PLATFORM_SCHEMA_MODERN = MQTT_BASE_SCHEMA.extend(
{
vol.Optional(CONF_COMMAND_TEMPLATE): cv.template,
vol.Required(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Required(CONF_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_DEVICE_CLASS): button.DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PAYLOAD_PRESS, default=DEFAULT_PAYLOAD_PRESS): cv.string,
vol.Optional(CONF_RETAIN, default=mqtt.DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
}
).extend(MQTT_ENTITY_COMMON_SCHEMA.schema)

@@ -67,7 +67,11 @@ async def async_setup_platform(
"""Set up MQTT button configured under the fan platform key (deprecated)."""
# Deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, button.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
button.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)

@@ -78,12 +82,9 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT button through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, button.DOMAIN, PLATFORM_SCHEMA_MODERN
)
config_entry.async_on_unload(
await async_setup_platform_discovery(
hass, button.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
# setup for discovery
@@ -1,7 +1,6 @@
"""Camera that loads a picture from an MQTT topic."""
from __future__ import annotations

import asyncio
from base64 import b64decode
import functools

@@ -17,17 +16,18 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import subscription
from .. import mqtt
from .config import MQTT_BASE_SCHEMA
from .const import CONF_ENCODING, CONF_QOS, CONF_TOPIC
from .debug_info import log_messages
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttEntity,
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
warn_for_legacy_schema,
)
from .util import valid_subscribe_topic

DEFAULT_NAME = "MQTT Camera"

@@ -40,10 +40,10 @@ MQTT_CAMERA_ATTRIBUTES_BLOCKED = frozenset(
}
)

PLATFORM_SCHEMA_MODERN = mqtt.MQTT_BASE_SCHEMA.extend(
PLATFORM_SCHEMA_MODERN = MQTT_BASE_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(CONF_TOPIC): mqtt.valid_subscribe_topic,
vol.Required(CONF_TOPIC): valid_subscribe_topic,
}
).extend(MQTT_ENTITY_COMMON_SCHEMA.schema)

@@ -65,7 +65,11 @@ async def async_setup_platform(
"""Set up MQTT camera configured under the camera platform key (deprecated)."""
# Deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, camera.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
camera.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)

@@ -76,12 +80,9 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT camera through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, camera.DOMAIN, PLATFORM_SCHEMA_MODERN
)
config_entry.async_on_unload(
await async_setup_platform_discovery(
hass, camera.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
# setup for discovery
659
homeassistant/components/mqtt/client.py
Normal file
@@ -0,0 +1,659 @@
"""Support for MQTT message handling."""
from __future__ import annotations

import asyncio
from collections.abc import Awaitable, Callable
from functools import lru_cache, partial, wraps
import inspect
from itertools import groupby
import logging
from operator import attrgetter
import ssl
import time
from typing import TYPE_CHECKING, Any, Union, cast
import uuid

import attr
import certifi

from homeassistant.const import (
CONF_CLIENT_ID,
CONF_PASSWORD,
CONF_PORT,
CONF_PROTOCOL,
CONF_USERNAME,
EVENT_HOMEASSISTANT_STARTED,
)
from homeassistant.core import CoreState, HassJob, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
from homeassistant.util import dt as dt_util
from homeassistant.util.async_ import run_callback_threadsafe
from homeassistant.util.logging import catch_log_exception

from .const import (
ATTR_TOPIC,
CONF_BIRTH_MESSAGE,
CONF_BROKER,
CONF_CERTIFICATE,
CONF_CLIENT_CERT,
CONF_CLIENT_KEY,
CONF_KEEPALIVE,
CONF_TLS_INSECURE,
CONF_WILL_MESSAGE,
DATA_MQTT,
DEFAULT_ENCODING,
DEFAULT_QOS,
MQTT_CONNECTED,
MQTT_DISCONNECTED,
PROTOCOL_31,
)
from .discovery import LAST_DISCOVERY
from .models import (
AsyncMessageCallbackType,
MessageCallbackType,
PublishMessage,
PublishPayloadType,
ReceiveMessage,
ReceivePayloadType,
)

if TYPE_CHECKING:
# Only import for paho-mqtt type checking here, imports are done locally
# because integrations should be able to optionally rely on MQTT.
import paho.mqtt.client as mqtt

_LOGGER = logging.getLogger(__name__)

DISCOVERY_COOLDOWN = 2
TIMEOUT_ACK = 10

SubscribePayloadType = Union[str, bytes] # Only bytes if encoding is None

def publish(
hass: HomeAssistant,
topic: str,
payload: PublishPayloadType,
qos: int | None = 0,
retain: bool | None = False,
encoding: str | None = DEFAULT_ENCODING,
) -> None:
"""Publish message to a MQTT topic."""
hass.add_job(async_publish, hass, topic, payload, qos, retain, encoding)

async def async_publish(
hass: HomeAssistant,
topic: str,
payload: PublishPayloadType,
qos: int | None = 0,
retain: bool | None = False,
encoding: str | None = DEFAULT_ENCODING,
) -> None:
"""Publish message to a MQTT topic."""

outgoing_payload = payload
if not isinstance(payload, bytes):
if not encoding:
_LOGGER.error(
"Can't pass-through payload for publishing %s on %s with no encoding set, need 'bytes' got %s",
payload,
topic,
type(payload),
)
return
outgoing_payload = str(payload)
if encoding != DEFAULT_ENCODING:
# a string is encoded as utf-8 by default, other encoding requires bytes as payload
try:
outgoing_payload = outgoing_payload.encode(encoding)
except (AttributeError, LookupError, UnicodeEncodeError):
_LOGGER.error(
"Can't encode payload for publishing %s on %s with encoding %s",
payload,
topic,
encoding,
)
return

await hass.data[DATA_MQTT].async_publish(topic, outgoing_payload, qos, retain)

AsyncDeprecatedMessageCallbackType = Callable[
[str, ReceivePayloadType, int], Awaitable[None]
]
DeprecatedMessageCallbackType = Callable[[str, ReceivePayloadType, int], None]

def wrap_msg_callback(
msg_callback: AsyncDeprecatedMessageCallbackType | DeprecatedMessageCallbackType,
) -> AsyncMessageCallbackType | MessageCallbackType:
"""Wrap an MQTT message callback to support deprecated signature."""
# Check for partials to properly determine if coroutine function
check_func = msg_callback
while isinstance(check_func, partial):
check_func = check_func.func

wrapper_func: AsyncMessageCallbackType | MessageCallbackType
if asyncio.iscoroutinefunction(check_func):

@wraps(msg_callback)
async def async_wrapper(msg: ReceiveMessage) -> None:
"""Call with deprecated signature."""
await cast(AsyncDeprecatedMessageCallbackType, msg_callback)(
msg.topic, msg.payload, msg.qos
)

wrapper_func = async_wrapper
else:

@wraps(msg_callback)
def wrapper(msg: ReceiveMessage) -> None:
"""Call with deprecated signature."""
msg_callback(msg.topic, msg.payload, msg.qos)

wrapper_func = wrapper
return wrapper_func

@bind_hass
async def async_subscribe(
hass: HomeAssistant,
topic: str,
msg_callback: AsyncMessageCallbackType
| MessageCallbackType
| DeprecatedMessageCallbackType
| AsyncDeprecatedMessageCallbackType,
qos: int = DEFAULT_QOS,
encoding: str | None = "utf-8",
):
"""Subscribe to an MQTT topic.

Call the return value to unsubscribe.
"""
# Count callback parameters which don't have a default value
non_default = 0
if msg_callback:
non_default = sum(
p.default == inspect.Parameter.empty
for _, p in inspect.signature(msg_callback).parameters.items()
)

wrapped_msg_callback = msg_callback
# If we have 3 parameters with no default value, wrap the callback
if non_default == 3:
module = inspect.getmodule(msg_callback)
_LOGGER.warning(
"Signature of MQTT msg_callback '%s.%s' is deprecated",
module.__name__ if module else "<unknown>",
msg_callback.__name__,
)
wrapped_msg_callback = wrap_msg_callback(
cast(DeprecatedMessageCallbackType, msg_callback)
)

async_remove = await hass.data[DATA_MQTT].async_subscribe(
topic,
catch_log_exception(
wrapped_msg_callback,
lambda msg: (
f"Exception in {msg_callback.__name__} when handling msg on "
f"'{msg.topic}': '{msg.payload}'"
),
),
qos,
encoding,
)
return async_remove

@bind_hass
def subscribe(
hass: HomeAssistant,
topic: str,
msg_callback: MessageCallbackType,
qos: int = DEFAULT_QOS,
encoding: str = "utf-8",
) -> Callable[[], None]:
"""Subscribe to an MQTT topic."""
async_remove = asyncio.run_coroutine_threadsafe(
async_subscribe(hass, topic, msg_callback, qos, encoding), hass.loop
).result()

def remove():
"""Remove listener convert."""
run_callback_threadsafe(hass.loop, async_remove).result()

return remove

@attr.s(slots=True, frozen=True)
class Subscription:
"""Class to hold data about an active subscription."""

topic: str = attr.ib()
matcher: Any = attr.ib()
job: HassJob = attr.ib()
qos: int = attr.ib(default=0)
encoding: str | None = attr.ib(default="utf-8")

class MqttClientSetup:
"""Helper class to setup the paho mqtt client from config."""

def __init__(self, config: ConfigType) -> None:
"""Initialize the MQTT client setup helper."""

# We don't import on the top because some integrations
# should be able to optionally rely on MQTT.
import paho.mqtt.client as mqtt # pylint: disable=import-outside-toplevel

if config[CONF_PROTOCOL] == PROTOCOL_31:
proto = mqtt.MQTTv31
else:
proto = mqtt.MQTTv311

if (client_id := config.get(CONF_CLIENT_ID)) is None:
# PAHO MQTT relies on the MQTT server to generate random client IDs.
# However, that feature is not mandatory so we generate our own.
client_id = mqtt.base62(uuid.uuid4().int, padding=22)
self._client = mqtt.Client(client_id, protocol=proto)

# Enable logging
self._client.enable_logger()

username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
if username is not None:
self._client.username_pw_set(username, password)

if (certificate := config.get(CONF_CERTIFICATE)) == "auto":
certificate = certifi.where()

client_key = config.get(CONF_CLIENT_KEY)
client_cert = config.get(CONF_CLIENT_CERT)
tls_insecure = config.get(CONF_TLS_INSECURE)
if certificate is not None:
self._client.tls_set(
certificate,
certfile=client_cert,
keyfile=client_key,
tls_version=ssl.PROTOCOL_TLS,
)

if tls_insecure is not None:
self._client.tls_insecure_set(tls_insecure)

@property
def client(self) -> mqtt.Client:
"""Return the paho MQTT client."""
return self._client

class MQTT:
"""Home Assistant MQTT client."""

def __init__(
self,
hass: HomeAssistant,
config_entry,
conf,
) -> None:
"""Initialize Home Assistant MQTT client."""
# We don't import on the top because some integrations
# should be able to optionally rely on MQTT.
import paho.mqtt.client as mqtt # pylint: disable=import-outside-toplevel

self.hass = hass
self.config_entry = config_entry
self.conf = conf
self.subscriptions: list[Subscription] = []
self.connected = False
self._ha_started = asyncio.Event()
self._last_subscribe = time.time()
self._mqttc: mqtt.Client = None
self._paho_lock = asyncio.Lock()

self._pending_operations: dict[str, asyncio.Event] = {}

if self.hass.state == CoreState.running:
self._ha_started.set()
else:

@callback
def ha_started(_):
self._ha_started.set()

self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, ha_started)

self.init_client()

def init_client(self):
"""Initialize paho client."""
self._mqttc = MqttClientSetup(self.conf).client
self._mqttc.on_connect = self._mqtt_on_connect
self._mqttc.on_disconnect = self._mqtt_on_disconnect
self._mqttc.on_message = self._mqtt_on_message
self._mqttc.on_publish = self._mqtt_on_callback
self._mqttc.on_subscribe = self._mqtt_on_callback
self._mqttc.on_unsubscribe = self._mqtt_on_callback

if (
CONF_WILL_MESSAGE in self.conf
and ATTR_TOPIC in self.conf[CONF_WILL_MESSAGE]
):
will_message = PublishMessage(**self.conf[CONF_WILL_MESSAGE])
else:
will_message = None

if will_message is not None:
self._mqttc.will_set(
topic=will_message.topic,
payload=will_message.payload,
qos=will_message.qos,
retain=will_message.retain,
)

async def async_publish(
self, topic: str, payload: PublishPayloadType, qos: int, retain: bool
) -> None:
"""Publish a MQTT message."""
async with self._paho_lock:
msg_info = await self.hass.async_add_executor_job(
self._mqttc.publish, topic, payload, qos, retain
)
_LOGGER.debug(
"Transmitting message on %s: '%s', mid: %s",
topic,
payload,
msg_info.mid,
)
_raise_on_error(msg_info.rc)
await self._wait_for_mid(msg_info.mid)

async def async_connect(self) -> None:
"""Connect to the host. Does not process messages yet."""
# pylint: disable-next=import-outside-toplevel
import paho.mqtt.client as mqtt

result: int | None = None
try:
result = await self.hass.async_add_executor_job(
self._mqttc.connect,
self.conf[CONF_BROKER],
self.conf[CONF_PORT],
self.conf[CONF_KEEPALIVE],
)
except OSError as err:
_LOGGER.error("Failed to connect to MQTT server due to exception: %s", err)

if result is not None and result != 0:
_LOGGER.error(
"Failed to connect to MQTT server: %s", mqtt.error_string(result)
)

self._mqttc.loop_start()

async def async_disconnect(self):
"""Stop the MQTT client."""

def stop():
"""Stop the MQTT client."""
# Do not disconnect, we want the broker to always publish will
self._mqttc.loop_stop()

await self.hass.async_add_executor_job(stop)

async def async_subscribe(
self,
topic: str,
msg_callback: MessageCallbackType,
qos: int,
encoding: str | None = None,
) -> Callable[[], None]:
"""Set up a subscription to a topic with the provided qos.

This method is a coroutine.
"""
if not isinstance(topic, str):
raise HomeAssistantError("Topic needs to be a string!")

subscription = Subscription(
topic, _matcher_for_topic(topic), HassJob(msg_callback), qos, encoding
)
self.subscriptions.append(subscription)
self._matching_subscriptions.cache_clear()

# Only subscribe if currently connected.
if self.connected:
self._last_subscribe = time.time()
await self._async_perform_subscription(topic, qos)

@callback
def async_remove() -> None:
"""Remove subscription."""
if subscription not in self.subscriptions:
raise HomeAssistantError("Can't remove subscription twice")
self.subscriptions.remove(subscription)
self._matching_subscriptions.cache_clear()

# Only unsubscribe if currently connected.
if self.connected:
self.hass.async_create_task(self._async_unsubscribe(topic))

return async_remove

async def _async_unsubscribe(self, topic: str) -> None:
"""Unsubscribe from a topic.

This method is a coroutine.
"""
if any(other.topic == topic for other in self.subscriptions):
# Other subscriptions on topic remaining - don't unsubscribe.
return

async with self._paho_lock:
result: int | None = None
result, mid = await self.hass.async_add_executor_job(
self._mqttc.unsubscribe, topic
)
_LOGGER.debug("Unsubscribing from %s, mid: %s", topic, mid)
_raise_on_error(result)
await self._wait_for_mid(mid)

async def _async_perform_subscription(self, topic: str, qos: int) -> None:
"""Perform a paho-mqtt subscription."""
async with self._paho_lock:
result: int | None = None
result, mid = await self.hass.async_add_executor_job(
self._mqttc.subscribe, topic, qos
)
_LOGGER.debug("Subscribing to %s, mid: %s", topic, mid)
_raise_on_error(result)
await self._wait_for_mid(mid)

def _mqtt_on_connect(self, _mqttc, _userdata, _flags, result_code: int) -> None:
"""On connect callback.

Resubscribe to all topics we were subscribed to and publish birth
message.
"""
# pylint: disable-next=import-outside-toplevel
import paho.mqtt.client as mqtt

if result_code != mqtt.CONNACK_ACCEPTED:
_LOGGER.error(
"Unable to connect to the MQTT broker: %s",
mqtt.connack_string(result_code),
)
return

self.connected = True
dispatcher_send(self.hass, MQTT_CONNECTED)
_LOGGER.info(
"Connected to MQTT server %s:%s (%s)",
self.conf[CONF_BROKER],
self.conf[CONF_PORT],
result_code,
)

# Group subscriptions to only re-subscribe once for each topic.
keyfunc = attrgetter("topic")
for topic, subs in groupby(sorted(self.subscriptions, key=keyfunc), keyfunc):
# Re-subscribe with the highest requested qos
max_qos = max(subscription.qos for subscription in subs)
self.hass.add_job(self._async_perform_subscription, topic, max_qos)

if (
CONF_BIRTH_MESSAGE in self.conf
and ATTR_TOPIC in self.conf[CONF_BIRTH_MESSAGE]
):

async def publish_birth_message(birth_message):
await self._ha_started.wait() # Wait for Home Assistant to start
await self._discovery_cooldown() # Wait for MQTT discovery to cool down
await self.async_publish(
topic=birth_message.topic,
payload=birth_message.payload,
qos=birth_message.qos,
retain=birth_message.retain,
)

birth_message = PublishMessage(**self.conf[CONF_BIRTH_MESSAGE])
asyncio.run_coroutine_threadsafe(
publish_birth_message(birth_message), self.hass.loop
)

def _mqtt_on_message(self, _mqttc, _userdata, msg) -> None:
"""Message received callback."""
self.hass.add_job(self._mqtt_handle_message, msg)

@lru_cache(2048)
def _matching_subscriptions(self, topic):
subscriptions = []
for subscription in self.subscriptions:
if subscription.matcher(topic):
subscriptions.append(subscription)
return subscriptions

@callback
def _mqtt_handle_message(self, msg) -> None:
_LOGGER.debug(
"Received message on %s%s: %s",
msg.topic,
" (retained)" if msg.retain else "",
msg.payload[0:8192],
)
timestamp = dt_util.utcnow()

subscriptions = self._matching_subscriptions(msg.topic)

for subscription in subscriptions:

payload: SubscribePayloadType = msg.payload
if subscription.encoding is not None:
try:
payload = msg.payload.decode(subscription.encoding)
except (AttributeError, UnicodeDecodeError):
_LOGGER.warning(
"Can't decode payload %s on %s with encoding %s (for %s)",
msg.payload[0:8192],
msg.topic,
subscription.encoding,
subscription.job,
)
continue

self.hass.async_run_hass_job(
subscription.job,
ReceiveMessage(
msg.topic,
payload,
msg.qos,
msg.retain,
subscription.topic,
timestamp,
),
)

def _mqtt_on_callback(self, _mqttc, _userdata, mid, _granted_qos=None) -> None:
"""Publish / Subscribe / Unsubscribe callback."""
self.hass.add_job(self._mqtt_handle_mid, mid)

@callback
def _mqtt_handle_mid(self, mid) -> None:
# Create the mid event if not created, either _mqtt_handle_mid or _wait_for_mid
# may be executed first.
if mid not in self._pending_operations:
self._pending_operations[mid] = asyncio.Event()
self._pending_operations[mid].set()

def _mqtt_on_disconnect(self, _mqttc, _userdata, result_code: int) -> None:
"""Disconnected callback."""
self.connected = False
dispatcher_send(self.hass, MQTT_DISCONNECTED)
_LOGGER.warning(
"Disconnected from MQTT server %s:%s (%s)",
self.conf[CONF_BROKER],
self.conf[CONF_PORT],
result_code,
)

async def _wait_for_mid(self, mid):
"""Wait for ACK from broker."""
# Create the mid event if not created, either _mqtt_handle_mid or _wait_for_mid
# may be executed first.
if mid not in self._pending_operations:
self._pending_operations[mid] = asyncio.Event()
try:
await asyncio.wait_for(self._pending_operations[mid].wait(), TIMEOUT_ACK)
except asyncio.TimeoutError:
_LOGGER.warning(
"No ACK from MQTT server in %s seconds (mid: %s)", TIMEOUT_ACK, mid
)
finally:
del self._pending_operations[mid]

async def _discovery_cooldown(self):
now = time.time()
# Reset discovery and subscribe cooldowns
self.hass.data[LAST_DISCOVERY] = now
self._last_subscribe = now

last_discovery = self.hass.data[LAST_DISCOVERY]
last_subscribe = self._last_subscribe
wait_until = max(
last_discovery + DISCOVERY_COOLDOWN, last_subscribe + DISCOVERY_COOLDOWN
)
while now < wait_until:
await asyncio.sleep(wait_until - now)
now = time.time()
last_discovery = self.hass.data[LAST_DISCOVERY]
last_subscribe = self._last_subscribe
wait_until = max(
last_discovery + DISCOVERY_COOLDOWN, last_subscribe + DISCOVERY_COOLDOWN
)

def _raise_on_error(result_code: int | None) -> None:
"""Raise error if error result."""
# pylint: disable-next=import-outside-toplevel
import paho.mqtt.client as mqtt

if result_code is not None and result_code != 0:
raise HomeAssistantError(
f"Error talking to MQTT: {mqtt.error_string(result_code)}"
)

def _matcher_for_topic(subscription: str) -> Any:
# pylint: disable-next=import-outside-toplevel
from paho.mqtt.matcher import MQTTMatcher

matcher = MQTTMatcher()
matcher[subscription] = True

return lambda topic: next(matcher.iter_match(topic), False)
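The new client.py shown above keeps the module-level publish/subscribe helpers, so integration code can keep importing them rather than touching the MQTT class directly. A minimal usage sketch, assuming a configured broker; the topic names and the sensor entity are illustrative only, not part of this diff:

from homeassistant.components.mqtt.client import async_publish, async_subscribe

async def watch_front_door(hass):
    # New-style callback: receives a ReceiveMessage with topic/payload/qos.
    async def message_received(msg):
        hass.states.async_set("sensor.front_door", msg.payload)

    # async_subscribe returns a callable that removes the subscription again.
    unsubscribe = await async_subscribe(hass, "home/front_door", message_received, qos=1)
    # Publishing goes through the MQTT instance stored under hass.data[DATA_MQTT].
    await async_publish(hass, "home/front_door/cmd", "OPEN", qos=1, retain=False)
    return unsubscribe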
@@ -1,7 +1,6 @@
"""Support for MQTT climate devices."""
from __future__ import annotations

import asyncio
import functools
import logging

@@ -44,18 +43,20 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import MqttCommandTemplate, MqttValueTemplate, subscription
from .. import mqtt
from . import subscription
from .config import DEFAULT_RETAIN, MQTT_BASE_SCHEMA
from .const import CONF_ENCODING, CONF_QOS, CONF_RETAIN, PAYLOAD_NONE
from .debug_info import log_messages
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttEntity,
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
warn_for_legacy_schema,
)
from .models import MqttCommandTemplate, MqttValueTemplate
from .util import valid_publish_topic, valid_subscribe_topic

_LOGGER = logging.getLogger(__name__)

@@ -232,33 +233,33 @@ def valid_preset_mode_configuration(config):
return config

_PLATFORM_SCHEMA_BASE = mqtt.MQTT_BASE_SCHEMA.extend(
_PLATFORM_SCHEMA_BASE = MQTT_BASE_SCHEMA.extend(
{
vol.Optional(CONF_AUX_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_AUX_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_AUX_STATE_TEMPLATE): cv.template,
vol.Optional(CONF_AUX_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_AUX_STATE_TOPIC): valid_subscribe_topic,
# AWAY and HOLD mode topics and templates are deprecated, support will be removed with release 2022.9
vol.Optional(CONF_AWAY_MODE_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_AWAY_MODE_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_AWAY_MODE_STATE_TEMPLATE): cv.template,
vol.Optional(CONF_AWAY_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_AWAY_MODE_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_CURRENT_TEMP_TEMPLATE): cv.template,
vol.Optional(CONF_CURRENT_TEMP_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_CURRENT_TEMP_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_FAN_MODE_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_FAN_MODE_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_FAN_MODE_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(
CONF_FAN_MODE_LIST,
default=[FAN_AUTO, FAN_LOW, FAN_MEDIUM, FAN_HIGH],
): cv.ensure_list,
vol.Optional(CONF_FAN_MODE_STATE_TEMPLATE): cv.template,
vol.Optional(CONF_FAN_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_FAN_MODE_STATE_TOPIC): valid_subscribe_topic,
# AWAY and HOLD mode topics and templates are deprecated, support will be removed with release 2022.9
vol.Optional(CONF_HOLD_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_HOLD_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_HOLD_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_HOLD_STATE_TEMPLATE): cv.template,
vol.Optional(CONF_HOLD_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_HOLD_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_HOLD_LIST): cv.ensure_list,
vol.Optional(CONF_MODE_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_MODE_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_MODE_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(
CONF_MODE_LIST,
default=[
@@ -271,54 +272,54 @@ _PLATFORM_SCHEMA_BASE = mqtt.MQTT_BASE_SCHEMA.extend(
],
): cv.ensure_list,
vol.Optional(CONF_MODE_STATE_TEMPLATE): cv.template,
vol.Optional(CONF_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_MODE_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PAYLOAD_ON, default="ON"): cv.string,
vol.Optional(CONF_PAYLOAD_OFF, default="OFF"): cv.string,
vol.Optional(CONF_POWER_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_POWER_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_POWER_STATE_TEMPLATE): cv.template,
vol.Optional(CONF_POWER_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_POWER_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_PRECISION): vol.In(
[PRECISION_TENTHS, PRECISION_HALVES, PRECISION_WHOLE]
),
vol.Optional(CONF_RETAIN, default=mqtt.DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
# CONF_SEND_IF_OFF is deprecated, support will be removed with release 2022.9
vol.Optional(CONF_SEND_IF_OFF): cv.boolean,
vol.Optional(CONF_ACTION_TEMPLATE): cv.template,
vol.Optional(CONF_ACTION_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_ACTION_TOPIC): valid_subscribe_topic,
# CONF_PRESET_MODE_COMMAND_TOPIC and CONF_PRESET_MODES_LIST must be used together
vol.Inclusive(
CONF_PRESET_MODE_COMMAND_TOPIC, "preset_modes"
): mqtt.valid_publish_topic,
): valid_publish_topic,
vol.Inclusive(
CONF_PRESET_MODES_LIST, "preset_modes", default=[]
): cv.ensure_list,
vol.Optional(CONF_PRESET_MODE_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_PRESET_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_PRESET_MODE_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_PRESET_MODE_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_SWING_MODE_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_SWING_MODE_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_SWING_MODE_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(
CONF_SWING_MODE_LIST, default=[SWING_ON, SWING_OFF]
): cv.ensure_list,
vol.Optional(CONF_SWING_MODE_STATE_TEMPLATE): cv.template,
vol.Optional(CONF_SWING_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_SWING_MODE_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_TEMP_INITIAL, default=21): cv.positive_int,
vol.Optional(CONF_TEMP_MIN, default=DEFAULT_MIN_TEMP): vol.Coerce(float),
vol.Optional(CONF_TEMP_MAX, default=DEFAULT_MAX_TEMP): vol.Coerce(float),
vol.Optional(CONF_TEMP_STEP, default=1.0): vol.Coerce(float),
vol.Optional(CONF_TEMP_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_TEMP_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_TEMP_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_TEMP_HIGH_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_TEMP_HIGH_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_TEMP_HIGH_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_TEMP_HIGH_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_TEMP_HIGH_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_TEMP_HIGH_STATE_TEMPLATE): cv.template,
vol.Optional(CONF_TEMP_LOW_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_TEMP_LOW_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_TEMP_LOW_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_TEMP_LOW_STATE_TEMPLATE): cv.template,
vol.Optional(CONF_TEMP_LOW_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_TEMP_LOW_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_TEMP_STATE_TEMPLATE): cv.template,
vol.Optional(CONF_TEMP_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_TEMP_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_TEMPERATURE_UNIT): cv.temperature_unit,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
}
@@ -375,7 +376,11 @@ async def async_setup_platform(
"""Set up MQTT climate configured under the fan platform key (deprecated)."""
# The use of PLATFORM_SCHEMA is deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, climate.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
climate.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)

@@ -386,12 +391,9 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT climate device through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, climate.DOMAIN, PLATFORM_SCHEMA_MODERN
)
config_entry.async_on_unload(
await async_setup_platform_discovery(
hass, climate.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
# setup for discovery
148
homeassistant/components/mqtt/config.py
Normal file
@@ -0,0 +1,148 @@
"""Support for MQTT message handling."""
from __future__ import annotations

import voluptuous as vol

from homeassistant.const import (
CONF_CLIENT_ID,
CONF_DISCOVERY,
CONF_PASSWORD,
CONF_PORT,
CONF_PROTOCOL,
CONF_USERNAME,
CONF_VALUE_TEMPLATE,
)
from homeassistant.helpers import config_validation as cv

from .const import (
ATTR_PAYLOAD,
ATTR_QOS,
ATTR_RETAIN,
ATTR_TOPIC,
CONF_BIRTH_MESSAGE,
CONF_BROKER,
CONF_CERTIFICATE,
CONF_CLIENT_CERT,
CONF_CLIENT_KEY,
CONF_COMMAND_TOPIC,
CONF_DISCOVERY_PREFIX,
CONF_ENCODING,
CONF_KEEPALIVE,
CONF_QOS,
CONF_RETAIN,
CONF_STATE_TOPIC,
CONF_TLS_INSECURE,
CONF_TLS_VERSION,
CONF_WILL_MESSAGE,
DEFAULT_BIRTH,
DEFAULT_DISCOVERY,
DEFAULT_ENCODING,
DEFAULT_PREFIX,
DEFAULT_QOS,
DEFAULT_RETAIN,
DEFAULT_WILL,
PLATFORMS,
PROTOCOL_31,
PROTOCOL_311,
)
from .util import _VALID_QOS_SCHEMA, valid_publish_topic, valid_subscribe_topic

DEFAULT_PORT = 1883
DEFAULT_KEEPALIVE = 60
DEFAULT_PROTOCOL = PROTOCOL_311
DEFAULT_TLS_PROTOCOL = "auto"

DEFAULT_VALUES = {
CONF_BIRTH_MESSAGE: DEFAULT_BIRTH,
CONF_DISCOVERY: DEFAULT_DISCOVERY,
CONF_PORT: DEFAULT_PORT,
CONF_TLS_VERSION: DEFAULT_TLS_PROTOCOL,
CONF_WILL_MESSAGE: DEFAULT_WILL,
}

CLIENT_KEY_AUTH_MSG = (
"client_key and client_cert must both be present in "
"the MQTT broker configuration"
)

MQTT_WILL_BIRTH_SCHEMA = vol.Schema(
{
vol.Inclusive(ATTR_TOPIC, "topic_payload"): valid_publish_topic,
vol.Inclusive(ATTR_PAYLOAD, "topic_payload"): cv.string,
vol.Optional(ATTR_QOS, default=DEFAULT_QOS): _VALID_QOS_SCHEMA,
vol.Optional(ATTR_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
},
required=True,
)

PLATFORM_CONFIG_SCHEMA_BASE = vol.Schema(
{vol.Optional(platform.value): cv.ensure_list for platform in PLATFORMS}
)

CONFIG_SCHEMA_BASE = PLATFORM_CONFIG_SCHEMA_BASE.extend(
{
vol.Optional(CONF_CLIENT_ID): cv.string,
vol.Optional(CONF_KEEPALIVE, default=DEFAULT_KEEPALIVE): vol.All(
vol.Coerce(int), vol.Range(min=15)
),
vol.Optional(CONF_BROKER): cv.string,
vol.Optional(CONF_PORT): cv.port,
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_CERTIFICATE): vol.Any("auto", cv.isfile),
vol.Inclusive(
CONF_CLIENT_KEY, "client_key_auth", msg=CLIENT_KEY_AUTH_MSG
): cv.isfile,
vol.Inclusive(
CONF_CLIENT_CERT, "client_key_auth", msg=CLIENT_KEY_AUTH_MSG
): cv.isfile,
vol.Optional(CONF_TLS_INSECURE): cv.boolean,
vol.Optional(CONF_TLS_VERSION): vol.Any("auto", "1.0", "1.1", "1.2"),
vol.Optional(CONF_PROTOCOL, default=DEFAULT_PROTOCOL): vol.All(
cv.string, vol.In([PROTOCOL_31, PROTOCOL_311])
),
vol.Optional(CONF_WILL_MESSAGE): MQTT_WILL_BIRTH_SCHEMA,
vol.Optional(CONF_BIRTH_MESSAGE): MQTT_WILL_BIRTH_SCHEMA,
vol.Optional(CONF_DISCOVERY): cv.boolean,
# discovery_prefix must be a valid publish topic because if no
# state topic is specified, it will be created with the given prefix.
vol.Optional(
CONF_DISCOVERY_PREFIX, default=DEFAULT_PREFIX
): valid_publish_topic,
}
)

DEPRECATED_CONFIG_KEYS = [
CONF_BIRTH_MESSAGE,
CONF_BROKER,
CONF_DISCOVERY,
CONF_PASSWORD,
CONF_PORT,
CONF_TLS_VERSION,
CONF_USERNAME,
CONF_WILL_MESSAGE,
]

SCHEMA_BASE = {
vol.Optional(CONF_QOS, default=DEFAULT_QOS): _VALID_QOS_SCHEMA,
vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): cv.string,
}

MQTT_BASE_SCHEMA = vol.Schema(SCHEMA_BASE)

# Sensor type platforms subscribe to MQTT events
MQTT_RO_SCHEMA = MQTT_BASE_SCHEMA.extend(
{
vol.Required(CONF_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
}
)

# Switch type platforms publish to MQTT and may subscribe
MQTT_RW_SCHEMA = MQTT_BASE_SCHEMA.extend(
{
vol.Required(CONF_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic,
}
)
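A minimal sketch of how the shared read-only schema from the new config.py can be exercised; the topic value is illustrative, not taken from the diff:

from homeassistant.components.mqtt.config import MQTT_RO_SCHEMA

# state_topic is required; qos and encoding fall back to the defaults from SCHEMA_BASE.
validated = MQTT_RO_SCHEMA({"state_topic": "home/kitchen/temperature", "qos": 1})
assert validated["qos"] == 1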
@@ -17,7 +17,7 @@ from homeassistant.const import (
)
from homeassistant.data_entry_flow import FlowResult

from . import MqttClientSetup
from .client import MqttClientSetup
from .const import (
ATTR_PAYLOAD,
ATTR_QOS,
@@ -1,5 +1,5 @@
"""Constants used by multiple MQTT modules."""
from homeassistant.const import CONF_PAYLOAD
from homeassistant.const import CONF_PAYLOAD, Platform

ATTR_DISCOVERY_HASH = "discovery_hash"
ATTR_DISCOVERY_PAYLOAD = "discovery_payload"
@@ -14,7 +14,9 @@ CONF_BROKER = "broker"
CONF_BIRTH_MESSAGE = "birth_message"
CONF_COMMAND_TEMPLATE = "command_template"
CONF_COMMAND_TOPIC = "command_topic"
CONF_DISCOVERY_PREFIX = "discovery_prefix"
CONF_ENCODING = "encoding"
CONF_KEEPALIVE = "keepalive"
CONF_QOS = ATTR_QOS
CONF_RETAIN = ATTR_RETAIN
CONF_STATE_TOPIC = "state_topic"
@@ -30,6 +32,7 @@ CONF_TLS_VERSION = "tls_version"

CONFIG_ENTRY_IS_SETUP = "mqtt_config_entry_is_setup"
DATA_CONFIG_ENTRY_LOCK = "mqtt_config_entry_lock"
DATA_MQTT = "mqtt"
DATA_MQTT_CONFIG = "mqtt_config"
DATA_MQTT_RELOAD_NEEDED = "mqtt_reload_needed"

@@ -66,3 +69,24 @@ PAYLOAD_NONE = "None"

PROTOCOL_31 = "3.1"
PROTOCOL_311 = "3.1.1"

PLATFORMS = [
Platform.ALARM_CONTROL_PANEL,
Platform.BINARY_SENSOR,
Platform.BUTTON,
Platform.CAMERA,
Platform.CLIMATE,
Platform.DEVICE_TRACKER,
Platform.COVER,
Platform.FAN,
Platform.HUMIDIFIER,
Platform.LIGHT,
Platform.LOCK,
Platform.NUMBER,
Platform.SELECT,
Platform.SCENE,
Platform.SENSOR,
Platform.SIREN,
Platform.SWITCH,
Platform.VACUUM,
]
@@ -1,7 +1,6 @@
"""Support for MQTT cover devices."""
from __future__ import annotations

import asyncio
import functools
from json import JSONDecodeError, loads as json_loads
import logging
@@ -33,8 +32,8 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import MqttCommandTemplate, MqttValueTemplate, subscription
from .. import mqtt
from . import subscription
from .config import MQTT_BASE_SCHEMA
from .const import (
CONF_COMMAND_TOPIC,
CONF_ENCODING,
@@ -46,11 +45,13 @@ from .debug_info import log_messages
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttEntity,
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
warn_for_legacy_schema,
)
from .models import MqttCommandTemplate, MqttValueTemplate
from .util import valid_publish_topic, valid_subscribe_topic

_LOGGER = logging.getLogger(__name__)

@@ -152,11 +153,11 @@ def validate_options(value):
return value

_PLATFORM_SCHEMA_BASE = mqtt.MQTT_BASE_SCHEMA.extend(
_PLATFORM_SCHEMA_BASE = MQTT_BASE_SCHEMA.extend(
{
vol.Optional(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_GET_POSITION_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_GET_POSITION_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_PAYLOAD_CLOSE, default=DEFAULT_PAYLOAD_CLOSE): vol.Any(
@@ -172,24 +173,24 @@ _PLATFORM_SCHEMA_BASE = mqtt.MQTT_BASE_SCHEMA.extend(
vol.Optional(CONF_POSITION_OPEN, default=DEFAULT_POSITION_OPEN): int,
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_SET_POSITION_TEMPLATE): cv.template,
vol.Optional(CONF_SET_POSITION_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_SET_POSITION_TOPIC): valid_publish_topic,
vol.Optional(CONF_STATE_CLOSED, default=STATE_CLOSED): cv.string,
vol.Optional(CONF_STATE_CLOSING, default=STATE_CLOSING): cv.string,
vol.Optional(CONF_STATE_OPEN, default=STATE_OPEN): cv.string,
vol.Optional(CONF_STATE_OPENING, default=STATE_OPENING): cv.string,
vol.Optional(CONF_STATE_STOPPED, default=DEFAULT_STATE_STOPPED): cv.string,
vol.Optional(CONF_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(
CONF_TILT_CLOSED_POSITION, default=DEFAULT_TILT_CLOSED_POSITION
): int,
vol.Optional(CONF_TILT_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_TILT_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_TILT_MAX, default=DEFAULT_TILT_MAX): int,
vol.Optional(CONF_TILT_MIN, default=DEFAULT_TILT_MIN): int,
vol.Optional(CONF_TILT_OPEN_POSITION, default=DEFAULT_TILT_OPEN_POSITION): int,
vol.Optional(
CONF_TILT_STATE_OPTIMISTIC, default=DEFAULT_TILT_OPTIMISTIC
): cv.boolean,
vol.Optional(CONF_TILT_STATUS_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_TILT_STATUS_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_TILT_STATUS_TEMPLATE): cv.template,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_GET_POSITION_TEMPLATE): cv.template,
@@ -225,7 +226,11 @@ async def async_setup_platform(
"""Set up MQTT covers configured under the fan platform key (deprecated)."""
# Deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, cover.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
cover.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)

@@ -236,13 +241,8 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT cover through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, cover.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
config_entry.async_on_unload(
await async_setup_platform_discovery(hass, cover.DOMAIN, PLATFORM_SCHEMA_MODERN)
)
# setup for discovery
setup = functools.partial(
@@ -3,8 +3,10 @@ import functools

import voluptuous as vol

import homeassistant.helpers.config_validation as cv

from . import device_trigger
from .. import mqtt
from .config import MQTT_BASE_SCHEMA
from .mixins import async_setup_entry_helper

AUTOMATION_TYPE_TRIGGER = "trigger"
@@ -12,10 +14,10 @@ AUTOMATION_TYPES = [AUTOMATION_TYPE_TRIGGER]
AUTOMATION_TYPES_SCHEMA = vol.In(AUTOMATION_TYPES)
CONF_AUTOMATION_TYPE = "automation_type"

PLATFORM_SCHEMA = mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend(
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(
{vol.Required(CONF_AUTOMATION_TYPE): AUTOMATION_TYPES_SCHEMA},
extra=vol.ALLOW_EXTRA,
)
).extend(MQTT_BASE_SCHEMA.schema)

async def async_setup_entry(hass, config_entry):
@@ -19,8 +19,8 @@ from homeassistant.const import (
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv

from .. import MqttValueTemplate, subscription
from ... import mqtt
from .. import subscription
from ..config import MQTT_RO_SCHEMA
from ..const import CONF_QOS, CONF_STATE_TOPIC
from ..debug_info import log_messages
from ..mixins import (
@@ -29,12 +29,13 @@ from ..mixins import (
async_get_platform_config_from_yaml,
async_setup_entry_helper,
)
from ..models import MqttValueTemplate

CONF_PAYLOAD_HOME = "payload_home"
CONF_PAYLOAD_NOT_HOME = "payload_not_home"
CONF_SOURCE_TYPE = "source_type"

PLATFORM_SCHEMA_MODERN = mqtt.MQTT_RO_SCHEMA.extend(
PLATFORM_SCHEMA_MODERN = MQTT_RO_SCHEMA.extend(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_PAYLOAD_HOME, default=STATE_HOME): cv.string,
@@ -7,16 +7,18 @@ from homeassistant.const import CONF_DEVICES, STATE_HOME, STATE_NOT_HOME
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv

from ... import mqtt
from ..client import async_subscribe
from ..config import SCHEMA_BASE
from ..const import CONF_QOS
from ..util import valid_subscribe_topic

CONF_PAYLOAD_HOME = "payload_home"
CONF_PAYLOAD_NOT_HOME = "payload_not_home"
CONF_SOURCE_TYPE = "source_type"

PLATFORM_SCHEMA_YAML = PLATFORM_SCHEMA.extend(mqtt.SCHEMA_BASE).extend(
PLATFORM_SCHEMA_YAML = PLATFORM_SCHEMA.extend(SCHEMA_BASE).extend(
{
vol.Required(CONF_DEVICES): {cv.string: mqtt.valid_subscribe_topic},
vol.Required(CONF_DEVICES): {cv.string: valid_subscribe_topic},
vol.Optional(CONF_PAYLOAD_HOME, default=STATE_HOME): cv.string,
vol.Optional(CONF_PAYLOAD_NOT_HOME, default=STATE_NOT_HOME): cv.string,
vol.Optional(CONF_SOURCE_TYPE): vol.In(SOURCE_TYPES),
@@ -50,6 +52,6 @@ async def async_setup_scanner_from_yaml(hass, config, async_see, discovery_info=

hass.async_create_task(async_see(**see_args))

await mqtt.async_subscribe(hass, topic, async_message_received, qos)
await async_subscribe(hass, topic, async_message_received, qos)

return True
@@ -29,8 +29,15 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import ConfigType

from . import debug_info, trigger as mqtt_trigger
from .. import mqtt
from .const import ATTR_DISCOVERY_HASH, CONF_PAYLOAD, CONF_QOS, CONF_TOPIC, DOMAIN
from .config import MQTT_BASE_SCHEMA
from .const import (
ATTR_DISCOVERY_HASH,
CONF_ENCODING,
CONF_PAYLOAD,
CONF_QOS,
CONF_TOPIC,
DOMAIN,
)
from .discovery import MQTT_DISCOVERY_DONE
from .mixins import (
MQTT_ENTITY_DEVICE_INFO_SCHEMA,
@@ -64,7 +71,7 @@ TRIGGER_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend(
}
)

TRIGGER_DISCOVERY_SCHEMA = mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend(
TRIGGER_DISCOVERY_SCHEMA = MQTT_BASE_SCHEMA.extend(
{
vol.Required(CONF_AUTOMATION_TYPE): str,
vol.Required(CONF_DEVICE): MQTT_ENTITY_DEVICE_INFO_SCHEMA,
@@ -94,10 +101,10 @@ class TriggerInstance:
async def async_attach_trigger(self) -> None:
"""Attach MQTT trigger."""
mqtt_config = {
mqtt_trigger.CONF_PLATFORM: mqtt.DOMAIN,
mqtt_trigger.CONF_TOPIC: self.trigger.topic,
mqtt_trigger.CONF_ENCODING: DEFAULT_ENCODING,
mqtt_trigger.CONF_QOS: self.trigger.qos,
CONF_PLATFORM: DOMAIN,
CONF_TOPIC: self.trigger.topic,
CONF_ENCODING: DEFAULT_ENCODING,
CONF_QOS: self.trigger.qos,
}
if self.trigger.payload:
mqtt_config[CONF_PAYLOAD] = self.trigger.payload
@@ -1,7 +1,6 @@
"""Support for MQTT fans."""
from __future__ import annotations

import asyncio
import functools
import logging
import math
@@ -34,8 +33,8 @@ from homeassistant.util.percentage import (
ranged_value_to_percentage,
)

from . import MqttCommandTemplate, MqttValueTemplate, subscription
from .. import mqtt
from . import subscription
from .config import MQTT_RW_SCHEMA
from .const import (
CONF_COMMAND_TEMPLATE,
CONF_COMMAND_TOPIC,
@@ -50,11 +49,13 @@ from .debug_info import log_messages
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttEntity,
async_get_platform_config_from_yaml,
async_setup_entry_helper,
async_setup_platform_discovery,
async_setup_platform_helper,
warn_for_legacy_schema,
)
from .models import MqttCommandTemplate, MqttValueTemplate
from .util import valid_publish_topic, valid_subscribe_topic

CONF_PERCENTAGE_STATE_TOPIC = "percentage_state_topic"
CONF_PERCENTAGE_COMMAND_TOPIC = "percentage_command_topic"
@@ -125,28 +126,28 @@ def valid_preset_mode_configuration(config):
return config

_PLATFORM_SCHEMA_BASE = mqtt.MQTT_RW_SCHEMA.extend(
_PLATFORM_SCHEMA_BASE = MQTT_RW_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_OSCILLATION_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_OSCILLATION_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_OSCILLATION_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_OSCILLATION_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_OSCILLATION_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_OSCILLATION_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_PERCENTAGE_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_PERCENTAGE_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_PERCENTAGE_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_PERCENTAGE_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_PERCENTAGE_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_PERCENTAGE_VALUE_TEMPLATE): cv.template,
# CONF_PRESET_MODE_COMMAND_TOPIC and CONF_PRESET_MODES_LIST must be used together
vol.Inclusive(
CONF_PRESET_MODE_COMMAND_TOPIC, "preset_modes"
): mqtt.valid_publish_topic,
): valid_publish_topic,
vol.Inclusive(
CONF_PRESET_MODES_LIST, "preset_modes", default=[]
): cv.ensure_list,
vol.Optional(CONF_PRESET_MODE_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_PRESET_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_PRESET_MODE_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_PRESET_MODE_VALUE_TEMPLATE): cv.template,
vol.Optional(
CONF_SPEED_RANGE_MIN, default=DEFAULT_SPEED_RANGE_MIN
@@ -168,8 +169,8 @@ _PLATFORM_SCHEMA_BASE = mqtt.MQTT_RW_SCHEMA.extend(
vol.Optional(
CONF_PAYLOAD_OSCILLATION_ON, default=OSCILLATE_ON_PAYLOAD
): cv.string,
vol.Optional(CONF_SPEED_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_SPEED_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_SPEED_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_SPEED_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_SPEED_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_STATE_VALUE_TEMPLATE): cv.template,
}
@@ -215,7 +216,11 @@ async def async_setup_platform(
"""Set up MQTT fans configured under the fan platform key (deprecated)."""
# Deprecated in HA Core 2022.6
await async_setup_platform_helper(
hass, fan.DOMAIN, config, async_add_entities, _async_setup_entity
hass,
fan.DOMAIN,
discovery_info or config,
async_add_entities,
_async_setup_entity,
)

@@ -226,13 +231,8 @@ async def async_setup_entry(
) -> None:
"""Set up MQTT fan through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await asyncio.gather(
*(
_async_setup_entity(hass, async_add_entities, config, config_entry)
for config in await async_get_platform_config_from_yaml(
hass, fan.DOMAIN, PLATFORM_SCHEMA_MODERN
)
)
config_entry.async_on_unload(
await async_setup_platform_discovery(hass, fan.DOMAIN, PLATFORM_SCHEMA_MODERN)
)
# setup for discovery
setup = functools.partial(
@@ -1,7 +1,6 @@
|
||||
"""Support for MQTT humidifiers."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import functools
|
||||
import logging
|
||||
|
||||
@@ -30,8 +29,8 @@ import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import MqttCommandTemplate, MqttValueTemplate, subscription
|
||||
from .. import mqtt
|
||||
from . import subscription
|
||||
from .config import MQTT_RW_SCHEMA
|
||||
from .const import (
|
||||
CONF_COMMAND_TEMPLATE,
|
||||
CONF_COMMAND_TOPIC,
|
||||
@@ -46,11 +45,13 @@ from .debug_info import log_messages
|
||||
from .mixins import (
|
||||
MQTT_ENTITY_COMMON_SCHEMA,
|
||||
MqttEntity,
|
||||
async_get_platform_config_from_yaml,
|
||||
async_setup_entry_helper,
|
||||
async_setup_platform_discovery,
|
||||
async_setup_platform_helper,
|
||||
warn_for_legacy_schema,
|
||||
)
|
||||
from .models import MqttCommandTemplate, MqttValueTemplate
|
||||
from .util import valid_publish_topic, valid_subscribe_topic
|
||||
|
||||
CONF_AVAILABLE_MODES_LIST = "modes"
|
||||
CONF_DEVICE_CLASS = "device_class"
|
||||
@@ -103,15 +104,13 @@ def valid_humidity_range_configuration(config):
|
||||
return config
|
||||
|
||||
|
||||
_PLATFORM_SCHEMA_BASE = mqtt.MQTT_RW_SCHEMA.extend(
|
||||
_PLATFORM_SCHEMA_BASE = MQTT_RW_SCHEMA.extend(
|
||||
{
|
||||
# CONF_AVAIALABLE_MODES_LIST and CONF_MODE_COMMAND_TOPIC must be used together
|
||||
vol.Inclusive(
|
||||
CONF_AVAILABLE_MODES_LIST, "available_modes", default=[]
|
||||
): cv.ensure_list,
|
||||
vol.Inclusive(
|
||||
CONF_MODE_COMMAND_TOPIC, "available_modes"
|
||||
): mqtt.valid_publish_topic,
|
||||
vol.Inclusive(CONF_MODE_COMMAND_TOPIC, "available_modes"): valid_publish_topic,
|
||||
vol.Optional(CONF_COMMAND_TEMPLATE): cv.template,
|
||||
vol.Optional(
|
||||
CONF_DEVICE_CLASS, default=HumidifierDeviceClass.HUMIDIFIER
|
||||
@@ -119,14 +118,14 @@ _PLATFORM_SCHEMA_BASE = mqtt.MQTT_RW_SCHEMA.extend(
|
||||
[HumidifierDeviceClass.HUMIDIFIER, HumidifierDeviceClass.DEHUMIDIFIER]
|
||||
),
|
||||
vol.Optional(CONF_MODE_COMMAND_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
|
||||
vol.Optional(CONF_MODE_STATE_TOPIC): valid_subscribe_topic,
|
||||
vol.Optional(CONF_MODE_STATE_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
|
||||
vol.Optional(CONF_PAYLOAD_OFF, default=DEFAULT_PAYLOAD_OFF): cv.string,
|
||||
vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string,
|
||||
vol.Optional(CONF_STATE_VALUE_TEMPLATE): cv.template,
|
||||
vol.Required(CONF_TARGET_HUMIDITY_COMMAND_TOPIC): mqtt.valid_publish_topic,
|
||||
vol.Required(CONF_TARGET_HUMIDITY_COMMAND_TOPIC): valid_publish_topic,
|
||||
vol.Optional(CONF_TARGET_HUMIDITY_COMMAND_TEMPLATE): cv.template,
|
||||
vol.Optional(
|
||||
CONF_TARGET_HUMIDITY_MAX, default=DEFAULT_MAX_HUMIDITY
|
||||
@@ -135,7 +134,7 @@ _PLATFORM_SCHEMA_BASE = mqtt.MQTT_RW_SCHEMA.extend(
|
||||
CONF_TARGET_HUMIDITY_MIN, default=DEFAULT_MIN_HUMIDITY
|
||||
): cv.positive_int,
|
||||
vol.Optional(CONF_TARGET_HUMIDITY_STATE_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_TARGET_HUMIDITY_STATE_TOPIC): mqtt.valid_subscribe_topic,
|
||||
vol.Optional(CONF_TARGET_HUMIDITY_STATE_TOPIC): valid_subscribe_topic,
|
||||
vol.Optional(
|
||||
CONF_PAYLOAD_RESET_HUMIDITY, default=DEFAULT_PAYLOAD_RESET
|
||||
): cv.string,
|
||||
@@ -173,7 +172,11 @@ async def async_setup_platform(
|
||||
"""Set up MQTT humidifier configured under the fan platform key (deprecated)."""
|
||||
# Deprecated in HA Core 2022.6
|
||||
await async_setup_platform_helper(
|
||||
hass, humidifier.DOMAIN, config, async_add_entities, _async_setup_entity
|
||||
hass,
|
||||
humidifier.DOMAIN,
|
||||
discovery_info or config,
|
||||
async_add_entities,
|
||||
_async_setup_entity,
|
||||
)
|
||||
|
||||
|
||||
@@ -184,14 +187,12 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up MQTT humidifier through configuration.yaml and dynamically through MQTT discovery."""
|
||||
# load and initialize platform config from configuration.yaml
|
||||
await asyncio.gather(
|
||||
*(
|
||||
_async_setup_entity(hass, async_add_entities, config, config_entry)
|
||||
for config in await async_get_platform_config_from_yaml(
|
||||
hass, humidifier.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
config_entry.async_on_unload(
|
||||
await async_setup_platform_discovery(
|
||||
hass, humidifier.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
) # setup for discovery
|
||||
)
|
||||
# setup for discovery
|
||||
setup = functools.partial(
|
||||
_async_setup_entity, hass, async_add_entities, config_entry=config_entry
|
||||
)
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Support for MQTT lights."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import functools
|
||||
|
||||
import voluptuous as vol
|
||||
@@ -14,8 +13,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from ..mixins import (
|
||||
async_get_platform_config_from_yaml,
|
||||
async_setup_entry_helper,
|
||||
async_setup_platform_discovery,
|
||||
async_setup_platform_helper,
|
||||
warn_for_legacy_schema,
|
||||
)
|
||||
@@ -97,7 +96,11 @@ async def async_setup_platform(
|
||||
"""Set up MQTT light through configuration.yaml (deprecated)."""
|
||||
# Deprecated in HA Core 2022.6
|
||||
await async_setup_platform_helper(
|
||||
hass, light.DOMAIN, config, async_add_entities, _async_setup_entity
|
||||
hass,
|
||||
light.DOMAIN,
|
||||
discovery_info or config,
|
||||
async_add_entities,
|
||||
_async_setup_entity,
|
||||
)
|
||||
|
||||
|
||||
@@ -108,13 +111,8 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up MQTT lights configured under the light platform key (deprecated)."""
|
||||
# load and initialize platform config from configuration.yaml
|
||||
await asyncio.gather(
|
||||
*(
|
||||
_async_setup_entity(hass, async_add_entities, config, config_entry)
|
||||
for config in await async_get_platform_config_from_yaml(
|
||||
hass, light.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
)
|
||||
config_entry.async_on_unload(
|
||||
await async_setup_platform_discovery(hass, light.DOMAIN, PLATFORM_SCHEMA_MODERN)
|
||||
)
|
||||
# setup for discovery
|
||||
setup = functools.partial(
|
||||
|
||||
@@ -42,8 +42,8 @@ import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
import homeassistant.util.color as color_util
|
||||
|
||||
from .. import MqttCommandTemplate, MqttValueTemplate, subscription
|
||||
from ... import mqtt
|
||||
from .. import subscription
|
||||
from ..config import MQTT_RW_SCHEMA
|
||||
from ..const import (
|
||||
CONF_COMMAND_TOPIC,
|
||||
CONF_ENCODING,
|
||||
@@ -55,6 +55,8 @@ from ..const import (
|
||||
)
|
||||
from ..debug_info import log_messages
|
||||
from ..mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity
|
||||
from ..models import MqttCommandTemplate, MqttValueTemplate
|
||||
from ..util import valid_publish_topic, valid_subscribe_topic
|
||||
from .schema import MQTT_LIGHT_SCHEMA_SCHEMA
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -156,28 +158,28 @@ VALUE_TEMPLATE_KEYS = [
|
||||
]
|
||||
|
||||
_PLATFORM_SCHEMA_BASE = (
|
||||
mqtt.MQTT_RW_SCHEMA.extend(
|
||||
MQTT_RW_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_BRIGHTNESS_COMMAND_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_BRIGHTNESS_COMMAND_TOPIC): mqtt.valid_publish_topic,
|
||||
vol.Optional(CONF_BRIGHTNESS_COMMAND_TOPIC): valid_publish_topic,
|
||||
vol.Optional(
|
||||
CONF_BRIGHTNESS_SCALE, default=DEFAULT_BRIGHTNESS_SCALE
|
||||
): vol.All(vol.Coerce(int), vol.Range(min=1)),
|
||||
vol.Optional(CONF_BRIGHTNESS_STATE_TOPIC): mqtt.valid_subscribe_topic,
|
||||
vol.Optional(CONF_BRIGHTNESS_STATE_TOPIC): valid_subscribe_topic,
|
||||
vol.Optional(CONF_BRIGHTNESS_VALUE_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_COLOR_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
|
||||
vol.Optional(CONF_COLOR_MODE_STATE_TOPIC): valid_subscribe_topic,
|
||||
vol.Optional(CONF_COLOR_MODE_VALUE_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_COLOR_TEMP_COMMAND_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_COLOR_TEMP_COMMAND_TOPIC): mqtt.valid_publish_topic,
|
||||
vol.Optional(CONF_COLOR_TEMP_STATE_TOPIC): mqtt.valid_subscribe_topic,
|
||||
vol.Optional(CONF_COLOR_TEMP_COMMAND_TOPIC): valid_publish_topic,
|
||||
vol.Optional(CONF_COLOR_TEMP_STATE_TOPIC): valid_subscribe_topic,
|
||||
vol.Optional(CONF_COLOR_TEMP_VALUE_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_EFFECT_COMMAND_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_EFFECT_COMMAND_TOPIC): mqtt.valid_publish_topic,
|
||||
vol.Optional(CONF_EFFECT_COMMAND_TOPIC): valid_publish_topic,
|
||||
vol.Optional(CONF_EFFECT_LIST): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(CONF_EFFECT_STATE_TOPIC): mqtt.valid_subscribe_topic,
|
||||
vol.Optional(CONF_EFFECT_STATE_TOPIC): valid_subscribe_topic,
|
||||
vol.Optional(CONF_EFFECT_VALUE_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_HS_COMMAND_TOPIC): mqtt.valid_publish_topic,
|
||||
vol.Optional(CONF_HS_STATE_TOPIC): mqtt.valid_subscribe_topic,
|
||||
vol.Optional(CONF_HS_COMMAND_TOPIC): valid_publish_topic,
|
||||
vol.Optional(CONF_HS_STATE_TOPIC): valid_subscribe_topic,
|
||||
vol.Optional(CONF_HS_VALUE_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_MAX_MIREDS): cv.positive_int,
|
||||
vol.Optional(CONF_MIN_MIREDS): cv.positive_int,
|
||||
@@ -189,30 +191,30 @@ _PLATFORM_SCHEMA_BASE = (
|
||||
vol.Optional(CONF_PAYLOAD_OFF, default=DEFAULT_PAYLOAD_OFF): cv.string,
|
||||
vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string,
|
||||
vol.Optional(CONF_RGB_COMMAND_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_RGB_COMMAND_TOPIC): mqtt.valid_publish_topic,
|
||||
vol.Optional(CONF_RGB_STATE_TOPIC): mqtt.valid_subscribe_topic,
|
||||
vol.Optional(CONF_RGB_COMMAND_TOPIC): valid_publish_topic,
|
||||
vol.Optional(CONF_RGB_STATE_TOPIC): valid_subscribe_topic,
|
||||
vol.Optional(CONF_RGB_VALUE_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_RGBW_COMMAND_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_RGBW_COMMAND_TOPIC): mqtt.valid_publish_topic,
|
||||
vol.Optional(CONF_RGBW_STATE_TOPIC): mqtt.valid_subscribe_topic,
|
||||
vol.Optional(CONF_RGBW_COMMAND_TOPIC): valid_publish_topic,
|
||||
vol.Optional(CONF_RGBW_STATE_TOPIC): valid_subscribe_topic,
|
||||
vol.Optional(CONF_RGBW_VALUE_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_RGBWW_COMMAND_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_RGBWW_COMMAND_TOPIC): mqtt.valid_publish_topic,
|
||||
vol.Optional(CONF_RGBWW_STATE_TOPIC): mqtt.valid_subscribe_topic,
|
||||
vol.Optional(CONF_RGBWW_COMMAND_TOPIC): valid_publish_topic,
|
||||
vol.Optional(CONF_RGBWW_STATE_TOPIC): valid_subscribe_topic,
|
||||
vol.Optional(CONF_RGBWW_VALUE_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_STATE_VALUE_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_WHITE_COMMAND_TOPIC): mqtt.valid_publish_topic,
|
||||
vol.Optional(CONF_WHITE_COMMAND_TOPIC): valid_publish_topic,
|
||||
vol.Optional(CONF_WHITE_SCALE, default=DEFAULT_WHITE_SCALE): vol.All(
|
||||
vol.Coerce(int), vol.Range(min=1)
|
||||
),
|
||||
vol.Optional(CONF_WHITE_VALUE_COMMAND_TOPIC): mqtt.valid_publish_topic,
|
||||
vol.Optional(CONF_WHITE_VALUE_COMMAND_TOPIC): valid_publish_topic,
|
||||
vol.Optional(
|
||||
CONF_WHITE_VALUE_SCALE, default=DEFAULT_WHITE_VALUE_SCALE
|
||||
): vol.All(vol.Coerce(int), vol.Range(min=1)),
|
||||
vol.Optional(CONF_WHITE_VALUE_STATE_TOPIC): mqtt.valid_subscribe_topic,
|
||||
vol.Optional(CONF_WHITE_VALUE_STATE_TOPIC): valid_subscribe_topic,
|
||||
vol.Optional(CONF_WHITE_VALUE_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_XY_COMMAND_TOPIC): mqtt.valid_publish_topic,
|
||||
vol.Optional(CONF_XY_STATE_TOPIC): mqtt.valid_subscribe_topic,
|
||||
vol.Optional(CONF_XY_COMMAND_TOPIC): valid_publish_topic,
|
||||
vol.Optional(CONF_XY_STATE_TOPIC): valid_subscribe_topic,
|
||||
vol.Optional(CONF_XY_VALUE_TEMPLATE): cv.template,
|
||||
},
|
||||
)
|
||||
|
||||
@@ -51,7 +51,7 @@ from homeassistant.helpers.typing import ConfigType
|
||||
import homeassistant.util.color as color_util
|
||||
|
||||
from .. import subscription
|
||||
from ... import mqtt
|
||||
from ..config import DEFAULT_QOS, DEFAULT_RETAIN, MQTT_RW_SCHEMA
|
||||
from ..const import (
|
||||
CONF_COMMAND_TOPIC,
|
||||
CONF_ENCODING,
|
||||
@@ -61,6 +61,7 @@ from ..const import (
|
||||
)
|
||||
from ..debug_info import log_messages
|
||||
from ..mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity
|
||||
from ..util import valid_subscribe_topic
|
||||
from .schema import MQTT_LIGHT_SCHEMA_SCHEMA
|
||||
from .schema_basic import CONF_BRIGHTNESS_SCALE, MQTT_LIGHT_ATTRIBUTES_BLOCKED
|
||||
|
||||
@@ -103,7 +104,7 @@ def valid_color_configuration(config):
|
||||
|
||||
|
||||
_PLATFORM_SCHEMA_BASE = (
|
||||
mqtt.MQTT_RW_SCHEMA.extend(
|
||||
MQTT_RW_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_BRIGHTNESS, default=DEFAULT_BRIGHTNESS): cv.boolean,
|
||||
vol.Optional(
|
||||
@@ -126,12 +127,12 @@ _PLATFORM_SCHEMA_BASE = (
|
||||
vol.Optional(CONF_MIN_MIREDS): cv.positive_int,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
|
||||
vol.Optional(CONF_QOS, default=mqtt.DEFAULT_QOS): vol.All(
|
||||
vol.Optional(CONF_QOS, default=DEFAULT_QOS): vol.All(
|
||||
vol.Coerce(int), vol.In([0, 1, 2])
|
||||
),
|
||||
vol.Optional(CONF_RETAIN, default=mqtt.DEFAULT_RETAIN): cv.boolean,
|
||||
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
|
||||
vol.Optional(CONF_RGB, default=DEFAULT_RGB): cv.boolean,
|
||||
vol.Optional(CONF_STATE_TOPIC): mqtt.valid_subscribe_topic,
|
||||
vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic,
|
||||
vol.Inclusive(CONF_SUPPORTED_COLOR_MODES, "color_mode"): vol.All(
|
||||
cv.ensure_list,
|
||||
[vol.In(VALID_COLOR_MODES)],
|
||||
|
||||
@@ -31,8 +31,8 @@ import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
import homeassistant.util.color as color_util
|
||||
|
||||
from .. import MqttValueTemplate, subscription
|
||||
from ... import mqtt
|
||||
from .. import subscription
|
||||
from ..config import MQTT_RW_SCHEMA
|
||||
from ..const import (
|
||||
CONF_COMMAND_TOPIC,
|
||||
CONF_ENCODING,
|
||||
@@ -43,6 +43,7 @@ from ..const import (
|
||||
)
|
||||
from ..debug_info import log_messages
|
||||
from ..mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity
|
||||
from ..models import MqttValueTemplate
|
||||
from .schema import MQTT_LIGHT_SCHEMA_SCHEMA
|
||||
from .schema_basic import MQTT_LIGHT_ATTRIBUTES_BLOCKED
|
||||
|
||||
@@ -67,7 +68,7 @@ CONF_RED_TEMPLATE = "red_template"
|
||||
CONF_WHITE_VALUE_TEMPLATE = "white_value_template"
|
||||
|
||||
_PLATFORM_SCHEMA_BASE = (
|
||||
mqtt.MQTT_RW_SCHEMA.extend(
|
||||
MQTT_RW_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_BLUE_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_BRIGHTNESS_TEMPLATE): cv.template,
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Support for MQTT locks."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import functools
|
||||
|
||||
import voluptuous as vol
|
||||
@@ -15,8 +14,8 @@ import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import MqttValueTemplate, subscription
|
||||
from .. import mqtt
|
||||
from . import subscription
|
||||
from .config import MQTT_RW_SCHEMA
|
||||
from .const import (
|
||||
CONF_COMMAND_TOPIC,
|
||||
CONF_ENCODING,
|
||||
@@ -28,11 +27,12 @@ from .debug_info import log_messages
|
||||
from .mixins import (
|
||||
MQTT_ENTITY_COMMON_SCHEMA,
|
||||
MqttEntity,
|
||||
async_get_platform_config_from_yaml,
|
||||
async_setup_entry_helper,
|
||||
async_setup_platform_discovery,
|
||||
async_setup_platform_helper,
|
||||
warn_for_legacy_schema,
|
||||
)
|
||||
from .models import MqttValueTemplate
|
||||
|
||||
CONF_PAYLOAD_LOCK = "payload_lock"
|
||||
CONF_PAYLOAD_UNLOCK = "payload_unlock"
|
||||
@@ -56,7 +56,7 @@ MQTT_LOCK_ATTRIBUTES_BLOCKED = frozenset(
|
||||
}
|
||||
)
|
||||
|
||||
PLATFORM_SCHEMA_MODERN = mqtt.MQTT_RW_SCHEMA.extend(
|
||||
PLATFORM_SCHEMA_MODERN = MQTT_RW_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
|
||||
@@ -87,7 +87,11 @@ async def async_setup_platform(
|
||||
"""Set up MQTT locks configured under the lock platform key (deprecated)."""
|
||||
# Deprecated in HA Core 2022.6
|
||||
await async_setup_platform_helper(
|
||||
hass, lock.DOMAIN, config, async_add_entities, _async_setup_entity
|
||||
hass,
|
||||
lock.DOMAIN,
|
||||
discovery_info or config,
|
||||
async_add_entities,
|
||||
_async_setup_entity,
|
||||
)
|
||||
|
||||
|
||||
@@ -98,13 +102,8 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up MQTT lock through configuration.yaml and dynamically through MQTT discovery."""
|
||||
# load and initialize platform config from configuration.yaml
|
||||
await asyncio.gather(
|
||||
*(
|
||||
_async_setup_entity(hass, async_add_entities, config, config_entry)
|
||||
for config in await async_get_platform_config_from_yaml(
|
||||
hass, lock.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
)
|
||||
config_entry.async_on_unload(
|
||||
await async_setup_platform_discovery(hass, lock.DOMAIN, PLATFORM_SCHEMA_MODERN)
|
||||
)
|
||||
# setup for discovery
|
||||
setup = functools.partial(
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import abstractmethod
|
||||
import asyncio
|
||||
from collections.abc import Callable
|
||||
import json
|
||||
import logging
|
||||
@@ -27,10 +28,11 @@ from homeassistant.const import (
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_VALUE_TEMPLATE,
|
||||
)
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
discovery,
|
||||
entity_registry as er,
|
||||
)
|
||||
from homeassistant.helpers.device_registry import EVENT_DEVICE_REGISTRY_UPDATED
|
||||
@@ -46,17 +48,14 @@ from homeassistant.helpers.entity import (
|
||||
async_generate_entity_id,
|
||||
)
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.reload import async_setup_reload_service
|
||||
from homeassistant.helpers.reload import (
|
||||
async_integration_yaml_config,
|
||||
async_setup_reload_service,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import (
|
||||
DATA_MQTT,
|
||||
PLATFORMS,
|
||||
MqttValueTemplate,
|
||||
async_publish,
|
||||
debug_info,
|
||||
subscription,
|
||||
)
|
||||
from . import debug_info, subscription
|
||||
from .client import async_publish
|
||||
from .const import (
|
||||
ATTR_DISCOVERY_HASH,
|
||||
ATTR_DISCOVERY_PAYLOAD,
|
||||
@@ -65,6 +64,7 @@ from .const import (
|
||||
CONF_ENCODING,
|
||||
CONF_QOS,
|
||||
CONF_TOPIC,
|
||||
DATA_MQTT,
|
||||
DATA_MQTT_CONFIG,
|
||||
DATA_MQTT_RELOAD_NEEDED,
|
||||
DEFAULT_ENCODING,
|
||||
@@ -73,6 +73,7 @@ from .const import (
|
||||
DOMAIN,
|
||||
MQTT_CONNECTED,
|
||||
MQTT_DISCONNECTED,
|
||||
PLATFORMS,
|
||||
)
|
||||
from .debug_info import log_message, log_messages
|
||||
from .discovery import (
|
||||
@@ -82,7 +83,7 @@ from .discovery import (
|
||||
clear_discovery_hash,
|
||||
set_discovery_hash,
|
||||
)
|
||||
from .models import PublishPayloadType, ReceiveMessage
|
||||
from .models import MqttValueTemplate, PublishPayloadType, ReceiveMessage
|
||||
from .subscription import (
|
||||
async_prepare_subscribe_topics,
|
||||
async_subscribe_topics,
|
||||
@@ -264,8 +265,44 @@ class SetupEntity(Protocol):
"""Define setup_entities type."""


async def async_setup_platform_discovery(
hass: HomeAssistant, platform_domain: str, schema: vol.Schema
) -> CALLBACK_TYPE:
"""Set up platform discovery for manual config."""

async def _async_discover_entities(event: Event | None) -> None:
"""Discover entities for a platform."""
if event:
# The platform has been reloaded
config_yaml = await async_integration_yaml_config(hass, DOMAIN)
if not config_yaml:
return
config_yaml = config_yaml.get(DOMAIN, {})
else:
config_yaml = hass.data.get(DATA_MQTT_CONFIG, {})
if not config_yaml:
return
if platform_domain not in config_yaml:
return
await asyncio.gather(
*(
discovery.async_load_platform(hass, platform_domain, DOMAIN, config, {})
for config in await async_get_platform_config_from_yaml(
hass, platform_domain, schema, config_yaml
)
)
)

unsub = hass.bus.async_listen("event_mqtt_reloaded", _async_discover_entities)
await _async_discover_entities(None)
return unsub


async def async_get_platform_config_from_yaml(
hass: HomeAssistant, domain: str, schema: vol.Schema
hass: HomeAssistant,
platform_domain: str,
schema: vol.Schema,
config_yaml: ConfigType = None,
) -> list[ConfigType]:
"""Return a list of validated configurations for the domain."""

@@ -279,12 +316,15 @@ async def async_get_platform_config_from_yaml(
try:
validated_config.append(schema(config_item))
except vol.MultipleInvalid as err:
async_log_exception(err, domain, config_item, hass)
async_log_exception(err, platform_domain, config_item, hass)

return validated_config

config_yaml: ConfigType = hass.data.get(DATA_MQTT_CONFIG, {})
if not (platform_configs := config_yaml.get(domain)):
if config_yaml is None:
config_yaml = hass.data.get(DATA_MQTT_CONFIG)
if not config_yaml:
return []
if not (platform_configs := config_yaml.get(platform_domain)):
return []
return async_validate_config(hass, platform_configs)

@@ -314,7 +354,7 @@ async def async_setup_entry_helper(hass, domain, async_setup, schema):
async def async_setup_platform_helper(
hass: HomeAssistant,
platform_domain: str,
config: ConfigType,
config: ConfigType | DiscoveryInfoType,
async_add_entities: AddEntitiesCallback,
async_setup_entities: SetupEntity,
) -> None:

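The platform diffs in this comparison wire the new helper into their async_setup_entry in the same way. A minimal sketch of that wiring, using the fan platform as an example; PLATFORM_SCHEMA_MODERN, _async_setup_entity and the trailing async_setup_entry_helper call with DISCOVERY_SCHEMA are assumed from the truncated hunks rather than shown verbatim here:

import functools

from homeassistant.components import fan

from .mixins import async_setup_entry_helper, async_setup_platform_discovery


async def async_setup_entry(hass, config_entry, async_add_entities):
    """Sketch: set up MQTT fans from YAML and from MQTT discovery."""
    # Load YAML-configured entities now and again after "event_mqtt_reloaded";
    # the helper returns the listener's unsubscribe callback, which is released
    # when the config entry is unloaded.
    config_entry.async_on_unload(
        await async_setup_platform_discovery(hass, fan.DOMAIN, PLATFORM_SCHEMA_MODERN)
    )
    # Entities discovered later over MQTT keep using the existing entry helper
    # (assumed call, mirroring the hunk header above).
    setup = functools.partial(
        _async_setup_entity, hass, async_add_entities, config_entry=config_entry
    )
    await async_setup_entry_helper(hass, fan.DOMAIN, setup, DISCOVERY_SCHEMA)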
@@ -1,12 +1,21 @@
"""Models used by multiple MQTT modules."""
from __future__ import annotations

from ast import literal_eval
from collections.abc import Awaitable, Callable
import datetime as dt
from typing import Union
from typing import Any, Union

import attr

from homeassistant.const import ATTR_ENTITY_ID, ATTR_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import template
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import TemplateVarsType

_SENTINEL = object()

PublishPayloadType = Union[str, bytes, int, float, None]
ReceivePayloadType = Union[str, bytes]

@@ -35,3 +44,118 @@ class ReceiveMessage:

AsyncMessageCallbackType = Callable[[ReceiveMessage], Awaitable[None]]
MessageCallbackType = Callable[[ReceiveMessage], None]


class MqttCommandTemplate:
"""Class for rendering MQTT payload with command templates."""

def __init__(
self,
command_template: template.Template | None,
*,
hass: HomeAssistant | None = None,
entity: Entity | None = None,
) -> None:
"""Instantiate a command template."""
self._attr_command_template = command_template
if command_template is None:
return

self._entity = entity

command_template.hass = hass

if entity:
command_template.hass = entity.hass

@callback
def async_render(
self,
value: PublishPayloadType = None,
variables: TemplateVarsType = None,
) -> PublishPayloadType:
"""Render or convert the command template with given value or variables."""

def _convert_outgoing_payload(
payload: PublishPayloadType,
) -> PublishPayloadType:
"""Ensure correct raw MQTT payload is passed as bytes for publishing."""
if isinstance(payload, str):
try:
native_object = literal_eval(payload)
if isinstance(native_object, bytes):
return native_object

except (ValueError, TypeError, SyntaxError, MemoryError):
pass

return payload

if self._attr_command_template is None:
return value

values = {"value": value}
if self._entity:
values[ATTR_ENTITY_ID] = self._entity.entity_id
values[ATTR_NAME] = self._entity.name
if variables is not None:
values.update(variables)
return _convert_outgoing_payload(
self._attr_command_template.async_render(values, parse_result=False)
)


class MqttValueTemplate:
"""Class for rendering MQTT value template with possible json values."""

def __init__(
self,
value_template: template.Template | None,
*,
hass: HomeAssistant | None = None,
entity: Entity | None = None,
config_attributes: TemplateVarsType = None,
) -> None:
"""Instantiate a value template."""
self._value_template = value_template
self._config_attributes = config_attributes
if value_template is None:
return

value_template.hass = hass
self._entity = entity

if entity:
value_template.hass = entity.hass

@callback
def async_render_with_possible_json_value(
self,
payload: ReceivePayloadType,
default: ReceivePayloadType | object = _SENTINEL,
variables: TemplateVarsType = None,
) -> ReceivePayloadType:
"""Render with possible json value or pass-though a received MQTT value."""
if self._value_template is None:
return payload

values: dict[str, Any] = {}

if variables is not None:
values.update(variables)

if self._config_attributes is not None:
values.update(self._config_attributes)

if self._entity:
values[ATTR_ENTITY_ID] = self._entity.entity_id
values[ATTR_NAME] = self._entity.name

if default == _SENTINEL:
return self._value_template.async_render_with_possible_json_value(
payload, variables=values
)

return self._value_template.async_render_with_possible_json_value(
payload, default, variables=values
)

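A short usage sketch for the two wrappers above; the hass object, the entity and the template strings are illustrative:

from homeassistant.helpers.template import Template

# Outgoing: render a command template before publishing. Rendered strings that
# look like Python byte literals (e.g. "b'\x01'") are converted back to bytes
# by _convert_outgoing_payload.
command_template = MqttCommandTemplate(Template("{{ value | round(1) }}"), hass=hass)
payload = command_template.async_render(21.348)  # -> "21.3"

# Incoming: render a value template against a received payload, optionally
# treating it as JSON; passing a default avoids raising when rendering fails.
value_template = MqttValueTemplate(
    Template("{{ value_json.temperature }}"), hass=hass
)
state = value_template.async_render_with_possible_json_value(
    '{"temperature": 21.3}', default=None
)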
@@ -1,7 +1,6 @@
|
||||
"""Configure number in a device through MQTT topic."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import functools
|
||||
import logging
|
||||
|
||||
@@ -27,8 +26,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import MqttCommandTemplate, MqttValueTemplate, subscription
|
||||
from .. import mqtt
|
||||
from . import subscription
|
||||
from .config import MQTT_RW_SCHEMA
|
||||
from .const import (
|
||||
CONF_COMMAND_TEMPLATE,
|
||||
CONF_COMMAND_TOPIC,
|
||||
@@ -41,11 +40,12 @@ from .debug_info import log_messages
|
||||
from .mixins import (
|
||||
MQTT_ENTITY_COMMON_SCHEMA,
|
||||
MqttEntity,
|
||||
async_get_platform_config_from_yaml,
|
||||
async_setup_entry_helper,
|
||||
async_setup_platform_discovery,
|
||||
async_setup_platform_helper,
|
||||
warn_for_legacy_schema,
|
||||
)
|
||||
from .models import MqttCommandTemplate, MqttValueTemplate
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -75,7 +75,7 @@ def validate_config(config):
|
||||
return config
|
||||
|
||||
|
||||
_PLATFORM_SCHEMA_BASE = mqtt.MQTT_RW_SCHEMA.extend(
|
||||
_PLATFORM_SCHEMA_BASE = MQTT_RW_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_COMMAND_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_MAX, default=DEFAULT_MAX_VALUE): vol.Coerce(float),
|
||||
@@ -118,7 +118,11 @@ async def async_setup_platform(
|
||||
"""Set up MQTT number configured under the number platform key (deprecated)."""
|
||||
# Deprecated in HA Core 2022.6
|
||||
await async_setup_platform_helper(
|
||||
hass, number.DOMAIN, config, async_add_entities, _async_setup_entity
|
||||
hass,
|
||||
number.DOMAIN,
|
||||
discovery_info or config,
|
||||
async_add_entities,
|
||||
_async_setup_entity,
|
||||
)
|
||||
|
||||
|
||||
@@ -129,12 +133,9 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up MQTT number through configuration.yaml and dynamically through MQTT discovery."""
|
||||
# load and initialize platform config from configuration.yaml
|
||||
await asyncio.gather(
|
||||
*(
|
||||
_async_setup_entity(hass, async_add_entities, config, config_entry)
|
||||
for config in await async_get_platform_config_from_yaml(
|
||||
hass, number.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
config_entry.async_on_unload(
|
||||
await async_setup_platform_discovery(
|
||||
hass, number.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
)
|
||||
# setup for discovery
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Support for MQTT scenes."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import functools
|
||||
|
||||
import voluptuous as vol
|
||||
@@ -15,25 +14,27 @@ import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .. import mqtt
|
||||
from .client import async_publish
|
||||
from .config import MQTT_BASE_SCHEMA
|
||||
from .const import CONF_COMMAND_TOPIC, CONF_ENCODING, CONF_QOS, CONF_RETAIN
|
||||
from .mixins import (
|
||||
CONF_ENABLED_BY_DEFAULT,
|
||||
CONF_OBJECT_ID,
|
||||
MQTT_AVAILABILITY_SCHEMA,
|
||||
MqttEntity,
|
||||
async_get_platform_config_from_yaml,
|
||||
async_setup_entry_helper,
|
||||
async_setup_platform_discovery,
|
||||
async_setup_platform_helper,
|
||||
warn_for_legacy_schema,
|
||||
)
|
||||
from .util import valid_publish_topic
|
||||
|
||||
DEFAULT_NAME = "MQTT Scene"
|
||||
DEFAULT_RETAIN = False
|
||||
|
||||
PLATFORM_SCHEMA_MODERN = mqtt.MQTT_BASE_SCHEMA.extend(
|
||||
PLATFORM_SCHEMA_MODERN = MQTT_BASE_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
|
||||
vol.Required(CONF_COMMAND_TOPIC): valid_publish_topic,
|
||||
vol.Optional(CONF_ICON): cv.icon,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_PAYLOAD_ON): cv.string,
|
||||
@@ -63,7 +64,11 @@ async def async_setup_platform(
|
||||
"""Set up MQTT scene configured under the scene platform key (deprecated)."""
|
||||
# Deprecated in HA Core 2022.6
|
||||
await async_setup_platform_helper(
|
||||
hass, scene.DOMAIN, config, async_add_entities, _async_setup_entity
|
||||
hass,
|
||||
scene.DOMAIN,
|
||||
discovery_info or config,
|
||||
async_add_entities,
|
||||
_async_setup_entity,
|
||||
)
|
||||
|
||||
|
||||
@@ -74,13 +79,8 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up MQTT scene through configuration.yaml and dynamically through MQTT discovery."""
|
||||
# load and initialize platform config from configuration.yaml
|
||||
await asyncio.gather(
|
||||
*(
|
||||
_async_setup_entity(hass, async_add_entities, config, config_entry)
|
||||
for config in await async_get_platform_config_from_yaml(
|
||||
hass, scene.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
)
|
||||
config_entry.async_on_unload(
|
||||
await async_setup_platform_discovery(hass, scene.DOMAIN, PLATFORM_SCHEMA_MODERN)
|
||||
)
|
||||
# setup for discovery
|
||||
setup = functools.partial(
|
||||
@@ -128,7 +128,7 @@ class MqttScene(
|
||||
|
||||
This method is a coroutine.
|
||||
"""
|
||||
await mqtt.async_publish(
|
||||
await async_publish(
|
||||
self.hass,
|
||||
self._config[CONF_COMMAND_TOPIC],
|
||||
self._config[CONF_PAYLOAD_ON],
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Configure select in a device through MQTT topic."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import functools
|
||||
import logging
|
||||
|
||||
@@ -17,8 +16,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import MqttCommandTemplate, MqttValueTemplate, subscription
|
||||
from .. import mqtt
|
||||
from . import subscription
|
||||
from .config import MQTT_RW_SCHEMA
|
||||
from .const import (
|
||||
CONF_COMMAND_TEMPLATE,
|
||||
CONF_COMMAND_TOPIC,
|
||||
@@ -31,11 +30,12 @@ from .debug_info import log_messages
|
||||
from .mixins import (
|
||||
MQTT_ENTITY_COMMON_SCHEMA,
|
||||
MqttEntity,
|
||||
async_get_platform_config_from_yaml,
|
||||
async_setup_entry_helper,
|
||||
async_setup_platform_discovery,
|
||||
async_setup_platform_helper,
|
||||
warn_for_legacy_schema,
|
||||
)
|
||||
from .models import MqttCommandTemplate, MqttValueTemplate
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -51,7 +51,7 @@ MQTT_SELECT_ATTRIBUTES_BLOCKED = frozenset(
|
||||
)
|
||||
|
||||
|
||||
PLATFORM_SCHEMA_MODERN = mqtt.MQTT_RW_SCHEMA.extend(
|
||||
PLATFORM_SCHEMA_MODERN = MQTT_RW_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_COMMAND_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
@@ -79,7 +79,11 @@ async def async_setup_platform(
|
||||
"""Set up MQTT select configured under the select platform key (deprecated)."""
|
||||
# Deprecated in HA Core 2022.6
|
||||
await async_setup_platform_helper(
|
||||
hass, select.DOMAIN, config, async_add_entities, _async_setup_entity
|
||||
hass,
|
||||
select.DOMAIN,
|
||||
discovery_info or config,
|
||||
async_add_entities,
|
||||
_async_setup_entity,
|
||||
)
|
||||
|
||||
|
||||
@@ -90,12 +94,9 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up MQTT select through configuration.yaml and dynamically through MQTT discovery."""
|
||||
# load and initialize platform config from configuration.yaml
|
||||
await asyncio.gather(
|
||||
*(
|
||||
_async_setup_entity(hass, async_add_entities, config, config_entry)
|
||||
for config in await async_get_platform_config_from_yaml(
|
||||
hass, select.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
config_entry.async_on_unload(
|
||||
await async_setup_platform_discovery(
|
||||
hass, select.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
)
|
||||
# setup for discovery
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Support for MQTT sensors."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import functools
|
||||
import logging
|
||||
@@ -34,19 +33,21 @@ from homeassistant.helpers.event import async_track_point_in_utc_time
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import MqttValueTemplate, subscription
|
||||
from .. import mqtt
|
||||
from . import subscription
|
||||
from .config import MQTT_RO_SCHEMA
|
||||
from .const import CONF_ENCODING, CONF_QOS, CONF_STATE_TOPIC
|
||||
from .debug_info import log_messages
|
||||
from .mixins import (
|
||||
MQTT_ENTITY_COMMON_SCHEMA,
|
||||
MqttAvailability,
|
||||
MqttEntity,
|
||||
async_get_platform_config_from_yaml,
|
||||
async_setup_entry_helper,
|
||||
async_setup_platform_discovery,
|
||||
async_setup_platform_helper,
|
||||
warn_for_legacy_schema,
|
||||
)
|
||||
from .models import MqttValueTemplate
|
||||
from .util import valid_subscribe_topic
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -89,12 +90,12 @@ def validate_options(conf):
|
||||
return conf
|
||||
|
||||
|
||||
_PLATFORM_SCHEMA_BASE = mqtt.MQTT_RO_SCHEMA.extend(
|
||||
_PLATFORM_SCHEMA_BASE = MQTT_RO_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
|
||||
vol.Optional(CONF_EXPIRE_AFTER): cv.positive_int,
|
||||
vol.Optional(CONF_FORCE_UPDATE, default=DEFAULT_FORCE_UPDATE): cv.boolean,
|
||||
vol.Optional(CONF_LAST_RESET_TOPIC): mqtt.valid_subscribe_topic,
|
||||
vol.Optional(CONF_LAST_RESET_TOPIC): valid_subscribe_topic,
|
||||
vol.Optional(CONF_LAST_RESET_VALUE_TEMPLATE): cv.template,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_STATE_CLASS): STATE_CLASSES_SCHEMA,
|
||||
@@ -131,7 +132,11 @@ async def async_setup_platform(
|
||||
"""Set up MQTT sensors configured under the fan platform key (deprecated)."""
|
||||
# Deprecated in HA Core 2022.6
|
||||
await async_setup_platform_helper(
|
||||
hass, sensor.DOMAIN, config, async_add_entities, _async_setup_entity
|
||||
hass,
|
||||
sensor.DOMAIN,
|
||||
discovery_info or config,
|
||||
async_add_entities,
|
||||
_async_setup_entity,
|
||||
)
|
||||
|
||||
|
||||
@@ -142,12 +147,9 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up MQTT sensor through configuration.yaml and dynamically through MQTT discovery."""
|
||||
# load and initialize platform config from configuration.yaml
|
||||
await asyncio.gather(
|
||||
*(
|
||||
_async_setup_entity(hass, async_add_entities, config, config_entry)
|
||||
for config in await async_get_platform_config_from_yaml(
|
||||
hass, sensor.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
config_entry.async_on_unload(
|
||||
await async_setup_platform_discovery(
|
||||
hass, sensor.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
)
|
||||
# setup for discovery
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Support for MQTT sirens."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import copy
|
||||
import functools
|
||||
import json
|
||||
@@ -35,8 +34,8 @@ import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import MqttCommandTemplate, MqttValueTemplate, subscription
|
||||
from .. import mqtt
|
||||
from . import subscription
|
||||
from .config import MQTT_RW_SCHEMA
|
||||
from .const import (
|
||||
CONF_COMMAND_TEMPLATE,
|
||||
CONF_COMMAND_TOPIC,
|
||||
@@ -52,11 +51,12 @@ from .debug_info import log_messages
|
||||
from .mixins import (
|
||||
MQTT_ENTITY_COMMON_SCHEMA,
|
||||
MqttEntity,
|
||||
async_get_platform_config_from_yaml,
|
||||
async_setup_entry_helper,
|
||||
async_setup_platform_discovery,
|
||||
async_setup_platform_helper,
|
||||
warn_for_legacy_schema,
|
||||
)
|
||||
from .models import MqttCommandTemplate, MqttValueTemplate
|
||||
|
||||
DEFAULT_NAME = "MQTT Siren"
|
||||
DEFAULT_PAYLOAD_ON = "ON"
|
||||
@@ -74,7 +74,7 @@ CONF_SUPPORT_VOLUME_SET = "support_volume_set"
|
||||
|
||||
STATE = "state"
|
||||
|
||||
PLATFORM_SCHEMA_MODERN = mqtt.MQTT_RW_SCHEMA.extend(
|
||||
PLATFORM_SCHEMA_MODERN = MQTT_RW_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_AVAILABLE_TONES): cv.ensure_list,
|
||||
vol.Optional(CONF_COMMAND_TEMPLATE): cv.template,
|
||||
@@ -128,7 +128,11 @@ async def async_setup_platform(
|
||||
"""Set up MQTT sirens configured under the fan platform key (deprecated)."""
|
||||
# Deprecated in HA Core 2022.6
|
||||
await async_setup_platform_helper(
|
||||
hass, siren.DOMAIN, config, async_add_entities, _async_setup_entity
|
||||
hass,
|
||||
siren.DOMAIN,
|
||||
discovery_info or config,
|
||||
async_add_entities,
|
||||
_async_setup_entity,
|
||||
)
|
||||
|
||||
|
||||
@@ -139,13 +143,8 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up MQTT siren through configuration.yaml and dynamically through MQTT discovery."""
|
||||
# load and initialize platform config from configuration.yaml
|
||||
await asyncio.gather(
|
||||
*(
|
||||
_async_setup_entity(hass, async_add_entities, config, config_entry)
|
||||
for config in await async_get_platform_config_from_yaml(
|
||||
hass, siren.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
)
|
||||
config_entry.async_on_unload(
|
||||
await async_setup_platform_discovery(hass, siren.DOMAIN, PLATFORM_SCHEMA_MODERN)
|
||||
)
|
||||
# setup for discovery
|
||||
setup = functools.partial(
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Support for MQTT switches."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import functools
|
||||
|
||||
import voluptuous as vol
|
||||
@@ -24,8 +23,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import MqttValueTemplate, subscription
|
||||
from .. import mqtt
|
||||
from . import subscription
|
||||
from .config import MQTT_RW_SCHEMA
|
||||
from .const import (
|
||||
CONF_COMMAND_TOPIC,
|
||||
CONF_ENCODING,
|
||||
@@ -38,11 +37,12 @@ from .debug_info import log_messages
|
||||
from .mixins import (
|
||||
MQTT_ENTITY_COMMON_SCHEMA,
|
||||
MqttEntity,
|
||||
async_get_platform_config_from_yaml,
|
||||
async_setup_entry_helper,
|
||||
async_setup_platform_discovery,
|
||||
async_setup_platform_helper,
|
||||
warn_for_legacy_schema,
|
||||
)
|
||||
from .models import MqttValueTemplate
|
||||
|
||||
DEFAULT_NAME = "MQTT Switch"
|
||||
DEFAULT_PAYLOAD_ON = "ON"
|
||||
@@ -51,7 +51,7 @@ DEFAULT_OPTIMISTIC = False
|
||||
CONF_STATE_ON = "state_on"
|
||||
CONF_STATE_OFF = "state_off"
|
||||
|
||||
PLATFORM_SCHEMA_MODERN = mqtt.MQTT_RW_SCHEMA.extend(
|
||||
PLATFORM_SCHEMA_MODERN = MQTT_RW_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
|
||||
@@ -82,7 +82,11 @@ async def async_setup_platform(
|
||||
"""Set up MQTT switch configured under the fan platform key (deprecated)."""
|
||||
# Deprecated in HA Core 2022.6
|
||||
await async_setup_platform_helper(
|
||||
hass, switch.DOMAIN, config, async_add_entities, _async_setup_entity
|
||||
hass,
|
||||
switch.DOMAIN,
|
||||
discovery_info or config,
|
||||
async_add_entities,
|
||||
_async_setup_entity,
|
||||
)
|
||||
|
||||
|
||||
@@ -93,12 +97,9 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up MQTT switch through configuration.yaml and dynamically through MQTT discovery."""
|
||||
# load and initialize platform config from configuration.yaml
|
||||
await asyncio.gather(
|
||||
*(
|
||||
_async_setup_entity(hass, async_add_entities, config, config_entry)
|
||||
for config in await async_get_platform_config_from_yaml(
|
||||
hass, switch.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
config_entry.async_on_unload(
|
||||
await async_setup_platform_discovery(
|
||||
hass, switch.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
)
|
||||
# setup for discovery
|
||||
|
||||
@@ -11,8 +11,8 @@ from homeassistant.core import HomeAssistant, callback
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from . import MqttValueTemplate, subscription
|
||||
from .. import mqtt
|
||||
from . import subscription
|
||||
from .config import MQTT_BASE_SCHEMA
|
||||
from .const import ATTR_DISCOVERY_HASH, CONF_QOS, CONF_TOPIC
|
||||
from .mixins import (
|
||||
MQTT_ENTITY_DEVICE_INFO_SCHEMA,
|
||||
@@ -21,7 +21,7 @@ from .mixins import (
|
||||
send_discovery_done,
|
||||
update_device,
|
||||
)
|
||||
from .models import ReceiveMessage
|
||||
from .models import MqttValueTemplate, ReceiveMessage
|
||||
from .subscription import EntitySubscription
|
||||
from .util import valid_subscribe_topic
|
||||
|
||||
@@ -30,7 +30,7 @@ LOG_NAME = "Tag"
|
||||
TAG = "tag"
|
||||
TAGS = "mqtt_tags"
|
||||
|
||||
PLATFORM_SCHEMA = mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend(
|
||||
PLATFORM_SCHEMA = MQTT_BASE_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_DEVICE): MQTT_ENTITY_DEVICE_INFO_SCHEMA,
|
||||
vol.Optional(CONF_PLATFORM): "mqtt",
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Support for MQTT vacuums."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import functools
|
||||
|
||||
import voluptuous as vol
|
||||
@@ -13,8 +12,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from ..mixins import (
|
||||
async_get_platform_config_from_yaml,
|
||||
async_setup_entry_helper,
|
||||
async_setup_platform_discovery,
|
||||
async_setup_platform_helper,
|
||||
)
|
||||
from .schema import CONF_SCHEMA, LEGACY, MQTT_VACUUM_SCHEMA, STATE
|
||||
@@ -77,7 +76,11 @@ async def async_setup_platform(
|
||||
"""Set up MQTT vacuum through configuration.yaml."""
|
||||
# Deprecated in HA Core 2022.6
|
||||
await async_setup_platform_helper(
|
||||
hass, vacuum.DOMAIN, config, async_add_entities, _async_setup_entity
|
||||
hass,
|
||||
vacuum.DOMAIN,
|
||||
discovery_info or config,
|
||||
async_add_entities,
|
||||
_async_setup_entity,
|
||||
)
|
||||
|
||||
|
||||
@@ -88,12 +91,9 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up MQTT vacuum through configuration.yaml and dynamically through MQTT discovery."""
|
||||
# load and initialize platform config from configuration.yaml
|
||||
await asyncio.gather(
|
||||
*(
|
||||
_async_setup_entity(hass, async_add_entities, config, config_entry)
|
||||
for config in await async_get_platform_config_from_yaml(
|
||||
hass, vacuum.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
config_entry.async_on_unload(
|
||||
await async_setup_platform_discovery(
|
||||
hass, vacuum.DOMAIN, PLATFORM_SCHEMA_MODERN
|
||||
)
|
||||
)
|
||||
# setup for discovery
|
||||
|
||||
@@ -15,11 +15,13 @@ from homeassistant.core import callback
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.icon import icon_for_battery_level
|
||||
|
||||
from .. import MqttValueTemplate, subscription
|
||||
from ... import mqtt
|
||||
from .. import subscription
|
||||
from ..config import MQTT_BASE_SCHEMA
|
||||
from ..const import CONF_COMMAND_TOPIC, CONF_ENCODING, CONF_QOS, CONF_RETAIN
|
||||
from ..debug_info import log_messages
|
||||
from ..mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity, warn_for_legacy_schema
|
||||
from ..models import MqttValueTemplate
|
||||
from ..util import valid_publish_topic
|
||||
from .const import MQTT_VACUUM_ATTRIBUTES_BLOCKED
|
||||
from .schema import MQTT_VACUUM_SCHEMA, services_to_strings, strings_to_services
|
||||
|
||||
@@ -96,25 +98,23 @@ MQTT_LEGACY_VACUUM_ATTRIBUTES_BLOCKED = MQTT_VACUUM_ATTRIBUTES_BLOCKED | frozens
|
||||
)
|
||||
|
||||
PLATFORM_SCHEMA_LEGACY_MODERN = (
|
||||
mqtt.MQTT_BASE_SCHEMA.extend(
|
||||
MQTT_BASE_SCHEMA.extend(
|
||||
{
|
||||
vol.Inclusive(CONF_BATTERY_LEVEL_TEMPLATE, "battery"): cv.template,
|
||||
vol.Inclusive(
|
||||
CONF_BATTERY_LEVEL_TOPIC, "battery"
|
||||
): mqtt.valid_publish_topic,
|
||||
vol.Inclusive(CONF_BATTERY_LEVEL_TOPIC, "battery"): valid_publish_topic,
|
||||
vol.Inclusive(CONF_CHARGING_TEMPLATE, "charging"): cv.template,
|
||||
vol.Inclusive(CONF_CHARGING_TOPIC, "charging"): mqtt.valid_publish_topic,
|
||||
vol.Inclusive(CONF_CHARGING_TOPIC, "charging"): valid_publish_topic,
|
||||
vol.Inclusive(CONF_CLEANING_TEMPLATE, "cleaning"): cv.template,
|
||||
vol.Inclusive(CONF_CLEANING_TOPIC, "cleaning"): mqtt.valid_publish_topic,
|
||||
vol.Inclusive(CONF_CLEANING_TOPIC, "cleaning"): valid_publish_topic,
|
||||
vol.Inclusive(CONF_DOCKED_TEMPLATE, "docked"): cv.template,
|
||||
vol.Inclusive(CONF_DOCKED_TOPIC, "docked"): mqtt.valid_publish_topic,
|
||||
vol.Inclusive(CONF_DOCKED_TOPIC, "docked"): valid_publish_topic,
|
||||
vol.Inclusive(CONF_ERROR_TEMPLATE, "error"): cv.template,
|
||||
vol.Inclusive(CONF_ERROR_TOPIC, "error"): mqtt.valid_publish_topic,
|
||||
vol.Inclusive(CONF_ERROR_TOPIC, "error"): valid_publish_topic,
|
||||
vol.Optional(CONF_FAN_SPEED_LIST, default=[]): vol.All(
|
||||
cv.ensure_list, [cv.string]
|
||||
),
|
||||
vol.Inclusive(CONF_FAN_SPEED_TEMPLATE, "fan_speed"): cv.template,
|
||||
vol.Inclusive(CONF_FAN_SPEED_TOPIC, "fan_speed"): mqtt.valid_publish_topic,
|
||||
vol.Inclusive(CONF_FAN_SPEED_TOPIC, "fan_speed"): valid_publish_topic,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(
|
||||
CONF_PAYLOAD_CLEAN_SPOT, default=DEFAULT_PAYLOAD_CLEAN_SPOT
|
||||
@@ -135,12 +135,12 @@ PLATFORM_SCHEMA_LEGACY_MODERN = (
|
||||
vol.Optional(
|
||||
CONF_PAYLOAD_TURN_ON, default=DEFAULT_PAYLOAD_TURN_ON
|
||||
): cv.string,
|
||||
vol.Optional(CONF_SEND_COMMAND_TOPIC): mqtt.valid_publish_topic,
|
||||
vol.Optional(CONF_SET_FAN_SPEED_TOPIC): mqtt.valid_publish_topic,
|
||||
vol.Optional(CONF_SEND_COMMAND_TOPIC): valid_publish_topic,
|
||||
vol.Optional(CONF_SET_FAN_SPEED_TOPIC): valid_publish_topic,
|
||||
vol.Optional(
|
||||
CONF_SUPPORTED_FEATURES, default=DEFAULT_SERVICE_STRINGS
|
||||
): vol.All(cv.ensure_list, [vol.In(STRING_TO_SERVICE.keys())]),
|
||||
vol.Optional(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
|
||||
vol.Optional(CONF_COMMAND_TOPIC): valid_publish_topic,
|
||||
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
}
)

@@ -23,7 +23,7 @@ from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv

from .. import subscription
from ... import mqtt
from ..config import MQTT_BASE_SCHEMA
from ..const import (
CONF_COMMAND_TOPIC,
CONF_ENCODING,
@@ -33,6 +33,7 @@ from ..const import (
)
from ..debug_info import log_messages
from ..mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity, warn_for_legacy_schema
from ..util import valid_publish_topic
from .const import MQTT_VACUUM_ATTRIBUTES_BLOCKED
from .schema import MQTT_VACUUM_SCHEMA, services_to_strings, strings_to_services

@@ -105,7 +106,7 @@ DEFAULT_PAYLOAD_START = "start"
DEFAULT_PAYLOAD_PAUSE = "pause"

PLATFORM_SCHEMA_STATE_MODERN = (
mqtt.MQTT_BASE_SCHEMA.extend(
MQTT_BASE_SCHEMA.extend(
{
vol.Optional(CONF_FAN_SPEED_LIST, default=[]): vol.All(
cv.ensure_list, [cv.string]
@@ -123,13 +124,13 @@ PLATFORM_SCHEMA_STATE_MODERN = (
vol.Optional(CONF_PAYLOAD_START, default=DEFAULT_PAYLOAD_START): cv.string,
vol.Optional(CONF_PAYLOAD_PAUSE, default=DEFAULT_PAYLOAD_PAUSE): cv.string,
vol.Optional(CONF_PAYLOAD_STOP, default=DEFAULT_PAYLOAD_STOP): cv.string,
vol.Optional(CONF_SEND_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_SET_FAN_SPEED_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_STATE_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_SEND_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_SET_FAN_SPEED_TOPIC): valid_publish_topic,
vol.Optional(CONF_STATE_TOPIC): valid_publish_topic,
vol.Optional(
CONF_SUPPORTED_FEATURES, default=DEFAULT_SERVICE_STRINGS
): vol.All(cv.ensure_list, [vol.In(STRING_TO_SERVICE.keys())]),
vol.Optional(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
}
)
@@ -178,7 +179,7 @@ class MqttStateVacuum(MqttEntity, StateVacuumEntity):
supported_feature_strings, STRING_TO_SERVICE
)
self._fan_speed_list = config[CONF_FAN_SPEED_LIST]
self._command_topic = config.get(mqtt.CONF_COMMAND_TOPIC)
self._command_topic = config.get(CONF_COMMAND_TOPIC)
self._set_fan_speed_topic = config.get(CONF_SET_FAN_SPEED_TOPIC)
self._send_command_topic = config.get(CONF_SEND_COMMAND_TOPIC)


@@ -35,7 +35,7 @@ GPS_JSON_PAYLOAD_SCHEMA = vol.Schema(
extra=vol.ALLOW_EXTRA,
)

PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend(mqtt.SCHEMA_BASE).extend(
PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend(mqtt.config.SCHEMA_BASE).extend(
{vol.Required(CONF_DEVICES): {cv.string: mqtt.valid_subscribe_topic}}
)


@@ -43,7 +43,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
vol.Optional(CONF_AWAY_TIMEOUT, default=DEFAULT_AWAY_TIMEOUT): cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
).extend(mqtt.MQTT_RO_PLATFORM_SCHEMA.schema)
).extend(mqtt.config.MQTT_RO_SCHEMA.schema)

MQTT_PAYLOAD = vol.Schema(
vol.All(

@@ -2,7 +2,7 @@
"domain": "netgear",
"name": "NETGEAR",
"documentation": "https://www.home-assistant.io/integrations/netgear",
"requirements": ["pynetgear==0.10.0"],
"requirements": ["pynetgear==0.10.4"],
"codeowners": ["@hacf-fr", "@Quentame", "@starkillerOG"],
"iot_class": "local_polling",
"config_flow": true,

@@ -139,8 +139,11 @@ async def async_setup_entry(
entry, coordinator, controller, description
)
for description in BINARY_SENSOR_DESCRIPTIONS
if (coordinator := coordinators[description.api_category]) is not None
and key_exists(coordinator.data, description.data_key)
if (
(coordinator := coordinators[description.api_category]) is not None
and coordinator.data
and key_exists(coordinator.data, description.data_key)
)
]
)


@@ -3,7 +3,7 @@
"name": "RainMachine",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/rainmachine",
"requirements": ["regenmaschine==2022.05.1"],
"requirements": ["regenmaschine==2022.06.0"],
"codeowners": ["@bachya"],
"iot_class": "local_polling",
"homekit": {

@@ -133,8 +133,11 @@ async def async_setup_entry(
entry, coordinator, controller, description
)
for description in SENSOR_DESCRIPTIONS
if (coordinator := coordinators[description.api_category]) is not None
and key_exists(coordinator.data, description.data_key)
if (
(coordinator := coordinators[description.api_category]) is not None
and coordinator.data
and key_exists(coordinator.data, description.data_key)
)
]

zone_coordinator = coordinators[DATA_ZONES]

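The two RainMachine hunks above add an "and coordinator.data" guard so key_exists is never asked to walk an empty or missing payload. A standalone sketch of the same guard, with hypothetical payloads and a simplified stand-in for the integration's key_exists helper:

from typing import Any

def key_exists(data: dict[str, Any], dotted_key: str) -> bool:
    # Simplified stand-in: walk a dotted key path through nested dicts.
    node: Any = data
    for part in dotted_key.split("."):
        if not isinstance(node, dict) or part not in node:
            return False
        node = node[part]
    return True

# Hypothetical coordinator payloads: one populated, one that returned nothing yet.
coordinators = {
    "provision": {"system": {"rainSensor": True}},
    "restrictions": None,
}
descriptions = [("provision", "system.rainSensor"), ("restrictions", "hourly")]

entities = [
    (category, data_key)
    for category, data_key in descriptions
    if (data := coordinators[category]) is not None
    and data  # the new guard: skip coordinators whose data is still empty
    and key_exists(data, data_key)
]
print(entities)  # [('provision', 'system.rainSensor')]
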
@@ -3,6 +3,7 @@ from __future__ import annotations

import asyncio
from collections.abc import Callable, Iterable
from concurrent.futures import CancelledError
import contextlib
from datetime import datetime, timedelta
import logging
@@ -518,9 +519,16 @@ class Recorder(threading.Thread):

def _wait_startup_or_shutdown(self) -> object | None:
"""Wait for startup or shutdown before starting."""
return asyncio.run_coroutine_threadsafe(
self._async_wait_for_started(), self.hass.loop
).result()
try:
return asyncio.run_coroutine_threadsafe(
self._async_wait_for_started(), self.hass.loop
).result()
except CancelledError as ex:
_LOGGER.warning(
"Recorder startup was externally canceled before it could complete: %s",
ex,
)
return SHUTDOWN_TASK

def run(self) -> None:
"""Start processing events to save."""

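The recorder hunk above wraps the cross-thread startup wait in try/except so a future cancelled from the event loop no longer crashes the recorder thread. A rough, self-contained illustration of that pattern (names and timings are illustrative, not the recorder's actual API):

import asyncio
import threading
from concurrent.futures import CancelledError

SHUTDOWN_TASK = object()  # sentinel meaning "give up and shut down"

async def wait_for_started() -> str:
    await asyncio.sleep(60)  # pretend startup takes a long time
    return "started"

def recorder_thread(loop: asyncio.AbstractEventLoop) -> None:
    # Same shape as the recorder's wait: block on a coroutine scheduled onto
    # the event loop from a worker thread.
    future = asyncio.run_coroutine_threadsafe(wait_for_started(), loop)
    try:
        result = future.result()
    except CancelledError:
        # Startup was cancelled externally; fall back to the shutdown sentinel.
        result = SHUTDOWN_TASK
    print("recorder thread got:", result)

async def main() -> None:
    loop = asyncio.get_running_loop()
    thread = threading.Thread(target=recorder_thread, args=(loop,))
    thread.start()
    await asyncio.sleep(0.1)
    # Cancel everything except ourselves, mimicking an external shutdown.
    for task in asyncio.all_tasks(loop):
        if task is not asyncio.current_task():
            task.cancel()
    await asyncio.sleep(0.1)
    thread.join()

asyncio.run(main())
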
@@ -5,7 +5,7 @@ from collections.abc import Callable, Iterable
import json
from typing import Any

from sqlalchemy import JSON, Column, Text, cast, not_, or_
from sqlalchemy import Column, Text, cast, not_, or_
from sqlalchemy.sql.elements import ClauseList

from homeassistant.const import CONF_DOMAINS, CONF_ENTITIES, CONF_EXCLUDE, CONF_INCLUDE
@@ -16,6 +16,7 @@ from .models import ENTITY_ID_IN_EVENT, OLD_ENTITY_ID_IN_EVENT, States

DOMAIN = "history"
HISTORY_FILTERS = "history_filters"
JSON_NULL = json.dumps(None)

GLOB_TO_SQL_CHARS = {
ord("*"): "%",
@@ -36,7 +37,7 @@ def extract_include_exclude_filter_conf(conf: ConfigType) -> dict[str, Any]:
"""
return {
filter_type: {
matcher: set(conf.get(filter_type, {}).get(matcher, []))
matcher: set(conf.get(filter_type, {}).get(matcher) or [])
for matcher in FITLER_MATCHERS
}
for filter_type in FILTER_TYPES
@@ -88,14 +89,32 @@ class Filters:
self.included_domains: Iterable[str] = []
self.included_entity_globs: Iterable[str] = []

def __repr__(self) -> str:
"""Return human readable excludes/includes."""
return (
f"<Filters excluded_entities={self.excluded_entities} excluded_domains={self.excluded_domains} "
f"excluded_entity_globs={self.excluded_entity_globs} "
f"included_entities={self.included_entities} included_domains={self.included_domains} "
f"included_entity_globs={self.included_entity_globs}>"
)

@property
def has_config(self) -> bool:
"""Determine if there is any filter configuration."""
return bool(self._have_exclude or self._have_include)

@property
def _have_exclude(self) -> bool:
return bool(
self.excluded_entities
or self.excluded_domains
or self.excluded_entity_globs
or self.included_entities
)

@property
def _have_include(self) -> bool:
return bool(
self.included_entities
or self.included_domains
or self.included_entity_globs
)
@@ -103,36 +122,67 @@ class Filters:
def _generate_filter_for_columns(
self, columns: Iterable[Column], encoder: Callable[[Any], Any]
) -> ClauseList:
includes = []
if self.included_domains:
includes.append(_domain_matcher(self.included_domains, columns, encoder))
if self.included_entities:
includes.append(_entity_matcher(self.included_entities, columns, encoder))
if self.included_entity_globs:
includes.append(
_globs_to_like(self.included_entity_globs, columns, encoder)
)
"""Generate a filter from pre-comuted sets and pattern lists.
|
||||

excludes = []
if self.excluded_domains:
excludes.append(_domain_matcher(self.excluded_domains, columns, encoder))
if self.excluded_entities:
excludes.append(_entity_matcher(self.excluded_entities, columns, encoder))
if self.excluded_entity_globs:
excludes.append(
_globs_to_like(self.excluded_entity_globs, columns, encoder)
)
This must match exactly how homeassistant.helpers.entityfilter works.
"""
i_domains = _domain_matcher(self.included_domains, columns, encoder)
i_entities = _entity_matcher(self.included_entities, columns, encoder)
i_entity_globs = _globs_to_like(self.included_entity_globs, columns, encoder)
includes = [i_domains, i_entities, i_entity_globs]

if not includes and not excludes:
e_domains = _domain_matcher(self.excluded_domains, columns, encoder)
e_entities = _entity_matcher(self.excluded_entities, columns, encoder)
e_entity_globs = _globs_to_like(self.excluded_entity_globs, columns, encoder)
excludes = [e_domains, e_entities, e_entity_globs]

have_exclude = self._have_exclude
have_include = self._have_include

# Case 1 - no includes or excludes - pass all entities
if not have_include and not have_exclude:
return None

if includes and not excludes:
# Case 2 - includes, no excludes - only include specified entities
if have_include and not have_exclude:
return or_(*includes).self_group()

if not includes and excludes:
# Case 3 - excludes, no includes - only exclude specified entities
if not have_include and have_exclude:
return not_(or_(*excludes).self_group())

return or_(*includes).self_group() & not_(or_(*excludes).self_group())
# Case 4 - both includes and excludes specified
# Case 4a - include domain or glob specified
# - if domain is included, pass if entity not excluded
# - if glob is included, pass if entity and domain not excluded
# - if domain and glob are not included, pass if entity is included
# note: if both include domain matches then exclude domains ignored.
# If glob matches then exclude domains and glob checked
if self.included_domains or self.included_entity_globs:
return or_(
(i_domains & ~(e_entities | e_entity_globs)),
(
~i_domains
& or_(
(i_entity_globs & ~(or_(*excludes))),
(~i_entity_globs & i_entities),
)
),
).self_group()

# Case 4b - exclude domain or glob specified, include has no domain or glob
# In this one case the traditional include logic is inverted. Even though an
# include is specified since its only a list of entity IDs its used only to
# expose specific entities excluded by domain or glob. Any entities not
# excluded are then presumed included. Logic is as follows
# - if domain or glob is excluded, pass if entity is included
# - if domain is not excluded, pass if entity not excluded by ID
if self.excluded_domains or self.excluded_entity_globs:
return (not_(or_(*excludes)) | i_entities).self_group()

# Case 4c - neither include or exclude domain specified
# - Only pass if entity is included. Ignore entity excludes.
return i_entities

def states_entity_filter(self) -> ClauseList:
"""Generate the entity filter query."""
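The rewritten _generate_filter_for_columns above enumerates the same include/exclude cases as homeassistant.helpers.entityfilter, just expressed as SQL clauses. A rough pure-Python rendering of that decision table (simplified; fnmatch stands in for the SQL LIKE globs):

from fnmatch import fnmatch

def entity_passes(entity_id: str, include: dict, exclude: dict) -> bool:
    # include/exclude hold "entities", "domains" and "globs" sets.
    domain = entity_id.split(".", 1)[0]
    i_dom = domain in include["domains"]
    i_ent = entity_id in include["entities"]
    i_glob = any(fnmatch(entity_id, glob) for glob in include["globs"])
    e_dom = domain in exclude["domains"]
    e_ent = entity_id in exclude["entities"]
    e_glob = any(fnmatch(entity_id, glob) for glob in exclude["globs"])
    have_include = any(include.values())
    have_exclude = any(exclude.values())

    if not have_include and not have_exclude:  # Case 1: no filter at all
        return True
    if have_include and not have_exclude:      # Case 2: includes only
        return i_dom or i_ent or i_glob
    if not have_include and have_exclude:      # Case 3: excludes only
        return not (e_dom or e_ent or e_glob)
    # Case 4: both present
    if include["domains"] or include["globs"]:  # Case 4a
        if i_dom:
            return not (e_ent or e_glob)
        if i_glob:
            return not (e_dom or e_ent or e_glob)
        return i_ent
    if exclude["domains"] or exclude["globs"]:  # Case 4b
        return i_ent or not (e_dom or e_ent or e_glob)
    return i_ent                                # Case 4c

include = {"domains": {"light"}, "entities": set(), "globs": set()}
exclude = {"domains": set(), "entities": {"light.porch"}, "globs": set()}
print(entity_passes("light.kitchen", include, exclude))  # True: domain included
print(entity_passes("light.porch", include, exclude))    # False: entity excluded
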
@@ -147,7 +197,17 @@ class Filters:
"""Generate the entity filter query."""
_encoder = json.dumps
return or_(
(ENTITY_ID_IN_EVENT == JSON.NULL) & (OLD_ENTITY_ID_IN_EVENT == JSON.NULL),
# sqlalchemy's SQLite json implementation always
# wraps everything with JSON_QUOTE so it resolves to 'null'
# when its empty
#
# For MySQL and PostgreSQL it will resolve to a literal
# NULL when its empty
#
((ENTITY_ID_IN_EVENT == JSON_NULL) | ENTITY_ID_IN_EVENT.is_(None))
& (
(OLD_ENTITY_ID_IN_EVENT == JSON_NULL) | OLD_ENTITY_ID_IN_EVENT.is_(None)
),
self._generate_filter_for_columns(
(ENTITY_ID_IN_EVENT, OLD_ENTITY_ID_IN_EVENT), _encoder
).self_group(),
@@ -158,29 +218,43 @@ def _globs_to_like(
glob_strs: Iterable[str], columns: Iterable[Column], encoder: Callable[[Any], Any]
) -> ClauseList:
"""Translate glob to sql."""
return or_(
cast(column, Text()).like(
encoder(glob_str).translate(GLOB_TO_SQL_CHARS), escape="\\"
matchers = [
(
column.is_not(None)
& cast(column, Text()).like(
encoder(glob_str).translate(GLOB_TO_SQL_CHARS), escape="\\"
)
)
for glob_str in glob_strs
for column in columns
)
]
return or_(*matchers) if matchers else or_(False)


def _entity_matcher(
entity_ids: Iterable[str], columns: Iterable[Column], encoder: Callable[[Any], Any]
) -> ClauseList:
return or_(
cast(column, Text()).in_([encoder(entity_id) for entity_id in entity_ids])
matchers = [
(
column.is_not(None)
& cast(column, Text()).in_([encoder(entity_id) for entity_id in entity_ids])
)
for column in columns
)
]
return or_(*matchers) if matchers else or_(False)


def _domain_matcher(
domains: Iterable[str], columns: Iterable[Column], encoder: Callable[[Any], Any]
) -> ClauseList:
return or_(
cast(column, Text()).like(encoder(f"{domain}.%"))
for domain in domains
matchers = [
(column.is_not(None) & cast(column, Text()).like(encoder(domain_matcher)))
for domain_matcher in like_domain_matchers(domains)
for column in columns
)
]
return or_(*matchers) if matchers else or_(False)


def like_domain_matchers(domains: Iterable[str]) -> list[str]:
"""Convert a list of domains to sql LIKE matchers."""
return [f"{domain}.%" for domain in domains]

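GLOB_TO_SQL_CHARS above is a plain str.translate table that turns configuration globs into SQL LIKE patterns, and the new matcher helpers fall back to a guaranteed-false or_(False) when nothing is configured. A small sketch of the translation step; the full mapping is assumed here (only the "*" entry is visible in the hunk), and the encoder is json.dumps as in the events filter:

import json

# Assumed full table: glob wildcards become LIKE wildcards, existing LIKE
# metacharacters are escaped so they only ever match literally.
GLOB_TO_SQL_CHARS = {
    ord("*"): "%",
    ord("?"): "_",
    ord("%"): "\\%",
    ord("_"): "\\_",
    ord("\\"): "\\\\",
}

def glob_to_like(glob_str: str) -> str:
    # Encode the same way entity_ids are stored in the event payload (JSON),
    # then swap the wildcards.
    return json.dumps(glob_str).translate(GLOB_TO_SQL_CHARS)

print(glob_to_like("sensor.door_*"))   # "sensor.door\_%"  (escaped _, glob * -> %)
print(glob_to_like("light.kitchen?"))  # "light.kitchen_"  (glob ? -> single-char _)
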
@@ -15,6 +15,7 @@ from sqlalchemy.orm.query import Query
from sqlalchemy.orm.session import Session
from sqlalchemy.sql.expression import literal
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.sql.selectable import Subquery

from homeassistant.components import recorder
from homeassistant.components.websocket_api.const import (
@@ -237,7 +238,9 @@ def _significant_states_stmt(
stmt += _ignore_domains_filter
if filters and filters.has_config:
entity_filter = filters.states_entity_filter()
stmt += lambda q: q.filter(entity_filter)
stmt = stmt.add_criteria(
lambda q: q.filter(entity_filter), track_on=[filters]
)

stmt += lambda q: q.filter(States.last_updated > start_time)
if end_time:
@@ -349,7 +352,8 @@ def _state_changed_during_period_stmt(
)
if end_time:
stmt += lambda q: q.filter(States.last_updated < end_time)
stmt += lambda q: q.filter(States.entity_id == entity_id)
if entity_id:
stmt += lambda q: q.filter(States.entity_id == entity_id)
if join_attributes:
stmt += lambda q: q.outerjoin(
StateAttributes, States.attributes_id == StateAttributes.attributes_id
@@ -375,6 +379,7 @@ def state_changes_during_period(
) -> MutableMapping[str, list[State]]:
"""Return states changes during UTC period start_time - end_time."""
entity_id = entity_id.lower() if entity_id is not None else None
entity_ids = [entity_id] if entity_id is not None else None

with session_scope(hass=hass) as session:
stmt = _state_changed_during_period_stmt(
@@ -389,8 +394,6 @@ def state_changes_during_period(
states = execute_stmt_lambda_element(
session, stmt, None if entity_id else start_time, end_time
)
entity_ids = [entity_id] if entity_id is not None else None

return cast(
MutableMapping[str, list[State]],
_sorted_states_to_dict(
@@ -405,14 +408,16 @@ def state_changes_during_period(


def _get_last_state_changes_stmt(
schema_version: int, number_of_states: int, entity_id: str
schema_version: int, number_of_states: int, entity_id: str | None
) -> StatementLambdaElement:
stmt, join_attributes = lambda_stmt_and_join_attributes(
schema_version, False, include_last_changed=False
)
stmt += lambda q: q.filter(
(States.last_changed == States.last_updated) | States.last_changed.is_(None)
).filter(States.entity_id == entity_id)
)
if entity_id:
stmt += lambda q: q.filter(States.entity_id == entity_id)
if join_attributes:
stmt += lambda q: q.outerjoin(
StateAttributes, States.attributes_id == StateAttributes.attributes_id
@@ -424,19 +429,18 @@ def _get_last_state_changes_stmt(


def get_last_state_changes(
hass: HomeAssistant, number_of_states: int, entity_id: str
hass: HomeAssistant, number_of_states: int, entity_id: str | None
) -> MutableMapping[str, list[State]]:
"""Return the last number_of_states."""
start_time = dt_util.utcnow()
entity_id = entity_id.lower() if entity_id is not None else None
entity_ids = [entity_id] if entity_id is not None else None

with session_scope(hass=hass) as session:
stmt = _get_last_state_changes_stmt(
_schema_version(hass), number_of_states, entity_id
)
states = list(execute_stmt_lambda_element(session, stmt))
entity_ids = [entity_id] if entity_id is not None else None

return cast(
MutableMapping[str, list[State]],
_sorted_states_to_dict(
@@ -483,6 +487,25 @@ def _get_states_for_entites_stmt(
return stmt


def _generate_most_recent_states_by_date(
run_start: datetime,
utc_point_in_time: datetime,
) -> Subquery:
"""Generate the sub query for the most recent states by data."""
return (
select(
States.entity_id.label("max_entity_id"),
func.max(States.last_updated).label("max_last_updated"),
)
.filter(
(States.last_updated >= run_start)
& (States.last_updated < utc_point_in_time)
)
.group_by(States.entity_id)
.subquery()
)


def _get_states_for_all_stmt(
schema_version: int,
run_start: datetime,
@@ -498,17 +521,8 @@ def _get_states_for_all_stmt(
# query, then filter out unwanted domains as well as applying the custom filter.
# This filtering can't be done in the inner query because the domain column is
# not indexed and we can't control what's in the custom filter.
most_recent_states_by_date = (
select(
States.entity_id.label("max_entity_id"),
func.max(States.last_updated).label("max_last_updated"),
)
.filter(
(States.last_updated >= run_start)
& (States.last_updated < utc_point_in_time)
)
.group_by(States.entity_id)
.subquery()
most_recent_states_by_date = _generate_most_recent_states_by_date(
run_start, utc_point_in_time
)
stmt += lambda q: q.where(
States.state_id
@@ -529,7 +543,7 @@ def _get_states_for_all_stmt(
stmt += _ignore_domains_filter
if filters and filters.has_config:
entity_filter = filters.states_entity_filter()
stmt += lambda q: q.filter(entity_filter)
stmt = stmt.add_criteria(lambda q: q.filter(entity_filter), track_on=[filters])
if join_attributes:
stmt += lambda q: q.outerjoin(
StateAttributes, (States.attributes_id == StateAttributes.attributes_id)

@@ -715,14 +715,13 @@ def _apply_update( # noqa: C901
if engine.dialect.name == SupportedDialect.MYSQL:
# Ensure the row format is dynamic or the index
# unique will be too large
with session_scope(session=session_maker()) as session:
connection = session.connection()
# This is safe to run multiple times and fast since the table is small
connection.execute(
text(
"ALTER TABLE statistics_meta ENGINE=InnoDB, ROW_FORMAT=DYNAMIC"
with contextlib.suppress(SQLAlchemyError):
with session_scope(session=session_maker()) as session:
connection = session.connection()
# This is safe to run multiple times and fast since the table is small
connection.execute(
text("ALTER TABLE statistics_meta ROW_FORMAT=DYNAMIC")
)
)
try:
_create_index(
session_maker, "statistics_meta", "ix_statistics_meta_statistic_id"

@@ -93,6 +93,8 @@ TABLES_TO_CHECK = [

LAST_UPDATED_INDEX = "ix_states_last_updated"
ENTITY_ID_LAST_UPDATED_INDEX = "ix_states_entity_id_last_updated"
EVENTS_CONTEXT_ID_INDEX = "ix_events_context_id"
STATES_CONTEXT_ID_INDEX = "ix_states_context_id"

EMPTY_JSON_OBJECT = "{}"


@@ -20,6 +20,7 @@ from sqlalchemy.exc import SQLAlchemyError, StatementError
from sqlalchemy.orm.session import Session
from sqlalchemy.sql.expression import literal_column, true
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.sql.selectable import Subquery
import voluptuous as vol

from homeassistant.const import (
@@ -484,14 +485,13 @@ def _compile_hourly_statistics_summary_mean_stmt(
start_time: datetime, end_time: datetime
) -> StatementLambdaElement:
"""Generate the summary mean statement for hourly statistics."""
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SUMMARY_MEAN))
stmt += (
lambda q: q.filter(StatisticsShortTerm.start >= start_time)
return lambda_stmt(
lambda: select(*QUERY_STATISTICS_SUMMARY_MEAN)
.filter(StatisticsShortTerm.start >= start_time)
.filter(StatisticsShortTerm.start < end_time)
.group_by(StatisticsShortTerm.metadata_id)
.order_by(StatisticsShortTerm.metadata_id)
)
return stmt


def compile_hourly_statistics(
@@ -984,28 +984,44 @@ def _reduce_statistics_per_month(
def _statistics_during_period_stmt(
start_time: datetime,
end_time: datetime | None,
statistic_ids: list[str] | None,
metadata_ids: list[int] | None,
table: type[Statistics | StatisticsShortTerm],
) -> StatementLambdaElement:
"""Prepare a database query for statistics during a given period.

This prepares a lambda_stmt query, so we don't insert the parameters yet.
"""
if table == StatisticsShortTerm:
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SHORT_TERM))
else:
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS))

stmt += lambda q: q.filter(table.start >= start_time)

stmt = lambda_stmt(
lambda: select(*QUERY_STATISTICS).filter(Statistics.start >= start_time)
)
if end_time is not None:
stmt += lambda q: q.filter(table.start < end_time)
stmt += lambda q: q.filter(Statistics.start < end_time)
if metadata_ids:
stmt += lambda q: q.filter(Statistics.metadata_id.in_(metadata_ids))
stmt += lambda q: q.order_by(Statistics.metadata_id, Statistics.start)
return stmt

if statistic_ids is not None:
stmt += lambda q: q.filter(table.metadata_id.in_(metadata_ids))

stmt += lambda q: q.order_by(table.metadata_id, table.start)
def _statistics_during_period_stmt_short_term(
start_time: datetime,
end_time: datetime | None,
metadata_ids: list[int] | None,
) -> StatementLambdaElement:
"""Prepare a database query for short term statistics during a given period.

This prepares a lambda_stmt query, so we don't insert the parameters yet.
"""
stmt = lambda_stmt(
lambda: select(*QUERY_STATISTICS_SHORT_TERM).filter(
StatisticsShortTerm.start >= start_time
)
)
if end_time is not None:
stmt += lambda q: q.filter(StatisticsShortTerm.start < end_time)
if metadata_ids:
stmt += lambda q: q.filter(StatisticsShortTerm.metadata_id.in_(metadata_ids))
stmt += lambda q: q.order_by(
StatisticsShortTerm.metadata_id, StatisticsShortTerm.start
)
return stmt


@@ -1035,12 +1051,12 @@ def statistics_during_period(

if period == "5minute":
table = StatisticsShortTerm
stmt = _statistics_during_period_stmt_short_term(
start_time, end_time, metadata_ids
)
else:
table = Statistics

stmt = _statistics_during_period_stmt(
start_time, end_time, statistic_ids, metadata_ids, table
)
stmt = _statistics_during_period_stmt(start_time, end_time, metadata_ids)
stats = execute_stmt_lambda_element(session, stmt)

if not stats:
@@ -1072,19 +1088,27 @@ def statistics_during_period(
def _get_last_statistics_stmt(
metadata_id: int,
number_of_stats: int,
table: type[Statistics | StatisticsShortTerm],
) -> StatementLambdaElement:
"""Generate a statement for number_of_stats statistics for a given statistic_id."""
if table == StatisticsShortTerm:
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SHORT_TERM))
else:
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS))
stmt += (
lambda q: q.filter_by(metadata_id=metadata_id)
.order_by(table.metadata_id, table.start.desc())
return lambda_stmt(
lambda: select(*QUERY_STATISTICS)
.filter_by(metadata_id=metadata_id)
.order_by(Statistics.metadata_id, Statistics.start.desc())
.limit(number_of_stats)
)


def _get_last_statistics_short_term_stmt(
metadata_id: int,
number_of_stats: int,
) -> StatementLambdaElement:
"""Generate a statement for number_of_stats short term statistics for a given statistic_id."""
return lambda_stmt(
lambda: select(*QUERY_STATISTICS_SHORT_TERM)
.filter_by(metadata_id=metadata_id)
.order_by(StatisticsShortTerm.metadata_id, StatisticsShortTerm.start.desc())
.limit(number_of_stats)
)
return stmt


def _get_last_statistics(
@@ -1102,7 +1126,10 @@ def _get_last_statistics(
if not metadata:
return {}
metadata_id = metadata[statistic_id][0]
stmt = _get_last_statistics_stmt(metadata_id, number_of_stats, table)
if table == Statistics:
stmt = _get_last_statistics_stmt(metadata_id, number_of_stats)
else:
stmt = _get_last_statistics_short_term_stmt(metadata_id, number_of_stats)
stats = execute_stmt_lambda_element(session, stmt)

if not stats:
@@ -1139,12 +1166,9 @@ def get_last_short_term_statistics(
)


def _latest_short_term_statistics_stmt(
metadata_ids: list[int],
) -> StatementLambdaElement:
"""Create the statement for finding the latest short term stat rows."""
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SHORT_TERM))
most_recent_statistic_row = (
def _generate_most_recent_statistic_row(metadata_ids: list[int]) -> Subquery:
"""Generate the subquery to find the most recent statistic row."""
return (
select(
StatisticsShortTerm.metadata_id,
func.max(StatisticsShortTerm.start).label("start_max"),
@@ -1152,6 +1176,14 @@ def _latest_short_term_statistics_stmt(
.where(StatisticsShortTerm.metadata_id.in_(metadata_ids))
.group_by(StatisticsShortTerm.metadata_id)
).subquery()


def _latest_short_term_statistics_stmt(
metadata_ids: list[int],
) -> StatementLambdaElement:
"""Create the statement for finding the latest short term stat rows."""
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SHORT_TERM))
most_recent_statistic_row = _generate_most_recent_statistic_row(metadata_ids)
stmt += lambda s: s.join(
most_recent_statistic_row,
(

@@ -3,7 +3,7 @@
"name": "SimpliSafe",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/simplisafe",
"requirements": ["simplisafe-python==2022.05.2"],
"requirements": ["simplisafe-python==2022.06.0"],
"codeowners": ["@bachya"],
"iot_class": "cloud_polling",
"dhcp": [

@@ -205,13 +205,15 @@ class SonosMedia:
self, position_info: dict[str, int], force_update: bool = False
) -> None:
"""Update state when playing music tracks."""
if (duration := position_info.get(DURATION_SECONDS)) == 0:
duration = position_info.get(DURATION_SECONDS)
current_position = position_info.get(POSITION_SECONDS)

if not (duration or current_position):
self.clear_position()
return

should_update = force_update
self.duration = duration
current_position = position_info.get(POSITION_SECONDS)

# player started reporting position?
if current_position is not None and self.position is None:

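The SonosMedia hunk above replaces the old duration == 0 shortcut with a broader check: when the position report carries neither a duration nor a position (radio streams, idle players), the position state is cleared. A compact sketch of that guard; the dictionary keys are made up for the example and only mirror the shape of position_info:

def describe_position(position_info: dict[str, int]) -> str:
    duration = position_info.get("track_duration")
    current_position = position_info.get("relative_time")

    # No duration and no position at all: treat it as "nothing playing".
    if not (duration or current_position):
        return "cleared"
    return f"tracking (duration={duration}, position={current_position})"

print(describe_position({}))                                          # cleared
print(describe_position({"track_duration": 0, "relative_time": 0}))   # cleared
print(describe_position({"track_duration": 0, "relative_time": 12}))  # tracking ...
print(describe_position({"track_duration": 240, "relative_time": 5})) # tracking ...
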
@@ -25,7 +25,6 @@ from homeassistant.components.media_player import (
)
from homeassistant.components.media_player.const import (
ATTR_INPUT_SOURCE,
ATTR_MEDIA_ANNOUNCE,
ATTR_MEDIA_ENQUEUE,
MEDIA_TYPE_ALBUM,
MEDIA_TYPE_ARTIST,
@@ -544,9 +543,6 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
"""
# Use 'replace' as the default enqueue option
enqueue = kwargs.get(ATTR_MEDIA_ENQUEUE, MediaPlayerEnqueue.REPLACE)
if kwargs.get(ATTR_MEDIA_ANNOUNCE):
# Temporary workaround until announce support is added
enqueue = MediaPlayerEnqueue.PLAY

if spotify.is_spotify_media_type(media_type):
media_type = spotify.resolve_spotify_media_type(media_type)

@@ -198,13 +198,16 @@ class TomorrowioWeatherEntity(TomorrowioEntity, WeatherEntity):
max_forecasts = MAX_FORECASTS[self.forecast_type]
forecast_count = 0

# Convert utcnow to local to be compatible with tests
today = dt_util.as_local(dt_util.utcnow()).date()

# Set default values (in cases where keys don't exist), None will be
# returned. Override properties per forecast type as needed
for forecast in raw_forecasts:
forecast_dt = dt_util.parse_datetime(forecast[TMRW_ATTR_TIMESTAMP])

# Throw out past data
if forecast_dt.date() < dt_util.utcnow().date():
if dt_util.as_local(forecast_dt).date() < today:
continue

values = forecast["values"]

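The Tomorrow.io change compares forecast dates in local time, so a forecast for "today" is no longer dropped once UTC has already rolled over to the next day. A tiny illustration of the off-by-one the local conversion avoids (zoneinfo is used for the sketch; the integration goes through dt_util):

from datetime import datetime, timezone
from zoneinfo import ZoneInfo

local_tz = ZoneInfo("America/Los_Angeles")

# 23:30 on June 1st in Los Angeles is already June 2nd in UTC.
now_utc = datetime(2022, 6, 2, 6, 30, tzinfo=timezone.utc)
# Daily forecast slot for June 1st, timestamped at local midnight (07:00 UTC).
forecast_dt = datetime(2022, 6, 1, 7, 0, tzinfo=timezone.utc)

# Old comparison: UTC dates -> today's forecast is wrongly thrown out.
print(forecast_dt.date() < now_utc.date())  # True (discarded)

# New comparison: local dates -> the forecast for "today" is kept.
today = now_utc.astimezone(local_tz).date()
print(forecast_dt.astimezone(local_tz).date() < today)  # False (kept)
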
@@ -17,6 +17,8 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import DATA_VELUX, VeluxEntity

PARALLEL_UPDATES = 1


async def async_setup_platform(
hass: HomeAssistant,
@@ -97,12 +99,11 @@ class VeluxCover(VeluxEntity, CoverEntity):

async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
if ATTR_POSITION in kwargs:
position_percent = 100 - kwargs[ATTR_POSITION]
position_percent = 100 - kwargs[ATTR_POSITION]

await self.node.set_position(
Position(position_percent=position_percent), wait_for_completion=False
)
await self.node.set_position(
Position(position_percent=position_percent), wait_for_completion=False
)

async def async_stop_cover(self, **kwargs):
"""Stop the cover."""

@@ -10,6 +10,8 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import DATA_VELUX, VeluxEntity

PARALLEL_UPDATES = 1


async def async_setup_platform(
hass: HomeAssistant,

@@ -10,6 +10,8 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import _LOGGER, DATA_VELUX

PARALLEL_UPDATES = 1


async def async_setup_platform(
hass: HomeAssistant,

@@ -3,7 +3,7 @@
"name": "Wallbox",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/wallbox",
"requirements": ["wallbox==0.4.4"],
"requirements": ["wallbox==0.4.9"],
"ssdp": [],
"zeroconf": [],
"homekit": {},

@@ -3,7 +3,7 @@
"name": "YoLink",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/yolink",
"requirements": ["yolink-api==0.0.5"],
"requirements": ["yolink-api==0.0.6"],
"dependencies": ["auth", "application_credentials"],
"codeowners": ["@matrixd2"],
"iot_class": "cloud_push"

@@ -7,7 +7,7 @@ from .backports.enum import StrEnum

MAJOR_VERSION: Final = 2022
MINOR_VERSION: Final = 6
PATCH_VERSION: Final = "0"
PATCH_VERSION: Final = "3"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)

@@ -1369,19 +1369,19 @@ def multiply(value, amount, default=_SENTINEL):

def logarithm(value, base=math.e, default=_SENTINEL):
"""Filter and function to get logarithm of the value with a specific base."""
try:
value_float = float(value)
except (ValueError, TypeError):
if default is _SENTINEL:
raise_no_default("log", value)
return default
try:
base_float = float(base)
except (ValueError, TypeError):
if default is _SENTINEL:
raise_no_default("log", base)
return default
return math.log(value_float, base_float)
try:
value_float = float(value)
return math.log(value_float, base_float)
except (ValueError, TypeError):
if default is _SENTINEL:
raise_no_default("log", value)
return default


def sine(value, default=_SENTINEL):

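The template change above moves the math.log call inside the try block that guards the value, so a math domain error (for example a negative input) now falls back to the filter's default instead of propagating. A small standalone sketch of the pattern, simplified and without the template engine's raise_no_default helper:

import math

_SENTINEL = object()

def logarithm(value, base=math.e, default=_SENTINEL):
    # Return log(value, base), falling back to default on bad input.
    try:
        base_float = float(base)
    except (ValueError, TypeError):
        if default is _SENTINEL:
            raise ValueError(f"log has no default for base {base!r}")
        return default
    try:
        value_float = float(value)
        return math.log(value_float, base_float)
    except (ValueError, TypeError):
        # float("abc") and math.log(-1) both land here now.
        if default is _SENTINEL:
            raise ValueError(f"log has no default for value {value!r}")
        return default

print(logarithm(100, 10))                        # 2.0
print(logarithm("not a number", 10, default=0))  # 0
print(logarithm(-5, 10, default=0))              # 0 -- previously this raised
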
@@ -15,7 +15,7 @@ ciso8601==2.2.0
cryptography==36.0.2
fnvhash==0.1.0
hass-nabucasa==0.54.0
home-assistant-frontend==20220531.0
home-assistant-frontend==20220601.0
httpx==0.23.0
ifaddr==0.1.7
jinja2==3.1.2

@@ -122,7 +122,7 @@ aioasuswrt==1.4.0
aioazuredevops==1.3.5

# homeassistant.components.baf
aiobafi6==0.3.0
aiobafi6==0.5.0

# homeassistant.components.aws
aiobotocore==2.1.0
@@ -187,7 +187,7 @@ aiolifx==0.7.1
aiolifx_effects==0.2.2

# homeassistant.components.lookin
aiolookin==0.1.0
aiolookin==0.1.1

# homeassistant.components.lyric
aiolyric==1.0.8
@@ -394,7 +394,7 @@ beautifulsoup4==4.11.1
bellows==0.30.0

# homeassistant.components.bmw_connected_drive
bimmer_connected==0.9.3
bimmer_connected==0.9.4

# homeassistant.components.bizkaibus
bizkaibus==0.1.1
@@ -822,7 +822,7 @@ hole==0.7.0
holidays==0.13

# homeassistant.components.frontend
home-assistant-frontend==20220531.0
home-assistant-frontend==20220601.0

# homeassistant.components.home_connect
homeconnect==0.7.0
@@ -1538,7 +1538,7 @@ pyheos==0.7.2
pyhik==0.3.0

# homeassistant.components.hive
pyhiveapi==0.5.4
pyhiveapi==0.5.5

# homeassistant.components.homematic
pyhomematic==0.1.77
@@ -1550,7 +1550,7 @@ pyhomeworks==0.0.6
pyialarm==1.9.0

# homeassistant.components.ialarm_xr
pyialarmxr==1.0.18
pyialarmxr-homeassistant==1.0.18

# homeassistant.components.icloud
pyicloud==1.0.0
@@ -1673,7 +1673,7 @@ pymyq==3.1.4
pymysensors==0.22.1

# homeassistant.components.netgear
pynetgear==0.10.0
pynetgear==0.10.4

# homeassistant.components.netio
pynetio==0.1.9.1
@@ -1735,7 +1735,7 @@ pyownet==0.10.0.post1
pypca==0.0.7

# homeassistant.components.lcn
pypck==0.7.14
pypck==0.7.15

# homeassistant.components.pjlink
pypjlink2==1.2.1
@@ -2065,7 +2065,7 @@ raincloudy==0.0.7
raspyrfm-client==1.2.8

# homeassistant.components.rainmachine
regenmaschine==2022.05.1
regenmaschine==2022.06.0

# homeassistant.components.renault
renault-api==0.1.11
@@ -2168,7 +2168,7 @@ simplehound==0.3
simplepush==1.1.4

# homeassistant.components.simplisafe
simplisafe-python==2022.05.2
simplisafe-python==2022.06.0

# homeassistant.components.sisyphus
sisyphus-control==3.1.2
@@ -2418,7 +2418,7 @@ vultr==0.1.2
wakeonlan==2.0.1

# homeassistant.components.wallbox
wallbox==0.4.4
wallbox==0.4.9

# homeassistant.components.waqi
waqiasync==1.0.0
@@ -2486,7 +2486,7 @@ yeelight==0.7.10
yeelightsunflower==0.0.10

# homeassistant.components.yolink
yolink-api==0.0.5
yolink-api==0.0.6

# homeassistant.components.youless
youless-api==0.16

@@ -109,7 +109,7 @@ aioasuswrt==1.4.0
aioazuredevops==1.3.5

# homeassistant.components.baf
aiobafi6==0.3.0
aiobafi6==0.5.0

# homeassistant.components.aws
aiobotocore==2.1.0
@@ -159,7 +159,7 @@ aiohue==4.4.1
aiokafka==0.6.0

# homeassistant.components.lookin
aiolookin==0.1.0
aiolookin==0.1.1

# homeassistant.components.lyric
aiolyric==1.0.8
@@ -309,7 +309,7 @@ beautifulsoup4==4.11.1
bellows==0.30.0

# homeassistant.components.bmw_connected_drive
bimmer_connected==0.9.3
bimmer_connected==0.9.4

# homeassistant.components.blebox
blebox_uniapi==1.3.3
@@ -589,7 +589,7 @@ hole==0.7.0
holidays==0.13

# homeassistant.components.frontend
home-assistant-frontend==20220531.0
home-assistant-frontend==20220601.0

# homeassistant.components.home_connect
homeconnect==0.7.0
@@ -1029,7 +1029,7 @@ pyhaversion==22.4.1
pyheos==0.7.2

# homeassistant.components.hive
pyhiveapi==0.5.4
pyhiveapi==0.5.5

# homeassistant.components.homematic
pyhomematic==0.1.77
@@ -1038,7 +1038,7 @@ pyhomematic==0.1.77
pyialarm==1.9.0

# homeassistant.components.ialarm_xr
pyialarmxr==1.0.18
pyialarmxr-homeassistant==1.0.18

# homeassistant.components.icloud
pyicloud==1.0.0
@@ -1131,7 +1131,7 @@ pymyq==3.1.4
pymysensors==0.22.1

# homeassistant.components.netgear
pynetgear==0.10.0
pynetgear==0.10.4

# homeassistant.components.nina
pynina==0.1.8
@@ -1178,7 +1178,7 @@ pyowm==3.2.0
pyownet==0.10.0.post1

# homeassistant.components.lcn
pypck==0.7.14
pypck==0.7.15

# homeassistant.components.plaato
pyplaato==0.0.18
@@ -1364,7 +1364,7 @@ rachiopy==1.0.3
radios==0.1.1

# homeassistant.components.rainmachine
regenmaschine==2022.05.1
regenmaschine==2022.06.0

# homeassistant.components.renault
renault-api==0.1.11
@@ -1425,7 +1425,7 @@ sharkiq==0.0.1
simplehound==0.3

# homeassistant.components.simplisafe
simplisafe-python==2022.05.2
simplisafe-python==2022.06.0

# homeassistant.components.slack
slackclient==2.5.0
@@ -1591,7 +1591,7 @@ vultr==0.1.2
wakeonlan==2.0.1

# homeassistant.components.wallbox
wallbox==0.4.4
wallbox==0.4.9

# homeassistant.components.folder_watcher
watchdog==2.1.8
@@ -1638,7 +1638,7 @@ yalexs==1.1.25
yeelight==0.7.10

# homeassistant.components.yolink
yolink-api==0.0.5
yolink-api==0.0.6

# homeassistant.components.youless
youless-api==0.16

@@ -2,24 +2,24 @@
# ==============================================================================
# Take down the S6 supervision tree when Home Assistant fails
# ==============================================================================
declare RESTART_EXIT_CODE 100
declare SIGNAL_EXIT_CODE 256
declare SIGTERM 15
declare RESTART_EXIT_CODE=100
declare SIGNAL_EXIT_CODE=256
declare SIGTERM=15
declare APP_EXIT_CODE=${1}
declare SYS_EXIT_CODE=${2+x}
declare SIGNAL_NO=${2}
declare NEW_EXIT_CODE=

bashio::log.info "Home Assistant Core finish process exit code ${1}"
bashio::log.info "Home Assistant Core finish process exit code ${APP_EXIT_CODE}"

if [[ ${APP_EXIT_CODE} -eq ${RESTART_EXIT_CODE} ]]; then
exit 0
elif [[ ${APP_EXIT_CODE} -eq ${SIGNAL_EXIT_CODE} ]]; then
bashio::log.info "Home Assistant Core finish process received signal ${APP_EXIT_CODE}"
bashio::log.info "Home Assistant Core finish process received signal ${SIGNAL_NO}"

NEW_EXIT_CODE=$((128 + SYS_EXIT_CODE))
NEW_EXIT_CODE=$((128 + SIGNAL_NO))
echo ${NEW_EXIT_CODE} > /run/s6-linux-init-container-results/exitcode

if [[ ${NEW_EXIT_CODE} -eq ${SIGTERM} ]]; then
if [[ ${SIGNAL_NO} -eq ${SIGTERM} ]]; then
/run/s6/basedir/bin/halt
fi
else

@@ -1,5 +1,5 @@
[metadata]
version = 2022.6.0
version = 2022.6.3
url = https://www.home-assistant.io/

[options]

@@ -16,7 +16,6 @@ from homeassistant.components.google import CONF_TRACK_NEW, DOMAIN
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import utcnow

from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
@@ -136,7 +135,10 @@ def token_scopes() -> list[str]:
@pytest.fixture
def token_expiry() -> datetime.datetime:
"""Expiration time for credentials used in the test."""
return utcnow() + datetime.timedelta(days=7)
# OAuth library returns an offset-naive timestamp
return datetime.datetime.fromtimestamp(
datetime.datetime.utcnow().timestamp()
) + datetime.timedelta(hours=1)


@pytest.fixture

@@ -8,6 +8,7 @@ from typing import Any
from unittest.mock import Mock, patch

from aiohttp.client_exceptions import ClientError
from freezegun.api import FrozenDateTimeFactory
from oauth2client.client import (
FlowExchangeError,
OAuth2Credentials,
@@ -94,11 +95,13 @@ async def fire_alarm(hass, point_in_time):
await hass.async_block_till_done()


@pytest.mark.freeze_time("2022-06-03 15:19:59-00:00")
async def test_full_flow_yaml_creds(
hass: HomeAssistant,
mock_code_flow: Mock,
mock_exchange: Mock,
component_setup: ComponentSetup,
freezer: FrozenDateTimeFactory,
) -> None:
"""Test successful creds setup."""
assert await component_setup()
@@ -115,8 +118,8 @@ async def test_full_flow_yaml_creds(
"homeassistant.components.google.async_setup_entry", return_value=True
) as mock_setup:
# Run one tick to invoke the credential exchange check
now = utcnow()
await fire_alarm(hass, now + CODE_CHECK_ALARM_TIMEDELTA)
freezer.tick(CODE_CHECK_ALARM_TIMEDELTA)
await fire_alarm(hass, datetime.datetime.utcnow())
await hass.async_block_till_done()
result = await hass.config_entries.flow.async_configure(
flow_id=result["flow_id"]
@@ -127,12 +130,11 @@ async def test_full_flow_yaml_creds(
assert "data" in result
data = result["data"]
assert "token" in data
assert 0 < data["token"]["expires_in"] < 8 * 86400
assert (
datetime.datetime.now().timestamp()
<= data["token"]["expires_at"]
< (datetime.datetime.now() + datetime.timedelta(days=8)).timestamp()
data["token"]["expires_in"]
== 60 * 60 - CODE_CHECK_ALARM_TIMEDELTA.total_seconds()
)
assert data["token"]["expires_at"] == 1654273199.0
data["token"].pop("expires_at")
data["token"].pop("expires_in")
assert data == {

@@ -5,6 +5,7 @@ from http import HTTPStatus
import json
from unittest.mock import patch, sentinel

from freezegun import freeze_time
import pytest
from pytest import approx

@@ -246,15 +247,11 @@ def test_get_significant_states_exclude(hass_history):
def test_get_significant_states_exclude_include_entity(hass_history):
"""Test significant states when excluding domains and include entities.

We should not get back every thermostat and media player test changes.
We should not get back every thermostat change unless its specifically included
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]

config = history.CONFIG_SCHEMA(
{
@@ -340,14 +337,12 @@ def test_get_significant_states_include(hass_history):
def test_get_significant_states_include_exclude_domain(hass_history):
"""Test if significant states when excluding and including domains.

We should not get back any changes since we include only the
media_player domain but also exclude it.
We should get back all the media_player domain changes
only since the include wins over the exclude but will
exclude everything else.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
@@ -372,7 +367,6 @@ def test_get_significant_states_include_exclude_entity(hass_history):
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test"]
@@ -394,12 +388,12 @@ def test_get_significant_states_include_exclude(hass_history):
def test_get_significant_states_include_exclude(hass_history):
"""Test if significant states when in/excluding domains and entities.

We should only get back changes of the media_player.test2 entity.
We should get back changes of the media_player.test2, media_player.test3,
and thermostat.test.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]

@@ -935,6 +929,141 @@ async def test_statistics_during_period(
}


@pytest.mark.parametrize(
"units, attributes, state, value",
[
(IMPERIAL_SYSTEM, POWER_SENSOR_ATTRIBUTES, 10, 10000),
(METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, 10, 10000),
(IMPERIAL_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, 10, 50),
(METRIC_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, 10, 10),
(IMPERIAL_SYSTEM, PRESSURE_SENSOR_ATTRIBUTES, 1000, 14.503774389728312),
(METRIC_SYSTEM, PRESSURE_SENSOR_ATTRIBUTES, 1000, 100000),
],
)
async def test_statistics_during_period_in_the_past(
hass, hass_ws_client, recorder_mock, units, attributes, state, value
):
"""Test statistics_during_period in the past."""
hass.config.set_time_zone("UTC")
now = dt_util.utcnow().replace()

hass.config.units = units
await async_setup_component(hass, "history", {})
await async_setup_component(hass, "sensor", {})
await async_recorder_block_till_done(hass)

past = now - timedelta(days=3)

with freeze_time(past):
hass.states.async_set("sensor.test", state, attributes=attributes)
await async_wait_recording_done(hass)

sensor_state = hass.states.get("sensor.test")
assert sensor_state.last_updated == past

stats_top_of_hour = past.replace(minute=0, second=0, microsecond=0)
stats_start = past.replace(minute=55)
do_adhoc_statistics(hass, start=stats_start)
await async_wait_recording_done(hass)

client = await hass_ws_client()
await client.send_json(
{
"id": 1,
"type": "history/statistics_during_period",
"start_time": now.isoformat(),
"end_time": now.isoformat(),
"statistic_ids": ["sensor.test"],
"period": "hour",
}
)
response = await client.receive_json()
assert response["success"]
assert response["result"] == {}

await client.send_json(
{
"id": 2,
"type": "history/statistics_during_period",
"start_time": now.isoformat(),
"statistic_ids": ["sensor.test"],
"period": "5minute",
}
)
response = await client.receive_json()
assert response["success"]
assert response["result"] == {}

past = now - timedelta(days=3)
await client.send_json(
{
"id": 3,
"type": "history/statistics_during_period",
"start_time": past.isoformat(),
"statistic_ids": ["sensor.test"],
"period": "5minute",
}
)
response = await client.receive_json()
assert response["success"]
assert response["result"] == {
"sensor.test": [
{
"statistic_id": "sensor.test",
"start": stats_start.isoformat(),
"end": (stats_start + timedelta(minutes=5)).isoformat(),
"mean": approx(value),
"min": approx(value),
"max": approx(value),
"last_reset": None,
"state": None,
"sum": None,
}
]
}

start_of_day = stats_top_of_hour.replace(hour=0, minute=0)
await client.send_json(
{
"id": 4,
"type": "history/statistics_during_period",
"start_time": stats_top_of_hour.isoformat(),
"statistic_ids": ["sensor.test"],
"period": "day",
}
)
response = await client.receive_json()
assert response["success"]
assert response["result"] == {
"sensor.test": [
{
"statistic_id": "sensor.test",
"start": start_of_day.isoformat(),
"end": (start_of_day + timedelta(days=1)).isoformat(),
"mean": approx(value),
"min": approx(value),
"max": approx(value),
"last_reset": None,
"state": None,
"sum": None,
}
]
}

await client.send_json(
{
"id": 5,
"type": "history/statistics_during_period",
"start_time": now.isoformat(),
"statistic_ids": ["sensor.test"],
"period": "5minute",
}
)
response = await client.receive_json()
assert response["success"]
assert response["result"] == {}


async def test_statistics_during_period_bad_start_time(
hass, hass_ws_client, recorder_mock
):

@@ -357,7 +357,7 @@ async def test_measure_multiple(hass, recorder_mock):
await hass.async_block_till_done()

assert hass.states.get("sensor.sensor1").state == "0.5"
assert hass.states.get("sensor.sensor2").state == STATE_UNKNOWN
assert hass.states.get("sensor.sensor2").state == "0.0"
assert hass.states.get("sensor.sensor3").state == "2"
assert hass.states.get("sensor.sensor4").state == "50.0"


@@ -33,6 +33,9 @@ async def test_import_flow(hass):
"AccessToken": "mock-access-token",
},
},
), patch(
"homeassistant.components.hive.config_flow.Auth.device_registration",
return_value=True,
), patch(
"homeassistant.components.hive.config_flow.Auth.getDeviceData",
return_value=[
@@ -93,6 +96,9 @@ async def test_user_flow(hass):
"AccessToken": "mock-access-token",
},
},
), patch(
"homeassistant.components.hive.config_flow.Auth.device_registration",
return_value=True,
), patch(
"homeassistant.components.hive.config_flow.Auth.getDeviceData",
return_value=[
@@ -172,6 +178,9 @@ async def test_user_flow_2fa(hass):
"AccessToken": "mock-access-token",
},
},
), patch(
"homeassistant.components.hive.config_flow.Auth.device_registration",
return_value=True,
), patch(
"homeassistant.components.hive.config_flow.Auth.getDeviceData",
return_value=[
@@ -256,6 +265,9 @@ async def test_reauth_flow(hass):
"AccessToken": "mock-access-token",
},
},
), patch(
"homeassistant.components.hive.config_flow.Auth.device_registration",
return_value=True,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
@@ -361,6 +373,9 @@ async def test_user_flow_2fa_send_new_code(hass):
"AccessToken": "mock-access-token",
},
},
), patch(
"homeassistant.components.hive.config_flow.Auth.device_registration",
return_value=True,
), patch(
"homeassistant.components.hive.config_flow.Auth.getDeviceData",
return_value=[

@@ -68,7 +68,6 @@ def mock_humanify(hass_, rows):
return list(
processor._humanify(
rows,
None,
ent_reg,
logbook_run,
context_augmenter,
