mirror of
https://github.com/home-assistant/core.git
synced 2026-05-05 12:24:48 +02:00
Compare commits
20 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 74b5db9ca5 | |||
| 6f4225b51d | |||
| b524cc9c56 | |||
| a6d50ba89b | |||
| 228de5807c | |||
| d4b40154e5 | |||
| 6e3aa004c4 | |||
| 149cc5cbeb | |||
| 37acf9b165 | |||
| 5c4ba23ca9 | |||
| abc42efe08 | |||
| 17ebc85b62 | |||
| 681cd92627 | |||
| 7fc4b196bd | |||
| f114419359 | |||
| 751f041009 | |||
| 44b1b87e13 | |||
| 1ef3d856a6 | |||
| 2707bbeb23 | |||
| f89ba74410 |
@@ -2,6 +2,6 @@
|
||||
"domain": "apprise",
|
||||
"name": "Apprise",
|
||||
"documentation": "https://www.home-assistant.io/integrations/apprise",
|
||||
"requirements": ["apprise==0.8.8"],
|
||||
"requirements": ["apprise==0.8.9"],
|
||||
"codeowners": ["@caronc"]
|
||||
}
|
||||
|
||||
@@ -28,12 +28,8 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
|
||||
def get_service(hass, config, discovery_info=None):
|
||||
"""Get the Apprise notification service."""
|
||||
|
||||
# Create our Apprise Asset Object
|
||||
asset = apprise.AppriseAsset(async_mode=False)
|
||||
|
||||
# Create our Apprise Instance (reference our asset)
|
||||
a_obj = apprise.Apprise(asset=asset)
|
||||
a_obj = apprise.Apprise()
|
||||
|
||||
if config.get(CONF_FILE):
|
||||
# Sourced from a Configuration File
|
||||
|
||||
@@ -10,7 +10,7 @@ from homeassistant.config import async_log_exception, config_without_domain
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_per_platform
|
||||
from homeassistant.helpers.condition import async_validate_condition_config
|
||||
from homeassistant.helpers.script import async_validate_action_config
|
||||
from homeassistant.helpers.script import async_validate_actions_config
|
||||
from homeassistant.helpers.trigger import async_validate_trigger_config
|
||||
from homeassistant.loader import IntegrationNotFound
|
||||
|
||||
@@ -36,9 +36,7 @@ async def async_validate_config_item(hass, config, full_config=None):
|
||||
]
|
||||
)
|
||||
|
||||
config[CONF_ACTION] = await asyncio.gather(
|
||||
*[async_validate_action_config(hass, action) for action in config[CONF_ACTION]]
|
||||
)
|
||||
config[CONF_ACTION] = await async_validate_actions_config(hass, config[CONF_ACTION])
|
||||
|
||||
return config
|
||||
|
||||
|
||||
@@ -86,7 +86,7 @@ SUPPORT_CAST = (
|
||||
|
||||
|
||||
ENTITY_SCHEMA = vol.All(
|
||||
cv.deprecated(CONF_HOST, invalidation_version="0.116"),
|
||||
cv.deprecated(CONF_HOST),
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Exclusive(CONF_HOST, "device_identifier"): cv.string,
|
||||
@@ -97,7 +97,7 @@ ENTITY_SCHEMA = vol.All(
|
||||
)
|
||||
|
||||
PLATFORM_SCHEMA = vol.All(
|
||||
cv.deprecated(CONF_HOST, invalidation_version="0.116"),
|
||||
cv.deprecated(CONF_HOST),
|
||||
PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Exclusive(CONF_HOST, "device_identifier"): cv.string,
|
||||
|
||||
@@ -91,6 +91,7 @@ class GroupIntegrationRegistry:
|
||||
"""Class to hold a registry of integrations."""
|
||||
|
||||
on_off_mapping: Dict[str, str] = {STATE_ON: STATE_OFF}
|
||||
off_on_mapping: Dict[str, str] = {STATE_OFF: STATE_ON}
|
||||
on_states_by_domain: Dict[str, Set] = {}
|
||||
exclude_domains: Set = set()
|
||||
|
||||
@@ -99,11 +100,14 @@ class GroupIntegrationRegistry:
|
||||
self.exclude_domains.add(current_domain.get())
|
||||
|
||||
def on_off_states(self, on_states: Set, off_state: str) -> None:
|
||||
"""Registry on and off states for the current domain."""
|
||||
"""Register on and off states for the current domain."""
|
||||
for on_state in on_states:
|
||||
if on_state not in self.on_off_mapping:
|
||||
self.on_off_mapping[on_state] = off_state
|
||||
|
||||
if len(on_states) == 1 and off_state not in self.off_on_mapping:
|
||||
self.off_on_mapping[off_state] = list(on_states)[0]
|
||||
|
||||
self.on_states_by_domain[current_domain.get()] = set(on_states)
|
||||
|
||||
|
||||
@@ -543,6 +547,7 @@ class Group(Entity):
|
||||
data = {ATTR_ENTITY_ID: self.tracking, ATTR_ORDER: self._order}
|
||||
if not self.user_defined:
|
||||
data[ATTR_AUTO] = True
|
||||
|
||||
return data
|
||||
|
||||
@property
|
||||
@@ -577,6 +582,7 @@ class Group(Entity):
|
||||
return
|
||||
|
||||
excluded_domains = self.hass.data[REG_KEY].exclude_domains
|
||||
|
||||
tracking = []
|
||||
trackable = []
|
||||
for ent_id in entity_ids:
|
||||
@@ -592,6 +598,7 @@ class Group(Entity):
|
||||
@callback
|
||||
def _async_start(self, *_):
|
||||
"""Start tracking members and write state."""
|
||||
self._reset_tracked_state()
|
||||
self._async_start_tracking()
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -625,15 +632,14 @@ class Group(Entity):
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Handle addition to Home Assistant."""
|
||||
if self.tracking:
|
||||
self._reset_tracked_state()
|
||||
|
||||
if self.hass.state != CoreState.running:
|
||||
self.hass.bus.async_listen_once(
|
||||
EVENT_HOMEASSISTANT_START, self._async_start
|
||||
)
|
||||
return
|
||||
|
||||
if self.tracking:
|
||||
self._reset_tracked_state()
|
||||
self._async_start_tracking()
|
||||
|
||||
async def async_will_remove_from_hass(self):
|
||||
@@ -671,19 +677,26 @@ class Group(Entity):
|
||||
if state is not None:
|
||||
self._see_state(state)
|
||||
|
||||
def _see_state(self, state):
|
||||
def _see_state(self, new_state):
|
||||
"""Keep track of the the state."""
|
||||
entity_id = state.entity_id
|
||||
domain = state.domain
|
||||
entity_id = new_state.entity_id
|
||||
domain = new_state.domain
|
||||
state = new_state.state
|
||||
registry = self.hass.data[REG_KEY]
|
||||
self._assumed[entity_id] = new_state.attributes.get(ATTR_ASSUMED_STATE)
|
||||
|
||||
domain_on_state = self.hass.data[REG_KEY].on_states_by_domain.get(
|
||||
domain, {STATE_ON}
|
||||
)
|
||||
self._on_off[entity_id] = state.state in domain_on_state
|
||||
self._assumed[entity_id] = state.attributes.get(ATTR_ASSUMED_STATE)
|
||||
|
||||
if domain in self.hass.data[REG_KEY].on_states_by_domain:
|
||||
self._on_states.update(domain_on_state)
|
||||
if domain not in registry.on_states_by_domain:
|
||||
# Handle the group of a group case
|
||||
if state in registry.on_off_mapping:
|
||||
self._on_states.add(state)
|
||||
elif state in registry.off_on_mapping:
|
||||
self._on_states.add(registry.off_on_mapping[state])
|
||||
self._on_off[entity_id] = state in registry.on_off_mapping
|
||||
else:
|
||||
entity_on_state = registry.on_states_by_domain[domain]
|
||||
if domain in self.hass.data[REG_KEY].on_states_by_domain:
|
||||
self._on_states.update(entity_on_state)
|
||||
self._on_off[entity_id] = state in entity_on_state
|
||||
|
||||
@callback
|
||||
def _async_update_group_state(self, tr_state=None):
|
||||
@@ -726,7 +739,6 @@ class Group(Entity):
|
||||
# on state, we use STATE_ON/STATE_OFF
|
||||
else:
|
||||
on_state = STATE_ON
|
||||
|
||||
group_is_on = self.mode(self._on_off.values())
|
||||
if group_is_on:
|
||||
self._state = on_state
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""Offer state listening automation rules."""
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Dict, Optional
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -25,18 +25,43 @@ CONF_ENTITY_ID = "entity_id"
|
||||
CONF_FROM = "from"
|
||||
CONF_TO = "to"
|
||||
|
||||
TRIGGER_SCHEMA = vol.Schema(
|
||||
BASE_SCHEMA = {
|
||||
vol.Required(CONF_PLATFORM): "state",
|
||||
vol.Required(CONF_ENTITY_ID): cv.entity_ids,
|
||||
vol.Optional(CONF_FOR): cv.positive_time_period_template,
|
||||
vol.Optional(CONF_ATTRIBUTE): cv.match_all,
|
||||
}
|
||||
|
||||
TRIGGER_STATE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PLATFORM): "state",
|
||||
vol.Required(CONF_ENTITY_ID): cv.entity_ids,
|
||||
**BASE_SCHEMA,
|
||||
# These are str on purpose. Want to catch YAML conversions
|
||||
vol.Optional(CONF_FROM): vol.Any(str, [str]),
|
||||
vol.Optional(CONF_TO): vol.Any(str, [str]),
|
||||
vol.Optional(CONF_FOR): cv.positive_time_period_template,
|
||||
vol.Optional(CONF_ATTRIBUTE): cv.match_all,
|
||||
}
|
||||
)
|
||||
|
||||
TRIGGER_ATTRIBUTE_SCHEMA = vol.Schema(
|
||||
{
|
||||
**BASE_SCHEMA,
|
||||
vol.Optional(CONF_FROM): cv.match_all,
|
||||
vol.Optional(CONF_TO): cv.match_all,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def TRIGGER_SCHEMA(value: Any) -> dict: # pylint: disable=invalid-name
|
||||
"""Validate trigger."""
|
||||
if not isinstance(value, dict):
|
||||
raise vol.Invalid("Expected a dictionary")
|
||||
|
||||
# We use this approach instead of vol.Any because
|
||||
# this gives better error messages.
|
||||
if CONF_ATTRIBUTE in value:
|
||||
return TRIGGER_ATTRIBUTE_SCHEMA(value)
|
||||
|
||||
return TRIGGER_STATE_SCHEMA(value)
|
||||
|
||||
|
||||
async def async_attach_trigger(
|
||||
hass: HomeAssistant,
|
||||
|
||||
@@ -23,7 +23,12 @@ def temperature_unit(block_info: dict) -> str:
|
||||
def shelly_naming(self, block, entity_type: str):
|
||||
"""Naming for switch and sensors."""
|
||||
|
||||
entity_name = self.wrapper.name
|
||||
if not block:
|
||||
return f"{entity_name} {self.description.name}"
|
||||
|
||||
channels = 0
|
||||
mode = "relays"
|
||||
if "num_outputs" in self.wrapper.device.shelly:
|
||||
channels = self.wrapper.device.shelly["num_outputs"]
|
||||
if (
|
||||
@@ -31,12 +36,21 @@ def shelly_naming(self, block, entity_type: str):
|
||||
and self.wrapper.device.settings["mode"] == "roller"
|
||||
):
|
||||
channels = 1
|
||||
|
||||
entity_name = self.wrapper.name
|
||||
if block.type == "emeter" and "num_emeters" in self.wrapper.device.shelly:
|
||||
channels = self.wrapper.device.shelly["num_emeters"]
|
||||
mode = "emeters"
|
||||
if channels > 1 and block.type != "device":
|
||||
entity_name = self.wrapper.device.settings["relays"][int(block.channel)]["name"]
|
||||
# Shelly EM (SHEM) with firmware v1.8.1 doesn't have "name" key; will be fixed in next firmware release
|
||||
if "name" in self.wrapper.device.settings[mode][int(block.channel)]:
|
||||
entity_name = self.wrapper.device.settings[mode][int(block.channel)]["name"]
|
||||
else:
|
||||
entity_name = None
|
||||
if not entity_name:
|
||||
entity_name = f"{self.wrapper.name} channel {int(block.channel)+1}"
|
||||
if self.wrapper.model == "SHEM-3":
|
||||
base = ord("A")
|
||||
else:
|
||||
base = ord("1")
|
||||
entity_name = f"{self.wrapper.name} channel {chr(int(block.channel)+base)}"
|
||||
|
||||
if entity_type == "switch":
|
||||
return entity_name
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"name": "Shelly",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/shelly",
|
||||
"requirements": ["aioshelly==0.3.3"],
|
||||
"zeroconf": [{"type": "_http._tcp.local.", "name":"shelly*"}],
|
||||
"requirements": ["aioshelly==0.3.4"],
|
||||
"zeroconf": [{ "type": "_http._tcp.local.", "name": "shelly*" }],
|
||||
"codeowners": ["@balloob", "@bieniu"]
|
||||
}
|
||||
|
||||
@@ -34,15 +34,18 @@ async def async_setup(hass, config) -> bool:
|
||||
"""Set up the SmartHab platform."""
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
sh_conf = config.get(DOMAIN)
|
||||
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data=sh_conf,
|
||||
if DOMAIN not in config:
|
||||
return True
|
||||
|
||||
if not hass.config_entries.async_entries(DOMAIN):
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data=config[DOMAIN],
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -16,6 +16,9 @@ _LOGGER = logging.getLogger(__name__)
|
||||
class SmartHabConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
"""SmartHab config flow."""
|
||||
|
||||
VERSION = 1
|
||||
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
|
||||
|
||||
def _show_setup_form(self, user_input=None, errors=None):
|
||||
"""Show the setup form to the user."""
|
||||
|
||||
@@ -72,6 +75,6 @@ class SmartHabConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
|
||||
return self._show_setup_form(user_input, errors)
|
||||
|
||||
async def async_step_import(self, user_input):
|
||||
async def async_step_import(self, import_info):
|
||||
"""Handle import from legacy config."""
|
||||
return await self.async_step_user(user_input)
|
||||
return await self.async_step_user(import_info)
|
||||
|
||||
@@ -87,7 +87,7 @@ SENSOR_TYPES = {
|
||||
None,
|
||||
False,
|
||||
],
|
||||
"swap_free": ["Swap free", DATA_MEBIBYTES, "mdi:harddisk", None, True],
|
||||
"swap_free": ["Swap free", DATA_MEBIBYTES, "mdi:harddisk", None, False],
|
||||
"swap_use": ["Swap use", DATA_MEBIBYTES, "mdi:harddisk", None, False],
|
||||
"swap_use_percent": ["Swap use (percent)", PERCENTAGE, "mdi:harddisk", None, False],
|
||||
}
|
||||
|
||||
@@ -234,9 +234,7 @@ class TemplateEntity(Entity):
|
||||
else:
|
||||
self._self_ref_update_count = 0
|
||||
|
||||
# If we need to make this less sensitive in the future,
|
||||
# change the '>=' to a '>' here.
|
||||
if self._self_ref_update_count >= len(self._template_attrs):
|
||||
if self._self_ref_update_count > len(self._template_attrs):
|
||||
for update in updates:
|
||||
_LOGGER.warning(
|
||||
"Template loop detected while processing event: %s, skipping template render for Template[%s]",
|
||||
|
||||
@@ -11,7 +11,7 @@ from typing import Dict, Optional
|
||||
|
||||
from aiohttp import web
|
||||
import mutagen
|
||||
from mutagen.id3 import TextFrame as ID3Text
|
||||
from mutagen.id3 import ID3, TextFrame as ID3Text
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
@@ -468,9 +468,14 @@ class SpeechManager:
|
||||
try:
|
||||
tts_file = mutagen.File(data_bytes)
|
||||
if tts_file is not None:
|
||||
tts_file["artist"] = ID3Text(encoding=3, text=artist)
|
||||
tts_file["album"] = ID3Text(encoding=3, text=album)
|
||||
tts_file["title"] = ID3Text(encoding=3, text=message)
|
||||
if isinstance(tts_file.tags, ID3):
|
||||
tts_file["artist"] = ID3Text(encoding=3, text=artist)
|
||||
tts_file["album"] = ID3Text(encoding=3, text=album)
|
||||
tts_file["title"] = ID3Text(encoding=3, text=message)
|
||||
else:
|
||||
tts_file["artist"] = artist
|
||||
tts_file["album"] = album
|
||||
tts_file["title"] = message
|
||||
tts_file.save(data_bytes)
|
||||
except mutagen.MutagenError as err:
|
||||
_LOGGER.error("ID3 tag error: %s", err)
|
||||
|
||||
@@ -57,7 +57,13 @@ def decorate_command(channel, command):
|
||||
return result
|
||||
|
||||
except (zigpy.exceptions.ZigbeeException, asyncio.TimeoutError) as ex:
|
||||
channel.debug("command failed: %s exception: %s", command.__name__, str(ex))
|
||||
channel.debug(
|
||||
"command failed: '%s' args: '%s' kwargs '%s' exception: '%s'",
|
||||
command.__name__,
|
||||
args,
|
||||
kwds,
|
||||
str(ex),
|
||||
)
|
||||
return ex
|
||||
|
||||
return wrapper
|
||||
|
||||
@@ -9,7 +9,7 @@ from typing import Any, Dict
|
||||
|
||||
from zigpy import types
|
||||
import zigpy.exceptions
|
||||
from zigpy.profiles import zha, zll
|
||||
from zigpy.profiles import PROFILES
|
||||
import zigpy.quirks
|
||||
from zigpy.zcl.clusters.general import Groups
|
||||
import zigpy.zdo.types as zdo_types
|
||||
@@ -456,27 +456,20 @@ class ZHADevice(LogMixin):
|
||||
]
|
||||
|
||||
# Return endpoint device type Names
|
||||
try:
|
||||
device_info[ATTR_ENDPOINT_NAMES] = [
|
||||
{
|
||||
"name": endpoint.device_type.name,
|
||||
}
|
||||
for (ep_id, endpoint) in self._zigpy_device.endpoints.items()
|
||||
if ep_id != 0
|
||||
and endpoint.profile_id in (zha.PROFILE_ID, zll.PROFILE_ID)
|
||||
]
|
||||
except AttributeError as ex:
|
||||
# Some device types are not using an enumeration
|
||||
self.warning(
|
||||
"Failed to identify endpoint name in '%s' with exception '%s'",
|
||||
self._zigpy_device.endpoints.items(),
|
||||
ex,
|
||||
)
|
||||
device_info[ATTR_ENDPOINT_NAMES] = [
|
||||
{
|
||||
"name": "unknown",
|
||||
}
|
||||
]
|
||||
names = []
|
||||
for endpoint in (ep for epid, ep in self.device.endpoints.items() if epid):
|
||||
profile = PROFILES.get(endpoint.profile_id)
|
||||
if profile and endpoint.device_type is not None:
|
||||
# DeviceType provides undefined enums
|
||||
names.append({ATTR_NAME: profile.DeviceType(endpoint.device_type).name})
|
||||
else:
|
||||
names.append(
|
||||
{
|
||||
ATTR_NAME: f"unknown {endpoint.device_type} device_type "
|
||||
"of 0x{endpoint.profile_id:04x} profile id"
|
||||
}
|
||||
)
|
||||
device_info[ATTR_ENDPOINT_NAMES] = names
|
||||
|
||||
reg_device = self.gateway.ha_device_registry.async_get(self.device_id)
|
||||
if reg_device is not None:
|
||||
@@ -516,7 +509,7 @@ class ZHADevice(LogMixin):
|
||||
CLUSTER_TYPE_OUT: endpoint.out_clusters,
|
||||
}
|
||||
for (ep_id, endpoint) in self._zigpy_device.endpoints.items()
|
||||
if ep_id != 0 and endpoint.profile_id in (zha.PROFILE_ID, zll.PROFILE_ID)
|
||||
if ep_id != 0 and endpoint.profile_id in PROFILES
|
||||
}
|
||||
|
||||
@callback
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
"zigpy==0.25.0",
|
||||
"zigpy-xbee==0.13.0",
|
||||
"zigpy-zigate==0.6.2",
|
||||
"zigpy-znp==0.2.0"
|
||||
"zigpy-znp==0.2.1"
|
||||
],
|
||||
"codeowners": ["@dmulcahey", "@adminiuga"]
|
||||
}
|
||||
|
||||
@@ -60,10 +60,10 @@ class ZoneminderFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
{
|
||||
vol.Required(CONF_HOST, default=user_input.get(CONF_HOST)): str,
|
||||
vol.Optional(
|
||||
CONF_USERNAME, default=user_input.get(CONF_USERNAME)
|
||||
CONF_USERNAME, default=user_input.get(CONF_USERNAME, "")
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PASSWORD, default=user_input.get(CONF_PASSWORD)
|
||||
CONF_PASSWORD, default=user_input.get(CONF_PASSWORD, "")
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PATH, default=user_input.get(CONF_PATH, DEFAULT_PATH)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""Constants used by Home Assistant components."""
|
||||
MAJOR_VERSION = 0
|
||||
MINOR_VERSION = 116
|
||||
PATCH_VERSION = "0b1"
|
||||
PATCH_VERSION = "0b3"
|
||||
__short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
|
||||
__version__ = f"{__short_version__}.{PATCH_VERSION}"
|
||||
REQUIRED_PYTHON_VER = (3, 7, 1)
|
||||
|
||||
@@ -297,7 +297,7 @@ def async_numeric_state_from_config(
|
||||
def state(
|
||||
hass: HomeAssistant,
|
||||
entity: Union[None, str, State],
|
||||
req_state: Union[str, List[str]],
|
||||
req_state: Any,
|
||||
for_period: Optional[timedelta] = None,
|
||||
attribute: Optional[str] = None,
|
||||
) -> bool:
|
||||
@@ -314,17 +314,20 @@ def state(
|
||||
assert isinstance(entity, State)
|
||||
|
||||
if attribute is None:
|
||||
value = entity.state
|
||||
value: Any = entity.state
|
||||
else:
|
||||
value = str(entity.attributes.get(attribute))
|
||||
value = entity.attributes.get(attribute)
|
||||
|
||||
if isinstance(req_state, str):
|
||||
if not isinstance(req_state, list):
|
||||
req_state = [req_state]
|
||||
|
||||
is_state = False
|
||||
for req_state_value in req_state:
|
||||
state_value = req_state_value
|
||||
if INPUT_ENTITY_ID.match(req_state_value) is not None:
|
||||
if (
|
||||
isinstance(req_state_value, str)
|
||||
and INPUT_ENTITY_ID.match(req_state_value) is not None
|
||||
):
|
||||
state_entity = hass.states.get(req_state_value)
|
||||
if not state_entity:
|
||||
continue
|
||||
|
||||
@@ -929,22 +929,44 @@ NUMERIC_STATE_CONDITION_SCHEMA = vol.All(
|
||||
has_at_least_one_key(CONF_BELOW, CONF_ABOVE),
|
||||
)
|
||||
|
||||
STATE_CONDITION_SCHEMA = vol.All(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_CONDITION): "state",
|
||||
vol.Required(CONF_ENTITY_ID): entity_ids,
|
||||
vol.Optional(CONF_ATTRIBUTE): str,
|
||||
vol.Required(CONF_STATE): vol.Any(str, [str]),
|
||||
vol.Optional(CONF_FOR): positive_time_period,
|
||||
# To support use_trigger_value in automation
|
||||
# Deprecated 2016/04/25
|
||||
vol.Optional("from"): str,
|
||||
}
|
||||
),
|
||||
key_dependency("for", "state"),
|
||||
STATE_CONDITION_BASE_SCHEMA = {
|
||||
vol.Required(CONF_CONDITION): "state",
|
||||
vol.Required(CONF_ENTITY_ID): entity_ids,
|
||||
vol.Optional(CONF_ATTRIBUTE): str,
|
||||
vol.Optional(CONF_FOR): positive_time_period,
|
||||
# To support use_trigger_value in automation
|
||||
# Deprecated 2016/04/25
|
||||
vol.Optional("from"): str,
|
||||
}
|
||||
|
||||
STATE_CONDITION_STATE_SCHEMA = vol.Schema(
|
||||
{
|
||||
**STATE_CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_STATE): vol.Any(str, [str]),
|
||||
}
|
||||
)
|
||||
|
||||
STATE_CONDITION_ATTRIBUTE_SCHEMA = vol.Schema(
|
||||
{
|
||||
**STATE_CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_STATE): match_all,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def STATE_CONDITION_SCHEMA(value: Any) -> dict: # pylint: disable=invalid-name
|
||||
"""Validate a state condition."""
|
||||
if not isinstance(value, dict):
|
||||
raise vol.Invalid("Expected a dictionary")
|
||||
|
||||
if CONF_ATTRIBUTE in value:
|
||||
validated: dict = STATE_CONDITION_ATTRIBUTE_SCHEMA(value)
|
||||
else:
|
||||
validated = STATE_CONDITION_STATE_SCHEMA(value)
|
||||
|
||||
return key_dependency("for", "state")(validated)
|
||||
|
||||
|
||||
SUN_CONDITION_SCHEMA = vol.All(
|
||||
vol.Schema(
|
||||
{
|
||||
|
||||
+100
-53
@@ -774,16 +774,65 @@ class _TrackTemplateResultInfo:
|
||||
"""Force recalculate the template."""
|
||||
self._refresh(None)
|
||||
|
||||
@callback
|
||||
def _event_triggers_template(self, template: Template, event: Event) -> bool:
|
||||
"""Determine if a template should be re-rendered from an event."""
|
||||
entity_id = event.data.get(ATTR_ENTITY_ID)
|
||||
return (
|
||||
self._info[template].filter(entity_id)
|
||||
or event.data.get("new_state") is None
|
||||
or event.data.get("old_state") is None
|
||||
and self._info[template].filter_lifecycle(entity_id)
|
||||
)
|
||||
def _render_template_if_ready(
|
||||
self,
|
||||
track_template_: TrackTemplate,
|
||||
now: datetime,
|
||||
event: Optional[Event],
|
||||
) -> Union[bool, TrackTemplateResult]:
|
||||
"""Re-render the template if conditions match.
|
||||
|
||||
Returns False if the template was not be re-rendered
|
||||
|
||||
Returns True if the template re-rendered and did not
|
||||
change.
|
||||
|
||||
Returns TrackTemplateResult if the template re-render
|
||||
generates a new result.
|
||||
"""
|
||||
template = track_template_.template
|
||||
|
||||
if event:
|
||||
info = self._info[template]
|
||||
|
||||
if not self._rate_limit.async_has_timer(
|
||||
template
|
||||
) and not _event_triggers_rerender(event, info):
|
||||
return False
|
||||
|
||||
if self._rate_limit.async_schedule_action(
|
||||
template,
|
||||
_rate_limit_for_event(event, info, track_template_),
|
||||
now,
|
||||
self._refresh,
|
||||
event,
|
||||
):
|
||||
return False
|
||||
|
||||
_LOGGER.debug(
|
||||
"Template update %s triggered by event: %s",
|
||||
template.template,
|
||||
event,
|
||||
)
|
||||
|
||||
self._rate_limit.async_triggered(template, now)
|
||||
self._info[template] = template.async_render_to_info(track_template_.variables)
|
||||
|
||||
try:
|
||||
result: Union[str, TemplateError] = self._info[template].result()
|
||||
except TemplateError as ex:
|
||||
result = ex
|
||||
|
||||
last_result = self._last_result.get(template)
|
||||
|
||||
# Check to see if the result has changed
|
||||
if result == last_result:
|
||||
return True
|
||||
|
||||
if isinstance(result, TemplateError) and isinstance(last_result, TemplateError):
|
||||
return True
|
||||
|
||||
return TrackTemplateResult(template, last_result, result)
|
||||
|
||||
@callback
|
||||
def _refresh(self, event: Optional[Event]) -> None:
|
||||
@@ -792,51 +841,13 @@ class _TrackTemplateResultInfo:
|
||||
now = dt_util.utcnow()
|
||||
|
||||
for track_template_ in self._track_templates:
|
||||
template = track_template_.template
|
||||
if event:
|
||||
if not self._rate_limit.async_has_timer(
|
||||
template
|
||||
) and not self._event_triggers_template(template, event):
|
||||
continue
|
||||
update = self._render_template_if_ready(track_template_, now, event)
|
||||
if not update:
|
||||
continue
|
||||
|
||||
if self._rate_limit.async_schedule_action(
|
||||
template,
|
||||
self._info[template].rate_limit or track_template_.rate_limit,
|
||||
now,
|
||||
self._refresh,
|
||||
event,
|
||||
):
|
||||
continue
|
||||
|
||||
_LOGGER.debug(
|
||||
"Template update %s triggered by event: %s",
|
||||
template.template,
|
||||
event,
|
||||
)
|
||||
|
||||
self._rate_limit.async_triggered(template, now)
|
||||
self._info[template] = template.async_render_to_info(
|
||||
track_template_.variables
|
||||
)
|
||||
info_changed = True
|
||||
|
||||
try:
|
||||
result: Union[str, TemplateError] = self._info[template].result()
|
||||
except TemplateError as ex:
|
||||
result = ex
|
||||
|
||||
last_result = self._last_result.get(template)
|
||||
|
||||
# Check to see if the result has changed
|
||||
if result == last_result:
|
||||
continue
|
||||
|
||||
if isinstance(result, TemplateError) and isinstance(
|
||||
last_result, TemplateError
|
||||
):
|
||||
continue
|
||||
|
||||
updates.append(TrackTemplateResult(template, last_result, result))
|
||||
if isinstance(update, TrackTemplateResult):
|
||||
updates.append(update)
|
||||
|
||||
if info_changed:
|
||||
assert self._track_state_changes
|
||||
@@ -1348,3 +1359,39 @@ def _render_infos_to_track_states(render_infos: Iterable[RenderInfo]) -> TrackSt
|
||||
return TrackStates(True, set(), set())
|
||||
|
||||
return TrackStates(False, *_entities_domains_from_render_infos(render_infos))
|
||||
|
||||
|
||||
@callback
|
||||
def _event_triggers_rerender(event: Event, info: RenderInfo) -> bool:
|
||||
"""Determine if a template should be re-rendered from an event."""
|
||||
entity_id = event.data.get(ATTR_ENTITY_ID)
|
||||
|
||||
if info.filter(entity_id):
|
||||
return True
|
||||
|
||||
if (
|
||||
event.data.get("new_state") is not None
|
||||
and event.data.get("old_state") is not None
|
||||
):
|
||||
return False
|
||||
|
||||
return bool(info.filter_lifecycle(entity_id))
|
||||
|
||||
|
||||
@callback
|
||||
def _rate_limit_for_event(
|
||||
event: Event, info: RenderInfo, track_template_: TrackTemplate
|
||||
) -> Optional[timedelta]:
|
||||
"""Determine the rate limit for an event."""
|
||||
entity_id = event.data.get(ATTR_ENTITY_ID)
|
||||
|
||||
# Specifically referenced entities are excluded
|
||||
# from the rate limit
|
||||
if entity_id in info.entities:
|
||||
return None
|
||||
|
||||
if track_template_.rate_limit is not None:
|
||||
return track_template_.rate_limit
|
||||
|
||||
rate_limit: Optional[timedelta] = info.rate_limit
|
||||
return rate_limit
|
||||
|
||||
@@ -123,30 +123,71 @@ def make_script_schema(schema, default_script_mode, extra=vol.PREVENT_EXTRA):
|
||||
)
|
||||
|
||||
|
||||
STATIC_VALIDATION_ACTION_TYPES = (
|
||||
cv.SCRIPT_ACTION_CALL_SERVICE,
|
||||
cv.SCRIPT_ACTION_DELAY,
|
||||
cv.SCRIPT_ACTION_WAIT_TEMPLATE,
|
||||
cv.SCRIPT_ACTION_FIRE_EVENT,
|
||||
cv.SCRIPT_ACTION_ACTIVATE_SCENE,
|
||||
cv.SCRIPT_ACTION_VARIABLES,
|
||||
)
|
||||
|
||||
|
||||
async def async_validate_actions_config(
|
||||
hass: HomeAssistant, actions: List[ConfigType]
|
||||
) -> List[ConfigType]:
|
||||
"""Validate a list of actions."""
|
||||
return await asyncio.gather(
|
||||
*[async_validate_action_config(hass, action) for action in actions]
|
||||
)
|
||||
|
||||
|
||||
async def async_validate_action_config(
|
||||
hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
action_type = cv.determine_script_action(config)
|
||||
|
||||
if action_type == cv.SCRIPT_ACTION_DEVICE_AUTOMATION:
|
||||
if action_type in STATIC_VALIDATION_ACTION_TYPES:
|
||||
pass
|
||||
|
||||
elif action_type == cv.SCRIPT_ACTION_DEVICE_AUTOMATION:
|
||||
platform = await device_automation.async_get_device_automation_platform(
|
||||
hass, config[CONF_DOMAIN], "action"
|
||||
)
|
||||
config = platform.ACTION_SCHEMA(config) # type: ignore
|
||||
elif (
|
||||
action_type == cv.SCRIPT_ACTION_CHECK_CONDITION
|
||||
and config[CONF_CONDITION] == "device"
|
||||
):
|
||||
platform = await device_automation.async_get_device_automation_platform(
|
||||
hass, config[CONF_DOMAIN], "condition"
|
||||
)
|
||||
config = platform.CONDITION_SCHEMA(config) # type: ignore
|
||||
|
||||
elif action_type == cv.SCRIPT_ACTION_CHECK_CONDITION:
|
||||
if config[CONF_CONDITION] == "device":
|
||||
platform = await device_automation.async_get_device_automation_platform(
|
||||
hass, config[CONF_DOMAIN], "condition"
|
||||
)
|
||||
config = platform.CONDITION_SCHEMA(config) # type: ignore
|
||||
|
||||
elif action_type == cv.SCRIPT_ACTION_WAIT_FOR_TRIGGER:
|
||||
config[CONF_WAIT_FOR_TRIGGER] = await async_validate_trigger_config(
|
||||
hass, config[CONF_WAIT_FOR_TRIGGER]
|
||||
)
|
||||
|
||||
elif action_type == cv.SCRIPT_ACTION_REPEAT:
|
||||
config[CONF_SEQUENCE] = await async_validate_actions_config(
|
||||
hass, config[CONF_REPEAT][CONF_SEQUENCE]
|
||||
)
|
||||
|
||||
elif action_type == cv.SCRIPT_ACTION_CHOOSE:
|
||||
if CONF_DEFAULT in config:
|
||||
config[CONF_DEFAULT] = await async_validate_actions_config(
|
||||
hass, config[CONF_DEFAULT]
|
||||
)
|
||||
|
||||
for choose_conf in config[CONF_CHOOSE]:
|
||||
choose_conf[CONF_SEQUENCE] = await async_validate_actions_config(
|
||||
hass, choose_conf[CONF_SEQUENCE]
|
||||
)
|
||||
|
||||
else:
|
||||
raise ValueError(f"No validation for {action_type}")
|
||||
|
||||
return config
|
||||
|
||||
|
||||
@@ -850,7 +891,7 @@ class Script:
|
||||
|
||||
entity_ids = data.get(ATTR_ENTITY_ID)
|
||||
|
||||
if entity_ids is None:
|
||||
if entity_ids is None or isinstance(entity_ids, template.Template):
|
||||
continue
|
||||
|
||||
if isinstance(entity_ids, str):
|
||||
|
||||
@@ -72,7 +72,7 @@ _COLLECTABLE_STATE_ATTRIBUTES = {
|
||||
"name",
|
||||
}
|
||||
|
||||
DEFAULT_RATE_LIMIT = timedelta(seconds=1)
|
||||
DEFAULT_RATE_LIMIT = timedelta(minutes=1)
|
||||
|
||||
|
||||
@bind_hass
|
||||
@@ -489,26 +489,6 @@ class Template:
|
||||
return 'Template("' + self.template + '")'
|
||||
|
||||
|
||||
class RateLimit:
|
||||
"""Class to control update rate limits."""
|
||||
|
||||
def __init__(self, hass: HomeAssistantType):
|
||||
"""Initialize rate limit."""
|
||||
self._hass = hass
|
||||
|
||||
def __call__(self, *args: Any, **kwargs: Any) -> str:
|
||||
"""Handle a call to the class."""
|
||||
render_info = self._hass.data.get(_RENDER_INFO)
|
||||
if render_info is not None:
|
||||
render_info.rate_limit = timedelta(*args, **kwargs)
|
||||
|
||||
return ""
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Representation of a RateLimit."""
|
||||
return "<template RateLimit>"
|
||||
|
||||
|
||||
class AllStates:
|
||||
"""Class to expose all HA states as attributes."""
|
||||
|
||||
@@ -1310,11 +1290,10 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment):
|
||||
self.globals["is_state_attr"] = hassfunction(is_state_attr)
|
||||
self.globals["state_attr"] = hassfunction(state_attr)
|
||||
self.globals["states"] = AllStates(hass)
|
||||
self.globals["rate_limit"] = RateLimit(hass)
|
||||
|
||||
def is_safe_callable(self, obj):
|
||||
"""Test if callback is safe."""
|
||||
return isinstance(obj, (AllStates, RateLimit)) or super().is_safe_callable(obj)
|
||||
return isinstance(obj, AllStates) or super().is_safe_callable(obj)
|
||||
|
||||
def is_safe_attribute(self, obj, attr, value):
|
||||
"""Test if attribute is safe."""
|
||||
|
||||
@@ -221,7 +221,7 @@ aiopvpc==2.0.2
|
||||
aiopylgtv==0.3.3
|
||||
|
||||
# homeassistant.components.shelly
|
||||
aioshelly==0.3.3
|
||||
aioshelly==0.3.4
|
||||
|
||||
# homeassistant.components.switcher_kis
|
||||
aioswitcher==1.2.1
|
||||
@@ -263,7 +263,7 @@ apcaccess==0.0.13
|
||||
apns2==0.3.0
|
||||
|
||||
# homeassistant.components.apprise
|
||||
apprise==0.8.8
|
||||
apprise==0.8.9
|
||||
|
||||
# homeassistant.components.aprs
|
||||
aprslib==0.6.46
|
||||
@@ -2329,7 +2329,7 @@ zigpy-xbee==0.13.0
|
||||
zigpy-zigate==0.6.2
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy-znp==0.2.0
|
||||
zigpy-znp==0.2.1
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy==0.25.0
|
||||
|
||||
@@ -134,7 +134,7 @@ aiopvpc==2.0.2
|
||||
aiopylgtv==0.3.3
|
||||
|
||||
# homeassistant.components.shelly
|
||||
aioshelly==0.3.3
|
||||
aioshelly==0.3.4
|
||||
|
||||
# homeassistant.components.switcher_kis
|
||||
aioswitcher==1.2.1
|
||||
@@ -158,7 +158,7 @@ androidtv[async]==0.0.50
|
||||
apns2==0.3.0
|
||||
|
||||
# homeassistant.components.apprise
|
||||
apprise==0.8.8
|
||||
apprise==0.8.9
|
||||
|
||||
# homeassistant.components.aprs
|
||||
aprslib==0.6.46
|
||||
@@ -1086,7 +1086,7 @@ zigpy-xbee==0.13.0
|
||||
zigpy-zigate==0.6.2
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy-znp==0.2.0
|
||||
zigpy-znp==0.2.1
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy==0.25.0
|
||||
|
||||
@@ -763,7 +763,6 @@ async def test_group_climate_all_cool(hass):
|
||||
hass.states.async_set("climate.two", "cool")
|
||||
hass.states.async_set("climate.three", "cool")
|
||||
|
||||
assert await async_setup_component(hass, "climate", {})
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
@@ -773,6 +772,7 @@ async def test_group_climate_all_cool(hass):
|
||||
}
|
||||
},
|
||||
)
|
||||
assert await async_setup_component(hass, "climate", {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.group_zero").state == STATE_ON
|
||||
@@ -804,8 +804,8 @@ async def test_group_alarm(hass):
|
||||
hass.states.async_set("alarm_control_panel.one", "armed_away")
|
||||
hass.states.async_set("alarm_control_panel.two", "armed_home")
|
||||
hass.states.async_set("alarm_control_panel.three", "armed_away")
|
||||
hass.state = CoreState.stopped
|
||||
|
||||
assert await async_setup_component(hass, "alarm_control_panel", {})
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
@@ -817,8 +817,10 @@ async def test_group_alarm(hass):
|
||||
}
|
||||
},
|
||||
)
|
||||
assert await async_setup_component(hass, "alarm_control_panel", {})
|
||||
await hass.async_block_till_done()
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.group_zero").state == STATE_ON
|
||||
|
||||
|
||||
@@ -850,8 +852,8 @@ async def test_group_vacuum_off(hass):
|
||||
hass.states.async_set("vacuum.one", "docked")
|
||||
hass.states.async_set("vacuum.two", "off")
|
||||
hass.states.async_set("vacuum.three", "off")
|
||||
hass.state = CoreState.stopped
|
||||
|
||||
assert await async_setup_component(hass, "vacuum", {})
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
@@ -861,8 +863,11 @@ async def test_group_vacuum_off(hass):
|
||||
}
|
||||
},
|
||||
)
|
||||
assert await async_setup_component(hass, "vacuum", {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
await hass.async_block_till_done()
|
||||
assert hass.states.get("group.group_zero").state == STATE_OFF
|
||||
|
||||
|
||||
@@ -893,7 +898,6 @@ async def test_device_tracker_not_home(hass):
|
||||
hass.states.async_set("device_tracker.two", "not_home")
|
||||
hass.states.async_set("device_tracker.three", "not_home")
|
||||
|
||||
assert await async_setup_component(hass, "device_tracker", {})
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
@@ -916,7 +920,6 @@ async def test_light_removed(hass):
|
||||
hass.states.async_set("light.two", "off")
|
||||
hass.states.async_set("light.three", "on")
|
||||
|
||||
assert await async_setup_component(hass, "light", {})
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
@@ -943,7 +946,6 @@ async def test_switch_removed(hass):
|
||||
hass.states.async_set("switch.three", "on")
|
||||
|
||||
hass.state = CoreState.stopped
|
||||
assert await async_setup_component(hass, "switch", {})
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
@@ -956,6 +958,8 @@ async def test_switch_removed(hass):
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.group_zero").state == "unknown"
|
||||
assert await async_setup_component(hass, "switch", {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
await hass.async_block_till_done()
|
||||
@@ -965,3 +969,247 @@ async def test_switch_removed(hass):
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.group_zero").state == "off"
|
||||
|
||||
|
||||
async def test_lights_added_after_group(hass):
|
||||
"""Test lights added after group."""
|
||||
|
||||
entity_ids = [
|
||||
"light.living_front_ri",
|
||||
"light.living_back_lef",
|
||||
"light.living_back_cen",
|
||||
"light.living_front_le",
|
||||
"light.living_front_ce",
|
||||
"light.living_back_rig",
|
||||
]
|
||||
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
{
|
||||
"group": {
|
||||
"living_room_downlights": {"entities": entity_ids},
|
||||
}
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.living_room_downlights").state == "unknown"
|
||||
|
||||
for entity_id in entity_ids:
|
||||
hass.states.async_set(entity_id, "off")
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.living_room_downlights").state == "off"
|
||||
|
||||
|
||||
async def test_lights_added_before_group(hass):
|
||||
"""Test lights added before group."""
|
||||
|
||||
entity_ids = [
|
||||
"light.living_front_ri",
|
||||
"light.living_back_lef",
|
||||
"light.living_back_cen",
|
||||
"light.living_front_le",
|
||||
"light.living_front_ce",
|
||||
"light.living_back_rig",
|
||||
]
|
||||
|
||||
for entity_id in entity_ids:
|
||||
hass.states.async_set(entity_id, "off")
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
{
|
||||
"group": {
|
||||
"living_room_downlights": {"entities": entity_ids},
|
||||
}
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.living_room_downlights").state == "off"
|
||||
|
||||
|
||||
async def test_cover_added_after_group(hass):
|
||||
"""Test cover added after group."""
|
||||
|
||||
entity_ids = [
|
||||
"cover.upstairs",
|
||||
"cover.downstairs",
|
||||
]
|
||||
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
{
|
||||
"group": {
|
||||
"shades": {"entities": entity_ids},
|
||||
}
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
for entity_id in entity_ids:
|
||||
hass.states.async_set(entity_id, "open")
|
||||
await hass.async_block_till_done()
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.shades").state == "open"
|
||||
|
||||
for entity_id in entity_ids:
|
||||
hass.states.async_set(entity_id, "closed")
|
||||
|
||||
await hass.async_block_till_done()
|
||||
assert hass.states.get("group.shades").state == "closed"
|
||||
|
||||
|
||||
async def test_group_that_references_a_group_of_lights(hass):
|
||||
"""Group that references a group of lights."""
|
||||
|
||||
entity_ids = [
|
||||
"light.living_front_ri",
|
||||
"light.living_back_lef",
|
||||
]
|
||||
hass.state = CoreState.stopped
|
||||
|
||||
for entity_id in entity_ids:
|
||||
hass.states.async_set(entity_id, "off")
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
{
|
||||
"group": {
|
||||
"living_room_downlights": {"entities": entity_ids},
|
||||
"grouped_group": {
|
||||
"entities": ["group.living_room_downlights", *entity_ids]
|
||||
},
|
||||
}
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.living_room_downlights").state == "off"
|
||||
assert hass.states.get("group.grouped_group").state == "off"
|
||||
|
||||
|
||||
async def test_group_that_references_a_group_of_covers(hass):
|
||||
"""Group that references a group of covers."""
|
||||
|
||||
entity_ids = [
|
||||
"cover.living_front_ri",
|
||||
"cover.living_back_lef",
|
||||
]
|
||||
hass.state = CoreState.stopped
|
||||
|
||||
for entity_id in entity_ids:
|
||||
hass.states.async_set(entity_id, "closed")
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
{
|
||||
"group": {
|
||||
"living_room_downcover": {"entities": entity_ids},
|
||||
"grouped_group": {
|
||||
"entities": ["group.living_room_downlights", *entity_ids]
|
||||
},
|
||||
}
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.living_room_downcover").state == "closed"
|
||||
assert hass.states.get("group.grouped_group").state == "closed"
|
||||
|
||||
|
||||
async def test_group_that_references_two_groups_of_covers(hass):
|
||||
"""Group that references a group of covers."""
|
||||
|
||||
entity_ids = [
|
||||
"cover.living_front_ri",
|
||||
"cover.living_back_lef",
|
||||
]
|
||||
hass.state = CoreState.stopped
|
||||
|
||||
for entity_id in entity_ids:
|
||||
hass.states.async_set(entity_id, "closed")
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
{
|
||||
"group": {
|
||||
"living_room_downcover": {"entities": entity_ids},
|
||||
"living_room_upcover": {"entities": entity_ids},
|
||||
"grouped_group": {
|
||||
"entities": [
|
||||
"group.living_room_downlights",
|
||||
"group.living_room_upcover",
|
||||
]
|
||||
},
|
||||
}
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.living_room_downcover").state == "closed"
|
||||
assert hass.states.get("group.living_room_upcover").state == "closed"
|
||||
assert hass.states.get("group.grouped_group").state == "closed"
|
||||
|
||||
|
||||
async def test_group_that_references_two_types_of_groups(hass):
|
||||
"""Group that references a group of covers and device_trackers."""
|
||||
|
||||
group_1_entity_ids = [
|
||||
"cover.living_front_ri",
|
||||
"cover.living_back_lef",
|
||||
]
|
||||
group_2_entity_ids = [
|
||||
"device_tracker.living_front_ri",
|
||||
"device_tracker.living_back_lef",
|
||||
]
|
||||
hass.state = CoreState.stopped
|
||||
|
||||
for entity_id in group_1_entity_ids:
|
||||
hass.states.async_set(entity_id, "closed")
|
||||
for entity_id in group_2_entity_ids:
|
||||
hass.states.async_set(entity_id, "home")
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
{
|
||||
"group": {
|
||||
"covers": {"entities": group_1_entity_ids},
|
||||
"device_trackers": {"entities": group_2_entity_ids},
|
||||
"grouped_group": {
|
||||
"entities": ["group.covers", "group.device_trackers"]
|
||||
},
|
||||
}
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.covers").state == "closed"
|
||||
assert hass.states.get("group.device_trackers").state == "home"
|
||||
assert hass.states.get("group.grouped_group").state == "on"
|
||||
|
||||
@@ -1240,3 +1240,32 @@ async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop(
|
||||
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10))
|
||||
await hass.async_block_till_done()
|
||||
assert len(calls) == 1
|
||||
|
||||
|
||||
async def test_attribute_if_fires_on_entity_change_with_both_filters_boolean(
|
||||
hass, calls
|
||||
):
|
||||
"""Test for firing if both filters are match attribute."""
|
||||
hass.states.async_set("test.entity", "bla", {"happening": False})
|
||||
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
automation.DOMAIN,
|
||||
{
|
||||
automation.DOMAIN: {
|
||||
"trigger": {
|
||||
"platform": "state",
|
||||
"entity_id": "test.entity",
|
||||
"from": False,
|
||||
"to": True,
|
||||
"attribute": "happening",
|
||||
},
|
||||
"action": {"service": "test.automation"},
|
||||
}
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
hass.states.async_set("test.entity", "bla", {"happening": True})
|
||||
await hass.async_block_till_done()
|
||||
assert len(calls) == 1
|
||||
|
||||
@@ -1121,3 +1121,48 @@ async def test_state_gets_lowercased(hass):
|
||||
hass.states.async_set("binary_sensor.garage_door_sensor", "on")
|
||||
await hass.async_block_till_done()
|
||||
assert hass.states.get("cover.garage_door").state == STATE_CLOSED
|
||||
|
||||
|
||||
async def test_self_referencing_icon_with_no_template_is_not_a_loop(hass, caplog):
|
||||
"""Test a self referencing icon with no value template is not a loop."""
|
||||
|
||||
icon_template_str = """{% if is_state('cover.office', 'open') %}
|
||||
mdi:window-shutter-open
|
||||
{% else %}
|
||||
mdi:window-shutter
|
||||
{% endif %}"""
|
||||
|
||||
await setup.async_setup_component(
|
||||
hass,
|
||||
"cover",
|
||||
{
|
||||
"cover": {
|
||||
"platform": "template",
|
||||
"covers": {
|
||||
"office": {
|
||||
"icon_template": icon_template_str,
|
||||
"open_cover": {
|
||||
"service": "switch.turn_on",
|
||||
"entity_id": "switch.office_blinds_up",
|
||||
},
|
||||
"close_cover": {
|
||||
"service": "switch.turn_on",
|
||||
"entity_id": "switch.office_blinds_down",
|
||||
},
|
||||
"stop_cover": {
|
||||
"service": "switch.turn_on",
|
||||
"entity_id": "switch.office_blinds_up",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
await hass.async_start()
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(hass.states.async_all()) == 1
|
||||
|
||||
assert "Template loop detected" not in caplog.text
|
||||
|
||||
@@ -797,9 +797,9 @@ async def test_self_referencing_sensor_loop(hass, caplog):
|
||||
assert "Template loop detected" in caplog.text
|
||||
|
||||
state = hass.states.get("sensor.test")
|
||||
assert int(state.state) == 1
|
||||
assert int(state.state) == 2
|
||||
await hass.async_block_till_done()
|
||||
assert int(state.state) == 1
|
||||
assert int(state.state) == 2
|
||||
|
||||
|
||||
async def test_self_referencing_sensor_with_icon_loop(hass, caplog):
|
||||
@@ -833,11 +833,11 @@ async def test_self_referencing_sensor_with_icon_loop(hass, caplog):
|
||||
assert "Template loop detected" in caplog.text
|
||||
|
||||
state = hass.states.get("sensor.test")
|
||||
assert int(state.state) == 2
|
||||
assert int(state.state) == 3
|
||||
assert state.attributes[ATTR_ICON] == "mdi:greater"
|
||||
|
||||
await hass.async_block_till_done()
|
||||
assert int(state.state) == 2
|
||||
assert int(state.state) == 3
|
||||
|
||||
|
||||
async def test_self_referencing_sensor_with_icon_and_picture_entity_loop(hass, caplog):
|
||||
@@ -872,12 +872,12 @@ async def test_self_referencing_sensor_with_icon_and_picture_entity_loop(hass, c
|
||||
assert "Template loop detected" in caplog.text
|
||||
|
||||
state = hass.states.get("sensor.test")
|
||||
assert int(state.state) == 3
|
||||
assert int(state.state) == 4
|
||||
assert state.attributes[ATTR_ICON] == "mdi:less"
|
||||
assert state.attributes[ATTR_ENTITY_PICTURE] == "bigpic"
|
||||
|
||||
await hass.async_block_till_done()
|
||||
assert int(state.state) == 3
|
||||
assert int(state.state) == 4
|
||||
|
||||
|
||||
async def test_self_referencing_entity_picture_loop(hass, caplog):
|
||||
@@ -917,7 +917,7 @@ async def test_self_referencing_entity_picture_loop(hass, caplog):
|
||||
|
||||
state = hass.states.get("sensor.test")
|
||||
assert int(state.state) == 1
|
||||
assert state.attributes[ATTR_ENTITY_PICTURE] == "1"
|
||||
assert state.attributes[ATTR_ENTITY_PICTURE] == "2"
|
||||
|
||||
await hass.async_block_till_done()
|
||||
assert int(state.state) == 1
|
||||
|
||||
@@ -422,7 +422,7 @@ async def test_state_attribute(hass):
|
||||
"condition": "state",
|
||||
"entity_id": "sensor.temperature",
|
||||
"attribute": "attribute1",
|
||||
"state": "200",
|
||||
"state": 200,
|
||||
},
|
||||
],
|
||||
},
|
||||
@@ -435,7 +435,7 @@ async def test_state_attribute(hass):
|
||||
assert test(hass)
|
||||
|
||||
hass.states.async_set("sensor.temperature", 100, {"attribute1": "200"})
|
||||
assert test(hass)
|
||||
assert not test(hass)
|
||||
|
||||
hass.states.async_set("sensor.temperature", 100, {"attribute1": 201})
|
||||
assert not test(hass)
|
||||
@@ -444,6 +444,31 @@ async def test_state_attribute(hass):
|
||||
assert not test(hass)
|
||||
|
||||
|
||||
async def test_state_attribute_boolean(hass):
|
||||
"""Test with boolean state attribute in condition."""
|
||||
test = await condition.async_from_config(
|
||||
hass,
|
||||
{
|
||||
"condition": "state",
|
||||
"entity_id": "sensor.temperature",
|
||||
"attribute": "happening",
|
||||
"state": False,
|
||||
},
|
||||
)
|
||||
|
||||
hass.states.async_set("sensor.temperature", 100, {"happening": 200})
|
||||
assert not test(hass)
|
||||
|
||||
hass.states.async_set("sensor.temperature", 100, {"happening": True})
|
||||
assert not test(hass)
|
||||
|
||||
hass.states.async_set("sensor.temperature", 100, {"no_happening": 201})
|
||||
assert not test(hass)
|
||||
|
||||
hass.states.async_set("sensor.temperature", 100, {"happening": False})
|
||||
assert test(hass)
|
||||
|
||||
|
||||
async def test_state_using_input_entities(hass):
|
||||
"""Test state conditions using input_* entities."""
|
||||
await async_setup_component(
|
||||
|
||||
+28
-168
@@ -927,7 +927,6 @@ async def test_track_template_result_complex(hass):
|
||||
"""Test tracking template."""
|
||||
specific_runs = []
|
||||
template_complex_str = """
|
||||
{{ rate_limit(seconds=0) }}
|
||||
{% if states("sensor.domain") == "light" %}
|
||||
{{ states.light | map(attribute='entity_id') | list }}
|
||||
{% elif states("sensor.domain") == "lock" %}
|
||||
@@ -948,7 +947,9 @@ async def test_track_template_result_complex(hass):
|
||||
hass.states.async_set("lock.one", "locked")
|
||||
|
||||
info = async_track_template_result(
|
||||
hass, [TrackTemplate(template_complex, None)], specific_run_callback
|
||||
hass,
|
||||
[TrackTemplate(template_complex, None, timedelta(seconds=0))],
|
||||
specific_run_callback,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
@@ -1236,7 +1237,7 @@ async def test_track_template_result_iterator(hass):
|
||||
[
|
||||
TrackTemplate(
|
||||
Template(
|
||||
"""{{ rate_limit(seconds=0) }}
|
||||
"""
|
||||
{% for state in states.sensor %}
|
||||
{% if state.state == 'on' %}
|
||||
{{ state.entity_id }},
|
||||
@@ -1246,6 +1247,7 @@ async def test_track_template_result_iterator(hass):
|
||||
hass,
|
||||
),
|
||||
None,
|
||||
timedelta(seconds=0),
|
||||
)
|
||||
],
|
||||
iterator_callback,
|
||||
@@ -1268,11 +1270,12 @@ async def test_track_template_result_iterator(hass):
|
||||
[
|
||||
TrackTemplate(
|
||||
Template(
|
||||
"""{{ rate_limit(seconds=0) }}{{ states.sensor|selectattr("state","equalto","on")
|
||||
"""{{ states.sensor|selectattr("state","equalto","on")
|
||||
|join(",", attribute="entity_id") }}""",
|
||||
hass,
|
||||
),
|
||||
None,
|
||||
timedelta(seconds=0),
|
||||
)
|
||||
],
|
||||
filter_callback,
|
||||
@@ -1452,62 +1455,6 @@ async def test_track_template_rate_limit(hass):
|
||||
assert refresh_runs == ["0", "1", "2", "4"]
|
||||
|
||||
|
||||
async def test_track_template_rate_limit_overridden(hass):
|
||||
"""Test template rate limit can be overridden from the template."""
|
||||
template_refresh = Template(
|
||||
"{% set x = rate_limit(seconds=0.1) %}{{ states | count }}", hass
|
||||
)
|
||||
|
||||
refresh_runs = []
|
||||
|
||||
@ha.callback
|
||||
def refresh_listener(event, updates):
|
||||
refresh_runs.append(updates.pop().result)
|
||||
|
||||
info = async_track_template_result(
|
||||
hass,
|
||||
[TrackTemplate(template_refresh, None, timedelta(seconds=5))],
|
||||
refresh_listener,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
info.async_refresh()
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert refresh_runs == ["0"]
|
||||
hass.states.async_set("sensor.one", "any")
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0"]
|
||||
info.async_refresh()
|
||||
assert refresh_runs == ["0", "1"]
|
||||
hass.states.async_set("sensor.two", "any")
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1"]
|
||||
next_time = dt_util.utcnow() + timedelta(seconds=0.125)
|
||||
with patch(
|
||||
"homeassistant.helpers.ratelimit.dt_util.utcnow", return_value=next_time
|
||||
):
|
||||
async_fire_time_changed(hass, next_time)
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1", "2"]
|
||||
hass.states.async_set("sensor.three", "any")
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1", "2"]
|
||||
hass.states.async_set("sensor.four", "any")
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1", "2"]
|
||||
next_time = dt_util.utcnow() + timedelta(seconds=0.125 * 2)
|
||||
with patch(
|
||||
"homeassistant.helpers.ratelimit.dt_util.utcnow", return_value=next_time
|
||||
):
|
||||
async_fire_time_changed(hass, next_time)
|
||||
await hass.async_block_till_done()
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1", "2", "4"]
|
||||
hass.states.async_set("sensor.five", "any")
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1", "2", "4"]
|
||||
|
||||
|
||||
async def test_track_template_rate_limit_five(hass):
|
||||
"""Test template rate limit of 5 seconds."""
|
||||
template_refresh = Template("{{ states | count }}", hass)
|
||||
@@ -1541,19 +1488,11 @@ async def test_track_template_rate_limit_five(hass):
|
||||
assert refresh_runs == ["0", "1"]
|
||||
|
||||
|
||||
async def test_track_template_rate_limit_changes(hass):
|
||||
"""Test template rate limit can be changed."""
|
||||
template_refresh = Template(
|
||||
"""
|
||||
{% if states.sensor.two.state == "any" %}
|
||||
{% set x = rate_limit(seconds=5) %}
|
||||
{% else %}
|
||||
{% set x = rate_limit(seconds=0.1) %}
|
||||
{% endif %}
|
||||
{{ states | count }}
|
||||
""",
|
||||
hass,
|
||||
)
|
||||
async def test_specifically_referenced_entity_is_not_rate_limited(hass):
|
||||
"""Test template rate limit of 5 seconds."""
|
||||
hass.states.async_set("sensor.one", "none")
|
||||
|
||||
template_refresh = Template('{{ states | count }}_{{ states("sensor.one") }}', hass)
|
||||
|
||||
refresh_runs = []
|
||||
|
||||
@@ -1563,114 +1502,34 @@ async def test_track_template_rate_limit_changes(hass):
|
||||
|
||||
info = async_track_template_result(
|
||||
hass,
|
||||
[TrackTemplate(template_refresh, None)],
|
||||
[TrackTemplate(template_refresh, None, timedelta(seconds=5))],
|
||||
refresh_listener,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
info.async_refresh()
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert refresh_runs == ["0"]
|
||||
assert refresh_runs == ["1_none"]
|
||||
hass.states.async_set("sensor.one", "any")
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0"]
|
||||
assert refresh_runs == ["1_none", "1_any"]
|
||||
info.async_refresh()
|
||||
assert refresh_runs == ["0", "1"]
|
||||
assert refresh_runs == ["1_none", "1_any"]
|
||||
hass.states.async_set("sensor.two", "any")
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1"]
|
||||
next_time = dt_util.utcnow() + timedelta(seconds=0.125 * 1)
|
||||
with patch(
|
||||
"homeassistant.helpers.ratelimit.dt_util.utcnow", return_value=next_time
|
||||
):
|
||||
async_fire_time_changed(hass, next_time)
|
||||
await hass.async_block_till_done()
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1", "2"]
|
||||
assert refresh_runs == ["1_none", "1_any"]
|
||||
hass.states.async_set("sensor.three", "any")
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1", "2"]
|
||||
hass.states.async_set("sensor.four", "any")
|
||||
assert refresh_runs == ["1_none", "1_any"]
|
||||
hass.states.async_set("sensor.one", "none")
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1", "2"]
|
||||
next_time = dt_util.utcnow() + timedelta(seconds=0.125 * 2)
|
||||
with patch(
|
||||
"homeassistant.helpers.ratelimit.dt_util.utcnow", return_value=next_time
|
||||
):
|
||||
async_fire_time_changed(hass, next_time)
|
||||
await hass.async_block_till_done()
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1", "2"]
|
||||
hass.states.async_set("sensor.five", "any")
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1", "2"]
|
||||
|
||||
|
||||
async def test_track_template_rate_limit_removed(hass):
|
||||
"""Test template rate limit can be removed."""
|
||||
template_refresh = Template(
|
||||
"""
|
||||
{% if states.sensor.two.state == "any" %}
|
||||
{% set x = rate_limit(0) %}
|
||||
{% else %}
|
||||
{% set x = rate_limit(seconds=0.1) %}
|
||||
{% endif %}
|
||||
{{ states | count }}
|
||||
""",
|
||||
hass,
|
||||
)
|
||||
|
||||
refresh_runs = []
|
||||
|
||||
@ha.callback
|
||||
def refresh_listener(event, updates):
|
||||
refresh_runs.append(updates.pop().result)
|
||||
|
||||
info = async_track_template_result(
|
||||
hass,
|
||||
[TrackTemplate(template_refresh, None)],
|
||||
refresh_listener,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
info.async_refresh()
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert refresh_runs == ["0"]
|
||||
hass.states.async_set("sensor.one", "any")
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0"]
|
||||
info.async_refresh()
|
||||
assert refresh_runs == ["0", "1"]
|
||||
hass.states.async_set("sensor.two", "any")
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1"]
|
||||
next_time = dt_util.utcnow() + timedelta(seconds=0.125 * 1)
|
||||
with patch(
|
||||
"homeassistant.helpers.ratelimit.dt_util.utcnow", return_value=next_time
|
||||
):
|
||||
async_fire_time_changed(hass, next_time)
|
||||
await hass.async_block_till_done()
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1", "2"]
|
||||
hass.states.async_set("sensor.three", "any")
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1", "2", "3"]
|
||||
hass.states.async_set("sensor.four", "any")
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1", "2", "3", "4"]
|
||||
hass.states.async_set("sensor.five", "any")
|
||||
await hass.async_block_till_done()
|
||||
assert refresh_runs == ["0", "1", "2", "3", "4", "5"]
|
||||
assert refresh_runs == ["1_none", "1_any", "3_none"]
|
||||
|
||||
|
||||
async def test_track_two_templates_with_different_rate_limits(hass):
|
||||
"""Test two templates with different rate limits."""
|
||||
template_one = Template(
|
||||
"{% set x = rate_limit(seconds=0.1) %}{{ states | count }}", hass
|
||||
)
|
||||
template_five = Template(
|
||||
"{% set x = rate_limit(seconds=5) %}{{ states | count }}", hass
|
||||
)
|
||||
template_one = Template("{{ states | count }} ", hass)
|
||||
template_five = Template("{{ states | count }}", hass)
|
||||
|
||||
refresh_runs = {
|
||||
template_one: [],
|
||||
@@ -1684,7 +1543,10 @@ async def test_track_two_templates_with_different_rate_limits(hass):
|
||||
|
||||
info = async_track_template_result(
|
||||
hass,
|
||||
[TrackTemplate(template_one, None), TrackTemplate(template_five, None)],
|
||||
[
|
||||
TrackTemplate(template_one, None, timedelta(seconds=0.1)),
|
||||
TrackTemplate(template_five, None, timedelta(seconds=5)),
|
||||
],
|
||||
refresh_listener,
|
||||
)
|
||||
|
||||
@@ -1867,9 +1729,7 @@ async def test_async_track_template_result_multiple_templates_mixing_domain(hass
|
||||
template_1 = Template("{{ states.switch.test.state == 'on' }}")
|
||||
template_2 = Template("{{ states.switch.test.state == 'on' }}")
|
||||
template_3 = Template("{{ states.switch.test.state == 'off' }}")
|
||||
template_4 = Template(
|
||||
"{{ rate_limit(seconds=0) }}{{ states.switch | map(attribute='entity_id') | list }}"
|
||||
)
|
||||
template_4 = Template("{{ states.switch | map(attribute='entity_id') | list }}")
|
||||
|
||||
refresh_runs = []
|
||||
|
||||
@@ -1883,7 +1743,7 @@ async def test_async_track_template_result_multiple_templates_mixing_domain(hass
|
||||
TrackTemplate(template_1, None),
|
||||
TrackTemplate(template_2, None),
|
||||
TrackTemplate(template_3, None),
|
||||
TrackTemplate(template_4, None),
|
||||
TrackTemplate(template_4, None, timedelta(seconds=0)),
|
||||
],
|
||||
refresh_listener,
|
||||
)
|
||||
|
||||
@@ -16,6 +16,7 @@ import homeassistant.components.scene as scene
|
||||
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_ON
|
||||
from homeassistant.core import Context, CoreState, callback
|
||||
from homeassistant.helpers import config_validation as cv, script
|
||||
from homeassistant.setup import async_setup_component
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from tests.async_mock import patch
|
||||
@@ -1332,6 +1333,10 @@ async def test_referenced_entities(hass):
|
||||
"service": "test.script",
|
||||
"data": {"entity_id": ["light.service_list"]},
|
||||
},
|
||||
{
|
||||
"service": "test.script",
|
||||
"data": {"entity_id": "{{ 'light.service_template' }}"},
|
||||
},
|
||||
{
|
||||
"condition": "state",
|
||||
"entity_id": "sensor.condition",
|
||||
@@ -1824,3 +1829,114 @@ async def test_set_redefines_variable(hass, caplog):
|
||||
|
||||
assert mock_calls[0].data["value"] == "1"
|
||||
assert mock_calls[1].data["value"] == "2"
|
||||
|
||||
|
||||
async def test_validate_action_config(hass):
|
||||
"""Validate action config."""
|
||||
configs = {
|
||||
cv.SCRIPT_ACTION_CALL_SERVICE: {"service": "light.turn_on"},
|
||||
cv.SCRIPT_ACTION_DELAY: {"delay": 5},
|
||||
cv.SCRIPT_ACTION_WAIT_TEMPLATE: {
|
||||
"wait_template": "{{ states.light.kitchen.state == 'on' }}"
|
||||
},
|
||||
cv.SCRIPT_ACTION_FIRE_EVENT: {"event": "my_event"},
|
||||
cv.SCRIPT_ACTION_CHECK_CONDITION: {
|
||||
"condition": "{{ states.light.kitchen.state == 'on' }}"
|
||||
},
|
||||
cv.SCRIPT_ACTION_DEVICE_AUTOMATION: {
|
||||
"domain": "light",
|
||||
"entity_id": "light.kitchen",
|
||||
"device_id": "abcd",
|
||||
"type": "turn_on",
|
||||
},
|
||||
cv.SCRIPT_ACTION_ACTIVATE_SCENE: {"scene": "scene.relax"},
|
||||
cv.SCRIPT_ACTION_REPEAT: {
|
||||
"repeat": {"count": 3, "sequence": [{"event": "repeat_event"}]}
|
||||
},
|
||||
cv.SCRIPT_ACTION_CHOOSE: {
|
||||
"choose": [
|
||||
{
|
||||
"condition": "{{ states.light.kitchen.state == 'on' }}",
|
||||
"sequence": [{"event": "choose_event"}],
|
||||
}
|
||||
],
|
||||
"default": [{"event": "choose_default_event"}],
|
||||
},
|
||||
cv.SCRIPT_ACTION_WAIT_FOR_TRIGGER: {
|
||||
"wait_for_trigger": [
|
||||
{"platform": "event", "event_type": "wait_for_trigger_event"}
|
||||
]
|
||||
},
|
||||
cv.SCRIPT_ACTION_VARIABLES: {"variables": {"hello": "world"}},
|
||||
}
|
||||
|
||||
for key in cv.ACTION_TYPE_SCHEMAS:
|
||||
assert key in configs, f"No validate config test found for {key}"
|
||||
|
||||
# Verify we raise if we don't know the action type
|
||||
with patch(
|
||||
"homeassistant.helpers.config_validation.determine_script_action",
|
||||
return_value="non-existing",
|
||||
), pytest.raises(ValueError):
|
||||
await script.async_validate_action_config(hass, {})
|
||||
|
||||
for action_type, config in configs.items():
|
||||
assert cv.determine_script_action(config) == action_type
|
||||
try:
|
||||
await script.async_validate_action_config(hass, config)
|
||||
except vol.Invalid as err:
|
||||
assert False, f"{action_type} config invalid: {err}"
|
||||
|
||||
|
||||
async def test_embedded_wait_for_trigger_in_automation(hass):
    """Test an embedded wait for trigger."""
    # Automation under test: while test.value1 == "trigger-while", fire an
    # event, wait for test.value2 to become "trigger-wait", then call
    # test.script.  The dict below is byte-identical to the original config.
    automation_config = {
        "automation": {
            "trigger": {"platform": "event", "event_type": "test_event"},
            "action": {
                "repeat": {
                    "while": [
                        {
                            "condition": "template",
                            "value_template": '{{ is_state("test.value1", "trigger-while") }}',
                        }
                    ],
                    "sequence": [
                        {"event": "trigger_wait_event"},
                        {
                            "wait_for_trigger": [
                                {
                                    "platform": "template",
                                    "value_template": '{{ is_state("test.value2", "trigger-wait") }}',
                                }
                            ]
                        },
                        {"service": "test.script"},
                    ],
                }
            },
        }
    }
    assert await async_setup_component(hass, "automation", automation_config)

    # Arm the while-condition and make sure the wait trigger is NOT yet met.
    hass.states.async_set("test.value1", "trigger-while")
    hass.states.async_set("test.value2", "not-trigger-wait")
    mock_calls = async_mock_service(hass, "test", "script")

    async def flip_states(_):
        # Yield once so the script has time to attach the wait_for_trigger.
        await asyncio.sleep(0)
        # Break the while-loop condition and satisfy the wait trigger.
        hass.states.async_set("test.value1", "not-trigger-while")
        hass.states.async_set("test.value2", "trigger-wait")

    hass.bus.async_listen("trigger_wait_event", flip_states)

    # Start automation
    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()

    # The repeat loop ran exactly once before its condition went false.
    assert len(mock_calls) == 1
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
"""Test Home Assistant template helper methods."""
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import datetime
|
||||
import math
|
||||
import random
|
||||
|
||||
@@ -2617,28 +2617,3 @@ async def test_unavailable_states(hass):
|
||||
hass,
|
||||
)
|
||||
assert tpl.async_render() == "light.none, light.unavailable, light.unknown"
|
||||
|
||||
|
||||
async def test_rate_limit(hass):
    """Test we can pickup a rate limit directive."""
    # A plain template reports no rate limit in its render info.
    info = template.Template("{{ states | count }}", hass).async_render_to_info()
    assert info.rate_limit is None

    # rate_limit() records a throttle interval in the render info.
    info = template.Template(
        "{{ rate_limit(minutes=1) }}{{ states | count }}", hass
    ).async_render_to_info()
    assert info.rate_limit == timedelta(minutes=1)

    # The directive itself renders to nothing; surrounding text survives.
    info = template.Template(
        "{{ rate_limit(minutes=1) }}random", hass
    ).async_render_to_info()
    assert info.result() == "random"
    assert info.rate_limit == timedelta(minutes=1)

    # A zero-second limit is preserved as-is, not collapsed to "no limit".
    info = template.Template(
        "{{ rate_limit(seconds=0) }}random", hass
    ).async_render_to_info()
    assert info.result() == "random"
    assert info.rate_limit == timedelta(seconds=0)
|
||||
|
||||
Reference in New Issue
Block a user