Compare commits

..

29 Commits

Author SHA1 Message Date
Paulus Schoutsen
8b0e10d8a5 Merge pull request #73193 from home-assistant/rc 2022-06-07 17:32:30 -07:00
Paulus Schoutsen
2ee4cd02c7 Bumped version to 2022.6.4 2022-06-07 16:16:30 -07:00
J. Nick Koston
d63569da82 Remove sqlalchemy lambda_stmt usage from history, logbook, and statistics (#73191) 2022-06-07 16:16:25 -07:00
Eric Severance
bc7cf1f649 Bump pywemo to 0.9.1 (#73186) 2022-06-07 16:16:24 -07:00
Michael
d6b1a7ca68 Fix creating unique IDs for WiFi switches in Fritz!Tools (#73183) 2022-06-07 16:16:23 -07:00
Aaron Bach
af248fa386 Fix bugs with RainMachine zone run time sensors (#73179) 2022-06-07 16:16:22 -07:00
Jesse Hills
fa56e3633d Fix KeyError from ESPHome media players on startup (#73149) 2022-06-07 16:16:22 -07:00
Michael
e886d37124 Use default None for voltage property of FritzDevice in Fritz!Smarthome (#73141)
use default None for device.voltage
2022-06-07 16:16:21 -07:00
Steven Looman
f4ed7720de Bump async-upnp-client==0.31.1 (#73135)
Co-authored-by: J. Nick Koston <nick@koston.org>
2022-06-07 16:16:20 -07:00
jjlawren
70473df2fe Fix errors when unjoining multiple Sonos devices simultaneously (#73133) 2022-06-07 16:16:19 -07:00
Paulus Schoutsen
0d31d94532 Merge pull request #73136 from home-assistant/rc 2022-06-06 15:07:58 -07:00
J. Nick Koston
4678466560 Remove unused code from logbook (#72950) 2022-06-06 13:13:09 -07:00
J. Nick Koston
a886c6110d Fix state_changes_during_period history query when no entities are passed (#73139) 2022-06-06 12:51:16 -07:00
Paulus Schoutsen
33f282af46 Point iAlarm XR at PyPI fork (#73143) 2022-06-06 12:49:30 -07:00
Paulus Schoutsen
2f3232f087 Bumped version to 2022.6.3 2022-06-06 12:18:57 -07:00
Igor Loborec
54ff6ddd41 Remove available property from Kodi (#73103) 2022-06-06 12:18:29 -07:00
lymanepp
eef79e2912 Tomorrowio UTC fix (#73102)
* Discard past data using local time instead of UTC

* Tweak changes to fix tests

* Cleanup
2022-06-06 12:18:28 -07:00
J. Nick Koston
93aad108a7 Mark counter domain as continuous to exclude it from logbook (#73101) 2022-06-06 12:18:27 -07:00
Glenn Waters
792ebbb600 Fix elk attributes not being json serializable (#73096)
* Fix jsonifying.

* Only serialize Enums
2022-06-06 12:18:26 -07:00
J. Nick Koston
c47774e273 Fix incompatibility with live logbook and google_assistant (#73063) 2022-06-06 12:18:26 -07:00
Aaron Bach
22bdeab1e7 Bump regenmaschine to 2022.06.0 (#73056) 2022-06-06 12:18:25 -07:00
Aaron Bach
ca05cde6ba Fix unhandled exception when RainMachine coordinator data doesn't exist (#73055) 2022-06-06 12:18:24 -07:00
Aaron Bach
1e59ce19f5 Bump simplisafe-python to 2022.06.0 (#73054) 2022-06-06 12:18:23 -07:00
J. Nick Koston
7bdada7898 Bump aiolookup to 0.1.1 (#73048) 2022-06-06 12:18:23 -07:00
J. Nick Koston
06a2fe94d3 Send an empty logbook response when all requested entity_ids are filtered away (#73046) 2022-06-06 12:18:22 -07:00
J. Nick Koston
854b0dbb2d Reduce branching in generated lambda_stmts (#73042) 2022-06-06 12:18:21 -07:00
rappenze
bd8424d184 Fix fibaro cover detection (#72986) 2022-06-06 12:18:20 -07:00
hesselonline
b50e3d5ce7 Bump wallbox to 0.4.9 (#72978) 2022-06-06 12:18:19 -07:00
Marcel van der Veldt
39c6a57c35 Throttle multiple requests to the velux gateway (#72974) 2022-06-06 12:18:18 -07:00
65 changed files with 1271 additions and 628 deletions

View File

@@ -3,7 +3,7 @@
   "name": "DLNA Digital Media Renderer",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/dlna_dmr",
-  "requirements": ["async-upnp-client==0.30.1"],
+  "requirements": ["async-upnp-client==0.31.1"],
   "dependencies": ["ssdp"],
   "after_dependencies": ["media_source"],
   "ssdp": [

View File

@@ -3,7 +3,7 @@
   "name": "DLNA Digital Media Server",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/dlna_dms",
-  "requirements": ["async-upnp-client==0.30.1"],
+  "requirements": ["async-upnp-client==0.31.1"],
   "dependencies": ["ssdp"],
   "after_dependencies": ["media_source"],
   "ssdp": [

View File

@@ -2,6 +2,7 @@
 from __future__ import annotations

 import asyncio
+from enum import Enum
 import logging
 import re
 from types import MappingProxyType
@@ -481,7 +482,10 @@ class ElkEntity(Entity):
     @property
     def extra_state_attributes(self) -> dict[str, Any]:
         """Return the default attributes of the element."""
-        return {**self._element.as_dict(), **self.initial_attrs()}
+        dict_as_str = {}
+        for key, val in self._element.as_dict().items():
+            dict_as_str[key] = val.value if isinstance(val, Enum) else val
+        return {**dict_as_str, **self.initial_attrs()}

     @property
     def available(self) -> bool:
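
For context on why the hunk above converts Enum members before they reach extra_state_attributes: Enum values are not JSON serializable, so leaving one in the attribute dict breaks state serialization. A minimal sketch (the Mode enum here is a hypothetical stand-in, not from the elkm1 library):

    import json
    from enum import Enum

    class Mode(Enum):  # hypothetical stand-in for an elkm1 enum value
        HOME = "home"
        AWAY = "away"

    attrs = {"mode": Mode.HOME, "zone": 3}
    # json.dumps(attrs) would raise: TypeError: Object of type Mode is not JSON serializable
    safe = {k: v.value if isinstance(v, Enum) else v for k, v in attrs.items()}
    print(json.dumps(safe))  # {"mode": "home", "zone": 3}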

View File

@@ -25,7 +25,12 @@ from homeassistant.const import STATE_IDLE, STATE_PAUSED, STATE_PLAYING
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback

-from . import EsphomeEntity, EsphomeEnumMapper, platform_async_setup_entry
+from . import (
+    EsphomeEntity,
+    EsphomeEnumMapper,
+    esphome_state_property,
+    platform_async_setup_entry,
+)

 async def async_setup_entry(
@@ -54,6 +59,10 @@ _STATES: EsphomeEnumMapper[MediaPlayerState, str] = EsphomeEnumMapper(
 )


+# https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property
+# pylint: disable=invalid-overridden-method
 class EsphomeMediaPlayer(
     EsphomeEntity[MediaPlayerInfo, MediaPlayerEntityState], MediaPlayerEntity
 ):
@@ -61,17 +70,17 @@ class EsphomeMediaPlayer(
     _attr_device_class = MediaPlayerDeviceClass.SPEAKER

-    @property
+    @esphome_state_property
     def state(self) -> str | None:
         """Return current state."""
         return _STATES.from_esphome(self._state.state)

-    @property
+    @esphome_state_property
     def is_volume_muted(self) -> bool:
         """Return true if volume is muted."""
         return self._state.muted

-    @property
+    @esphome_state_property
     def volume_level(self) -> float | None:
         """Volume level of the media player (0..1)."""
         return self._state.volume

View File

@@ -46,6 +46,8 @@ class FibaroCover(FibaroDevice, CoverEntity):
             self._attr_supported_features = (
                 CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
             )
+            if "stop" in self.fibaro_device.actions:
+                self._attr_supported_features |= CoverEntityFeature.STOP

     @staticmethod
     def bound(position):

View File

@@ -169,7 +169,16 @@ def wifi_entities_list(
     }
     for i, network in networks.copy().items():
         networks[i]["switch_name"] = network["ssid"]
-        if len([j for j, n in networks.items() if n["ssid"] == network["ssid"]]) > 1:
+        if (
+            len(
+                [
+                    j
+                    for j, n in networks.items()
+                    if slugify(n["ssid"]) == slugify(network["ssid"])
+                ]
+            )
+            > 1
+        ):
             networks[i]["switch_name"] += f" ({WIFI_STANDARD[i]})"

     _LOGGER.debug("WiFi networks list: %s", networks)

View File

@@ -96,7 +96,9 @@ SENSOR_TYPES: Final[tuple[FritzSensorEntityDescription, ...]] = (
         device_class=SensorDeviceClass.VOLTAGE,
         state_class=SensorStateClass.MEASUREMENT,
         suitable=lambda device: device.has_powermeter,  # type: ignore[no-any-return]
-        native_value=lambda device: device.voltage / 1000 if device.voltage else 0.0,
+        native_value=lambda device: device.voltage / 1000
+        if getattr(device, "voltage", None)
+        else 0.0,
     ),
     FritzSensorEntityDescription(
         key="electric_current",
@@ -106,7 +108,7 @@ SENSOR_TYPES: Final[tuple[FritzSensorEntityDescription, ...]] = (
         state_class=SensorStateClass.MEASUREMENT,
         suitable=lambda device: device.has_powermeter,  # type: ignore[no-any-return]
         native_value=lambda device: device.power / device.voltage
-        if device.power and device.voltage
+        if device.power and getattr(device, "voltage", None)
         else 0.0,
     ),
     FritzSensorEntityDescription(
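
The getattr(device, "voltage", None) guard covers devices whose object lacks the voltage attribute entirely, where a bare device.voltage access would raise AttributeError rather than return a falsy value. A minimal sketch with a hypothetical device class:

    class FakeDevice:  # hypothetical: a smarthome device without a voltage reading
        power = 5.0

    device = FakeDevice()
    # device.voltage would raise AttributeError here
    voltage = getattr(device, "voltage", None)
    value = device.power / voltage if device.power and voltage else 0.0
    print(value)  # 0.0 instead of a crash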

View File

@@ -2,7 +2,7 @@
   "domain": "ialarm_xr",
   "name": "Antifurto365 iAlarmXR",
   "documentation": "https://www.home-assistant.io/integrations/ialarm_xr",
-  "requirements": ["pyialarmxr==1.0.18"],
+  "requirements": ["pyialarmxr-homeassistant==1.0.18"],
   "codeowners": ["@bigmoby"],
   "config_flow": true,
   "iot_class": "cloud_polling",

View File

@@ -636,11 +636,6 @@ class KodiEntity(MediaPlayerEntity):
         return None

-    @property
-    def available(self):
-        """Return True if entity is available."""
-        return not self._connect_error
-
     async def async_turn_on(self):
         """Turn the media player on."""
         _LOGGER.debug("Firing event to turn on device")

View File

@@ -2,9 +2,18 @@
 from __future__ import annotations

 from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED
+from homeassistant.components.counter import DOMAIN as COUNTER_DOMAIN
+from homeassistant.components.proximity import DOMAIN as PROXIMITY_DOMAIN
 from homeassistant.components.script import EVENT_SCRIPT_STARTED
+from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
 from homeassistant.const import EVENT_CALL_SERVICE, EVENT_LOGBOOK_ENTRY

+# Domains that are always continuous
+ALWAYS_CONTINUOUS_DOMAINS = {COUNTER_DOMAIN, PROXIMITY_DOMAIN}
+
+# Domains that are continuous if there is a UOM set on the entity
+CONDITIONALLY_CONTINUOUS_DOMAINS = {SENSOR_DOMAIN}
+
 ATTR_MESSAGE = "message"

 DOMAIN = "logbook"
@@ -30,13 +39,11 @@ LOGBOOK_ENTRY_NAME = "name"
 LOGBOOK_ENTRY_STATE = "state"
 LOGBOOK_ENTRY_WHEN = "when"

-ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED = {EVENT_LOGBOOK_ENTRY, EVENT_CALL_SERVICE}
-ENTITY_EVENTS_WITHOUT_CONFIG_ENTRY = {
-    EVENT_LOGBOOK_ENTRY,
-    EVENT_AUTOMATION_TRIGGERED,
-    EVENT_SCRIPT_STARTED,
-}
+# Automation events that can affect an entity_id or device_id
+AUTOMATION_EVENTS = {EVENT_AUTOMATION_TRIGGERED, EVENT_SCRIPT_STARTED}
+# Events that are built-in to the logbook or core
+BUILT_IN_EVENTS = {EVENT_LOGBOOK_ENTRY, EVENT_CALL_SERVICE}

 LOGBOOK_FILTERS = "logbook_filters"
 LOGBOOK_ENTITIES_FILTER = "entities_filter"

View File

@@ -7,6 +7,7 @@ from typing import Any
from homeassistant.components.sensor import ATTR_STATE_CLASS
from homeassistant.const import (
ATTR_DEVICE_ID,
ATTR_DOMAIN,
ATTR_ENTITY_ID,
ATTR_UNIT_OF_MEASUREMENT,
EVENT_LOGBOOK_ENTRY,
@@ -19,15 +20,13 @@ from homeassistant.core import (
State,
callback,
is_callback,
split_entity_id,
)
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.entityfilter import EntityFilter
from homeassistant.helpers.event import async_track_state_change_event
from .const import (
ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED,
DOMAIN,
ENTITY_EVENTS_WITHOUT_CONFIG_ENTRY,
)
from .const import ALWAYS_CONTINUOUS_DOMAINS, AUTOMATION_EVENTS, BUILT_IN_EVENTS, DOMAIN
from .models import LazyEventPartialState
@@ -41,6 +40,25 @@ def async_filter_entities(hass: HomeAssistant, entity_ids: list[str]) -> list[st
]
@callback
def _async_config_entries_for_ids(
hass: HomeAssistant, entity_ids: list[str] | None, device_ids: list[str] | None
) -> set[str]:
"""Find the config entry ids for a set of entities or devices."""
config_entry_ids: set[str] = set()
if entity_ids:
eng_reg = er.async_get(hass)
for entity_id in entity_ids:
if (entry := eng_reg.async_get(entity_id)) and entry.config_entry_id:
config_entry_ids.add(entry.config_entry_id)
if device_ids:
dev_reg = dr.async_get(hass)
for device_id in device_ids:
if (device := dev_reg.async_get(device_id)) and device.config_entries:
config_entry_ids |= device.config_entries
return config_entry_ids
def async_determine_event_types(
hass: HomeAssistant, entity_ids: list[str] | None, device_ids: list[str] | None
) -> tuple[str, ...]:
@@ -49,42 +67,91 @@ def async_determine_event_types(
str, tuple[str, Callable[[LazyEventPartialState], dict[str, Any]]]
] = hass.data.get(DOMAIN, {})
if not entity_ids and not device_ids:
return (*ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED, *external_events)
config_entry_ids: set[str] = set()
intrested_event_types: set[str] = set()
return (*BUILT_IN_EVENTS, *external_events)
interested_domains: set[str] = set()
for entry_id in _async_config_entries_for_ids(hass, entity_ids, device_ids):
if entry := hass.config_entries.async_get_entry(entry_id):
interested_domains.add(entry.domain)
#
# automations and scripts can refer to entities or devices
# but they do not have a config entry so we need
# to add them since we have historically included
# them when matching only on entities
#
intrested_event_types: set[str] = {
external_event
for external_event, domain_call in external_events.items()
if domain_call[0] in interested_domains
} | AUTOMATION_EVENTS
if entity_ids:
#
# Home Assistant doesn't allow firing events from
# entities so we have a limited list to check
#
# automations and scripts can refer to entities
# but they do not have a config entry so we need
# to add them.
#
# We also allow entity_ids to be recorded via
# manual logbook entries.
#
intrested_event_types |= ENTITY_EVENTS_WITHOUT_CONFIG_ENTRY
# We also allow entity_ids to be recorded via manual logbook entries.
intrested_event_types.add(EVENT_LOGBOOK_ENTRY)
if device_ids:
dev_reg = dr.async_get(hass)
for device_id in device_ids:
if (device := dev_reg.async_get(device_id)) and device.config_entries:
config_entry_ids |= device.config_entries
interested_domains: set[str] = set()
for entry_id in config_entry_ids:
if entry := hass.config_entries.async_get_entry(entry_id):
interested_domains.add(entry.domain)
for external_event, domain_call in external_events.items():
if domain_call[0] in interested_domains:
intrested_event_types.add(external_event)
return tuple(intrested_event_types)
return tuple(
event_type
for event_type in (EVENT_LOGBOOK_ENTRY, *external_events)
if event_type in intrested_event_types
)
@callback
def extract_attr(source: dict[str, Any], attr: str) -> list[str]:
"""Extract an attribute as a list or string."""
if (value := source.get(attr)) is None:
return []
if isinstance(value, list):
return value
return str(value).split(",")
@callback
def event_forwarder_filtered(
target: Callable[[Event], None],
entities_filter: EntityFilter | None,
entity_ids: list[str] | None,
device_ids: list[str] | None,
) -> Callable[[Event], None]:
"""Make a callable to filter events."""
if not entities_filter and not entity_ids and not device_ids:
# No filter
# - Script Trace (context ids)
# - Automation Trace (context ids)
return target
if entities_filter:
# We have an entity filter:
# - Logbook panel
@callback
def _forward_events_filtered_by_entities_filter(event: Event) -> None:
assert entities_filter is not None
event_data = event.data
entity_ids = extract_attr(event_data, ATTR_ENTITY_ID)
if entity_ids and not any(
entities_filter(entity_id) for entity_id in entity_ids
):
return
domain = event_data.get(ATTR_DOMAIN)
if domain and not entities_filter(f"{domain}._"):
return
target(event)
return _forward_events_filtered_by_entities_filter
# We are filtering on entity_ids and/or device_ids:
# - Areas
# - Devices
# - Logbook Card
entity_ids_set = set(entity_ids) if entity_ids else set()
device_ids_set = set(device_ids) if device_ids else set()
@callback
def _forward_events_filtered_by_device_entity_ids(event: Event) -> None:
event_data = event.data
if entity_ids_set.intersection(
extract_attr(event_data, ATTR_ENTITY_ID)
) or device_ids_set.intersection(extract_attr(event_data, ATTR_DEVICE_ID)):
target(event)
return _forward_events_filtered_by_device_entity_ids
@callback
@@ -93,6 +160,7 @@ def async_subscribe_events(
subscriptions: list[CALLBACK_TYPE],
target: Callable[[Event], None],
event_types: tuple[str, ...],
entities_filter: EntityFilter | None,
entity_ids: list[str] | None,
device_ids: list[str] | None,
) -> None:
@@ -103,41 +171,31 @@ def async_subscribe_events(
"""
ent_reg = er.async_get(hass)
assert is_callback(target), "target must be a callback"
event_forwarder = target
if entity_ids or device_ids:
entity_ids_set = set(entity_ids) if entity_ids else set()
device_ids_set = set(device_ids) if device_ids else set()
@callback
def _forward_events_filtered(event: Event) -> None:
event_data = event.data
if (
entity_ids_set and event_data.get(ATTR_ENTITY_ID) in entity_ids_set
) or (device_ids_set and event_data.get(ATTR_DEVICE_ID) in device_ids_set):
target(event)
event_forwarder = _forward_events_filtered
event_forwarder = event_forwarder_filtered(
target, entities_filter, entity_ids, device_ids
)
for event_type in event_types:
subscriptions.append(
hass.bus.async_listen(event_type, event_forwarder, run_immediately=True)
)
@callback
def _forward_state_events_filtered(event: Event) -> None:
if event.data.get("old_state") is None or event.data.get("new_state") is None:
return
state: State = event.data["new_state"]
if not _is_state_filtered(ent_reg, state):
target(event)
if device_ids and not entity_ids:
# No entities to subscribe to but we are filtering
# on device ids so we do not want to get any state
# changed events
return
@callback
def _forward_state_events_filtered(event: Event) -> None:
if event.data.get("old_state") is None or event.data.get("new_state") is None:
return
state: State = event.data["new_state"]
if _is_state_filtered(ent_reg, state) or (
entities_filter and not entities_filter(state.entity_id)
):
return
target(event)
if entity_ids:
subscriptions.append(
async_track_state_change_event(
@@ -178,7 +236,8 @@ def _is_state_filtered(ent_reg: er.EntityRegistry, state: State) -> bool:
we only get significant changes (state.last_changed != state.last_updated)
"""
return bool(
state.last_changed != state.last_updated
split_entity_id(state.entity_id)[0] in ALWAYS_CONTINUOUS_DOMAINS
or state.last_changed != state.last_updated
or ATTR_UNIT_OF_MEASUREMENT in state.attributes
or is_sensor_continuous(ent_reg, state.entity_id)
)
@@ -193,7 +252,8 @@ def _is_entity_id_filtered(
from the database when a list of entities is requested.
"""
return bool(
(state := hass.states.get(entity_id))
split_entity_id(entity_id)[0] in ALWAYS_CONTINUOUS_DOMAINS
or (state := hass.states.get(entity_id))
and (ATTR_UNIT_OF_MEASUREMENT in state.attributes)
or is_sensor_continuous(ent_reg, entity_id)
)
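
The extract_attr helper above normalizes the three shapes an entity_id/device_id payload can take: missing, already a list, or a comma-separated string. A quick self-contained sketch of the same logic:

    def extract_attr(source: dict, attr: str) -> list[str]:
        # same logic as the helper above
        if (value := source.get(attr)) is None:
            return []
        if isinstance(value, list):
            return value
        return str(value).split(",")

    print(extract_attr({}, "entity_id"))                                     # []
    print(extract_attr({"entity_id": ["light.a", "light.b"]}, "entity_id"))  # ['light.a', 'light.b']
    print(extract_attr({"entity_id": "light.a,light.b"}, "entity_id"))       # ['light.a', 'light.b']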

View File

@@ -5,8 +5,6 @@ from collections.abc import Callable, Generator
from contextlib import suppress
from dataclasses import dataclass
from datetime import datetime as dt
import logging
import re
from typing import Any
from sqlalchemy.engine.row import Row
@@ -30,7 +28,6 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant, split_entity_id
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entityfilter import EntityFilter
import homeassistant.util.dt as dt_util
from .const import (
@@ -46,7 +43,6 @@ from .const import (
CONTEXT_STATE,
CONTEXT_USER_ID,
DOMAIN,
LOGBOOK_ENTITIES_FILTER,
LOGBOOK_ENTRY_DOMAIN,
LOGBOOK_ENTRY_ENTITY_ID,
LOGBOOK_ENTRY_ICON,
@@ -62,11 +58,6 @@ from .models import EventAsRow, LazyEventPartialState, async_event_to_row
from .queries import statement_for_request
from .queries.common import PSUEDO_EVENT_STATE_CHANGED
_LOGGER = logging.getLogger(__name__)
ENTITY_ID_JSON_EXTRACT = re.compile('"entity_id": ?"([^"]+)"')
DOMAIN_JSON_EXTRACT = re.compile('"domain": ?"([^"]+)"')
@dataclass
class LogbookRun:
@@ -106,10 +97,6 @@ class EventProcessor:
self.device_ids = device_ids
self.context_id = context_id
self.filters: Filters | None = hass.data[LOGBOOK_FILTERS]
if self.limited_select:
self.entities_filter: EntityFilter | Callable[[str], bool] | None = None
else:
self.entities_filter = hass.data[LOGBOOK_ENTITIES_FILTER]
format_time = (
_row_time_fired_timestamp if timestamp else _row_time_fired_isoformat
)
@@ -183,7 +170,6 @@ class EventProcessor:
return list(
_humanify(
row_generator,
self.entities_filter,
self.ent_reg,
self.logbook_run,
self.context_augmenter,
@@ -193,7 +179,6 @@ class EventProcessor:
def _humanify(
rows: Generator[Row | EventAsRow, None, None],
entities_filter: EntityFilter | Callable[[str], bool] | None,
ent_reg: er.EntityRegistry,
logbook_run: LogbookRun,
context_augmenter: ContextAugmenter,
@@ -208,29 +193,13 @@ def _humanify(
include_entity_name = logbook_run.include_entity_name
format_time = logbook_run.format_time
def _keep_row(row: EventAsRow) -> bool:
"""Check if the entity_filter rejects a row."""
assert entities_filter is not None
if entity_id := row.entity_id:
return entities_filter(entity_id)
if entity_id := row.data.get(ATTR_ENTITY_ID):
return entities_filter(entity_id)
if domain := row.data.get(ATTR_DOMAIN):
return entities_filter(f"{domain}._")
return True
# Process rows
for row in rows:
context_id = context_lookup.memorize(row)
if row.context_only:
continue
event_type = row.event_type
if event_type == EVENT_CALL_SERVICE or (
entities_filter
# We literally mean is EventAsRow not a subclass of EventAsRow
and type(row) is EventAsRow # pylint: disable=unidiomatic-typecheck
and not _keep_row(row)
):
if event_type == EVENT_CALL_SERVICE:
continue
if event_type is PSUEDO_EVENT_STATE_CHANGED:
entity_id = row.entity_id
@@ -417,12 +386,6 @@ def _rows_match(row: Row | EventAsRow, other_row: Row | EventAsRow) -> bool:
return False
def _row_event_data_extract(row: Row | EventAsRow, extractor: re.Pattern) -> str | None:
"""Extract from event_data row."""
result = extractor.search(row.shared_data or row.event_data or "")
return result.group(1) if result else None
def _row_time_fired_isoformat(row: Row | EventAsRow) -> str:
"""Convert the row timed_fired to isoformat."""
return process_timestamp_to_utc_isoformat(row.time_fired or dt_util.utcnow())
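
For reference, the removed ENTITY_ID_JSON_EXTRACT and DOMAIN_JSON_EXTRACT patterns pulled values straight out of the serialized event_data text instead of decoding the JSON. A small sketch of what that extraction did:

    import re

    ENTITY_ID_JSON_EXTRACT = re.compile('"entity_id": ?"([^"]+)"')

    shared_data = '{"entity_id": "light.kitchen", "brightness_pct": 50}'
    match = ENTITY_ID_JSON_EXTRACT.search(shared_data)
    print(match.group(1) if match else None)  # light.kitchen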

View File

@@ -2,8 +2,9 @@
 from __future__ import annotations

 from datetime import datetime as dt
+import json

-from sqlalchemy.sql.lambdas import StatementLambdaElement
+from sqlalchemy.sql.selectable import Select

 from homeassistant.components.recorder.filters import Filters
@@ -21,7 +22,7 @@ def statement_for_request(
     device_ids: list[str] | None = None,
     filters: Filters | None = None,
     context_id: str | None = None,
-) -> StatementLambdaElement:
+) -> Select:
     """Generate the logbook statement for a logbook request."""

     # No entities: logbook sends everything for the timeframe
@@ -38,41 +39,36 @@ def statement_for_request(
             context_id,
         )

-    # sqlalchemy caches object quoting, the
-    # json quotable ones must be a different
-    # object from the non-json ones to prevent
-    # sqlalchemy from quoting them incorrectly
-
     # entities and devices: logbook sends everything for the timeframe for the entities and devices
     if entity_ids and device_ids:
-        json_quotable_entity_ids = list(entity_ids)
-        json_quotable_device_ids = list(device_ids)
+        json_quoted_entity_ids = [json.dumps(entity_id) for entity_id in entity_ids]
+        json_quoted_device_ids = [json.dumps(device_id) for device_id in device_ids]
        return entities_devices_stmt(
             start_day,
             end_day,
             event_types,
             entity_ids,
-            json_quotable_entity_ids,
-            json_quotable_device_ids,
+            json_quoted_entity_ids,
+            json_quoted_device_ids,
         )

     # entities: logbook sends everything for the timeframe for the entities
     if entity_ids:
-        json_quotable_entity_ids = list(entity_ids)
+        json_quoted_entity_ids = [json.dumps(entity_id) for entity_id in entity_ids]
         return entities_stmt(
             start_day,
             end_day,
             event_types,
             entity_ids,
-            json_quotable_entity_ids,
+            json_quoted_entity_ids,
         )

     # devices: logbook sends everything for the timeframe for the devices
     assert device_ids is not None
-    json_quotable_device_ids = list(device_ids)
+    json_quoted_device_ids = [json.dumps(device_id) for device_id in device_ids]
     return devices_stmt(
         start_day,
         end_day,
         event_types,
-        json_quotable_device_ids,
+        json_quoted_device_ids,
     )
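
The switch from list(entity_ids) to json.dumps makes sense given what the IN matchers compare against: entity_ids embedded in a JSON text column, so each candidate value must carry its JSON quotes. A small sketch:

    import json

    entity_ids = ["light.kitchen", "switch.heater"]
    json_quoted = [json.dumps(entity_id) for entity_id in entity_ids]
    print(json_quoted)  # ['"light.kitchen"', '"switch.heater"']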

View File

@@ -3,10 +3,9 @@ from __future__ import annotations

 from datetime import datetime as dt

-from sqlalchemy import lambda_stmt
 from sqlalchemy.orm import Query
 from sqlalchemy.sql.elements import ClauseList
-from sqlalchemy.sql.lambdas import StatementLambdaElement
+from sqlalchemy.sql.selectable import Select

 from homeassistant.components.recorder.models import LAST_UPDATED_INDEX, Events, States
@@ -25,32 +24,29 @@ def all_stmt(
     states_entity_filter: ClauseList | None = None,
     events_entity_filter: ClauseList | None = None,
     context_id: str | None = None,
-) -> StatementLambdaElement:
+) -> Select:
     """Generate a logbook query for all entities."""
-    stmt = lambda_stmt(
-        lambda: select_events_without_states(start_day, end_day, event_types)
-    )
+    stmt = select_events_without_states(start_day, end_day, event_types)
     if context_id is not None:
         # Once all the old `state_changed` events
         # are gone from the database remove the
         # _legacy_select_events_context_id()
-        stmt += lambda s: s.where(Events.context_id == context_id).union_all(
+        stmt = stmt.where(Events.context_id == context_id).union_all(
             _states_query_for_context_id(start_day, end_day, context_id),
             legacy_select_events_context_id(start_day, end_day, context_id),
         )
     else:
         if events_entity_filter is not None:
-            stmt += lambda s: s.where(events_entity_filter)
+            stmt = stmt.where(events_entity_filter)
         if states_entity_filter is not None:
-            stmt += lambda s: s.union_all(
+            stmt = stmt.union_all(
                 _states_query_for_all(start_day, end_day).where(states_entity_filter)
             )
         else:
-            stmt += lambda s: s.union_all(_states_query_for_all(start_day, end_day))
-    stmt += lambda s: s.order_by(Events.time_fired)
-    return stmt
+            stmt = stmt.union_all(_states_query_for_all(start_day, end_day))
+    return stmt.order_by(Events.time_fired)


 def _states_query_for_all(start_day: dt, end_day: dt) -> Query:
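
After the rewrite the query builders use ordinary incremental Select composition: each .where()/.union_all()/.order_by() call returns a new statement, with no lambda caching involved. A self-contained sketch of the pattern (assuming SQLAlchemy 1.4+; the events table here is a hypothetical stand-in):

    from sqlalchemy import column, select, table

    events = table("events", column("event_type"), column("time_fired"))

    stmt = select(events.c.event_type)                # start with a plain Select
    stmt = stmt.where(events.c.event_type == "call_service")
    stmt = stmt.order_by(events.c.time_fired)         # each step returns a new statement
    print(stmt)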

View File

@@ -10,7 +10,7 @@ from sqlalchemy.sql.elements import ClauseList
from sqlalchemy.sql.expression import literal
from sqlalchemy.sql.selectable import Select
from homeassistant.components.proximity import DOMAIN as PROXIMITY_DOMAIN
from homeassistant.components.recorder.filters import like_domain_matchers
from homeassistant.components.recorder.models import (
EVENTS_CONTEXT_ID_INDEX,
OLD_FORMAT_ATTRS_JSON,
@@ -22,15 +22,19 @@ from homeassistant.components.recorder.models import (
StateAttributes,
States,
)
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
CONTINUOUS_DOMAINS = {PROXIMITY_DOMAIN, SENSOR_DOMAIN}
CONTINUOUS_ENTITY_ID_LIKE = [f"{domain}.%" for domain in CONTINUOUS_DOMAINS]
from ..const import ALWAYS_CONTINUOUS_DOMAINS, CONDITIONALLY_CONTINUOUS_DOMAINS
# Domains that are continuous if there is a UOM set on the entity
CONDITIONALLY_CONTINUOUS_ENTITY_ID_LIKE = like_domain_matchers(
CONDITIONALLY_CONTINUOUS_DOMAINS
)
# Domains that are always continuous
ALWAYS_CONTINUOUS_ENTITY_ID_LIKE = like_domain_matchers(ALWAYS_CONTINUOUS_DOMAINS)
UNIT_OF_MEASUREMENT_JSON = '"unit_of_measurement":'
UNIT_OF_MEASUREMENT_JSON_LIKE = f"%{UNIT_OF_MEASUREMENT_JSON}%"
PSUEDO_EVENT_STATE_CHANGED = None
# Since we don't store event_types and None
# and we don't store state_changed in events
@@ -220,29 +224,44 @@ def _missing_state_matcher() -> sqlalchemy.and_:
def _not_continuous_entity_matcher() -> sqlalchemy.or_:
"""Match non continuous entities."""
return sqlalchemy.or_(
_not_continuous_domain_matcher(),
# First exclude domains that may be continuous
_not_possible_continuous_domain_matcher(),
# But let in the entities in the possible continuous domains
# that are not actually continuous sensors because they lack a UOM
sqlalchemy.and_(
_continuous_domain_matcher, _not_uom_attributes_matcher()
_conditionally_continuous_domain_matcher, _not_uom_attributes_matcher()
).self_group(),
)
def _not_continuous_domain_matcher() -> sqlalchemy.and_:
"""Match not continuous domains."""
def _not_possible_continuous_domain_matcher() -> sqlalchemy.and_:
"""Match not continuous domains.
This matches domains that are always considered continuous
and domains that are only conditionally continuous
(if they have a UOM set).
"""
return sqlalchemy.and_(
*[
~States.entity_id.like(entity_domain)
for entity_domain in CONTINUOUS_ENTITY_ID_LIKE
for entity_domain in (
*ALWAYS_CONTINUOUS_ENTITY_ID_LIKE,
*CONDITIONALLY_CONTINUOUS_ENTITY_ID_LIKE,
)
],
).self_group()
def _continuous_domain_matcher() -> sqlalchemy.or_:
"""Match continuous domains."""
def _conditionally_continuous_domain_matcher() -> sqlalchemy.or_:
"""Match conditionally continuous domains.
This matches domains that are only considered
continuous if a UOM is set.
"""
return sqlalchemy.or_(
*[
States.entity_id.like(entity_domain)
for entity_domain in CONTINUOUS_ENTITY_ID_LIKE
for entity_domain in CONDITIONALLY_CONTINUOUS_ENTITY_ID_LIKE
],
).self_group()

View File

@@ -4,11 +4,10 @@ from __future__ import annotations
from collections.abc import Iterable
from datetime import datetime as dt
from sqlalchemy import lambda_stmt, select
from sqlalchemy import select
from sqlalchemy.orm import Query
from sqlalchemy.sql.elements import ClauseList
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.sql.selectable import CTE, CompoundSelect
from sqlalchemy.sql.selectable import CTE, CompoundSelect, Select
from homeassistant.components.recorder.models import (
DEVICE_ID_IN_EVENT,
@@ -31,11 +30,11 @@ def _select_device_id_context_ids_sub_query(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
json_quotable_device_ids: list[str],
json_quoted_device_ids: list[str],
) -> CompoundSelect:
"""Generate a subquery to find context ids for multiple devices."""
inner = select_events_context_id_subquery(start_day, end_day, event_types).where(
apply_event_device_id_matchers(json_quotable_device_ids)
apply_event_device_id_matchers(json_quoted_device_ids)
)
return select(inner.c.context_id).group_by(inner.c.context_id)
@@ -45,14 +44,14 @@ def _apply_devices_context_union(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
json_quotable_device_ids: list[str],
json_quoted_device_ids: list[str],
) -> CompoundSelect:
"""Generate a CTE to find the device context ids and a query to find linked row."""
devices_cte: CTE = _select_device_id_context_ids_sub_query(
start_day,
end_day,
event_types,
json_quotable_device_ids,
json_quoted_device_ids,
).cte()
return query.union_all(
apply_events_context_hints(
@@ -72,25 +71,22 @@ def devices_stmt(
start_day: dt,
end_day: dt,
event_types: tuple[str, ...],
json_quotable_device_ids: list[str],
) -> StatementLambdaElement:
json_quoted_device_ids: list[str],
) -> Select:
"""Generate a logbook query for multiple devices."""
stmt = lambda_stmt(
lambda: _apply_devices_context_union(
select_events_without_states(start_day, end_day, event_types).where(
apply_event_device_id_matchers(json_quotable_device_ids)
),
start_day,
end_day,
event_types,
json_quotable_device_ids,
).order_by(Events.time_fired)
)
return stmt
return _apply_devices_context_union(
select_events_without_states(start_day, end_day, event_types).where(
apply_event_device_id_matchers(json_quoted_device_ids)
),
start_day,
end_day,
event_types,
json_quoted_device_ids,
).order_by(Events.time_fired)
def apply_event_device_id_matchers(
json_quotable_device_ids: Iterable[str],
json_quoted_device_ids: Iterable[str],
) -> ClauseList:
"""Create matchers for the device_ids in the event_data."""
return DEVICE_ID_IN_EVENT.in_(json_quotable_device_ids)
return DEVICE_ID_IN_EVENT.in_(json_quoted_device_ids)

View File

@@ -5,10 +5,9 @@ from collections.abc import Iterable
from datetime import datetime as dt
import sqlalchemy
from sqlalchemy import lambda_stmt, select, union_all
from sqlalchemy import select, union_all
from sqlalchemy.orm import Query
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.sql.selectable import CTE, CompoundSelect
from sqlalchemy.sql.selectable import CTE, CompoundSelect, Select
from homeassistant.components.recorder.models import (
ENTITY_ID_IN_EVENT,
@@ -36,12 +35,12 @@ def _select_entities_context_ids_sub_query(
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str],
json_quotable_entity_ids: list[str],
json_quoted_entity_ids: list[str],
) -> CompoundSelect:
"""Generate a subquery to find context ids for multiple entities."""
union = union_all(
select_events_context_id_subquery(start_day, end_day, event_types).where(
apply_event_entity_id_matchers(json_quotable_entity_ids)
apply_event_entity_id_matchers(json_quoted_entity_ids)
),
apply_entities_hints(select(States.context_id))
.filter((States.last_updated > start_day) & (States.last_updated < end_day))
@@ -56,7 +55,7 @@ def _apply_entities_context_union(
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str],
json_quotable_entity_ids: list[str],
json_quoted_entity_ids: list[str],
) -> CompoundSelect:
"""Generate a CTE to find the entity and device context ids and a query to find linked row."""
entities_cte: CTE = _select_entities_context_ids_sub_query(
@@ -64,7 +63,7 @@ def _apply_entities_context_union(
end_day,
event_types,
entity_ids,
json_quotable_entity_ids,
json_quoted_entity_ids,
).cte()
# We used to optimize this to exclude rows we already have in the union with
# a States.entity_id.not_in(entity_ids) but that made the
@@ -91,21 +90,19 @@ def entities_stmt(
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str],
json_quotable_entity_ids: list[str],
) -> StatementLambdaElement:
json_quoted_entity_ids: list[str],
) -> Select:
"""Generate a logbook query for multiple entities."""
return lambda_stmt(
lambda: _apply_entities_context_union(
select_events_without_states(start_day, end_day, event_types).where(
apply_event_entity_id_matchers(json_quotable_entity_ids)
),
start_day,
end_day,
event_types,
entity_ids,
json_quotable_entity_ids,
).order_by(Events.time_fired)
)
return _apply_entities_context_union(
select_events_without_states(start_day, end_day, event_types).where(
apply_event_entity_id_matchers(json_quoted_entity_ids)
),
start_day,
end_day,
event_types,
entity_ids,
json_quoted_entity_ids,
).order_by(Events.time_fired)
def states_query_for_entity_ids(
@@ -118,12 +115,12 @@ def states_query_for_entity_ids(
def apply_event_entity_id_matchers(
json_quotable_entity_ids: Iterable[str],
json_quoted_entity_ids: Iterable[str],
) -> sqlalchemy.or_:
"""Create matchers for the entity_id in the event_data."""
return ENTITY_ID_IN_EVENT.in_(
json_quotable_entity_ids
) | OLD_ENTITY_ID_IN_EVENT.in_(json_quotable_entity_ids)
return ENTITY_ID_IN_EVENT.in_(json_quoted_entity_ids) | OLD_ENTITY_ID_IN_EVENT.in_(
json_quoted_entity_ids
)
def apply_entities_hints(query: Query) -> Query:

View File

@@ -5,10 +5,9 @@ from collections.abc import Iterable
from datetime import datetime as dt
import sqlalchemy
from sqlalchemy import lambda_stmt, select, union_all
from sqlalchemy import select, union_all
from sqlalchemy.orm import Query
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.sql.selectable import CTE, CompoundSelect
from sqlalchemy.sql.selectable import CTE, CompoundSelect, Select
from homeassistant.components.recorder.models import EventData, Events, States
@@ -33,14 +32,14 @@ def _select_entities_device_id_context_ids_sub_query(
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str],
json_quotable_entity_ids: list[str],
json_quotable_device_ids: list[str],
json_quoted_entity_ids: list[str],
json_quoted_device_ids: list[str],
) -> CompoundSelect:
"""Generate a subquery to find context ids for multiple entities and multiple devices."""
union = union_all(
select_events_context_id_subquery(start_day, end_day, event_types).where(
_apply_event_entity_id_device_id_matchers(
json_quotable_entity_ids, json_quotable_device_ids
json_quoted_entity_ids, json_quoted_device_ids
)
),
apply_entities_hints(select(States.context_id))
@@ -56,16 +55,16 @@ def _apply_entities_devices_context_union(
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str],
json_quotable_entity_ids: list[str],
json_quotable_device_ids: list[str],
json_quoted_entity_ids: list[str],
json_quoted_device_ids: list[str],
) -> CompoundSelect:
devices_entities_cte: CTE = _select_entities_device_id_context_ids_sub_query(
start_day,
end_day,
event_types,
entity_ids,
json_quotable_entity_ids,
json_quotable_device_ids,
json_quoted_entity_ids,
json_quoted_device_ids,
).cte()
# We used to optimize this to exclude rows we already have in the union with
# a States.entity_id.not_in(entity_ids) but that made the
@@ -92,32 +91,30 @@ def entities_devices_stmt(
end_day: dt,
event_types: tuple[str, ...],
entity_ids: list[str],
json_quotable_entity_ids: list[str],
json_quotable_device_ids: list[str],
) -> StatementLambdaElement:
json_quoted_entity_ids: list[str],
json_quoted_device_ids: list[str],
) -> Select:
"""Generate a logbook query for multiple entities."""
stmt = lambda_stmt(
lambda: _apply_entities_devices_context_union(
select_events_without_states(start_day, end_day, event_types).where(
_apply_event_entity_id_device_id_matchers(
json_quotable_entity_ids, json_quotable_device_ids
)
),
start_day,
end_day,
event_types,
entity_ids,
json_quotable_entity_ids,
json_quotable_device_ids,
).order_by(Events.time_fired)
)
stmt = _apply_entities_devices_context_union(
select_events_without_states(start_day, end_day, event_types).where(
_apply_event_entity_id_device_id_matchers(
json_quoted_entity_ids, json_quoted_device_ids
)
),
start_day,
end_day,
event_types,
entity_ids,
json_quoted_entity_ids,
json_quoted_device_ids,
).order_by(Events.time_fired)
return stmt
def _apply_event_entity_id_device_id_matchers(
json_quotable_entity_ids: Iterable[str], json_quotable_device_ids: Iterable[str]
json_quoted_entity_ids: Iterable[str], json_quoted_device_ids: Iterable[str]
) -> sqlalchemy.or_:
"""Create matchers for the device_id and entity_id in the event_data."""
return apply_event_entity_id_matchers(
json_quotable_entity_ids
) | apply_event_device_id_matchers(json_quotable_device_ids)
json_quoted_entity_ids
) | apply_event_device_id_matchers(json_quoted_device_ids)

View File

@@ -16,9 +16,11 @@ from homeassistant.components.websocket_api import messages
from homeassistant.components.websocket_api.connection import ActiveConnection
from homeassistant.components.websocket_api.const import JSON_DUMP
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback
from homeassistant.helpers.entityfilter import EntityFilter
from homeassistant.helpers.event import async_track_point_in_utc_time
import homeassistant.util.dt as dt_util
from .const import LOGBOOK_ENTITIES_FILTER
from .helpers import (
async_determine_event_types,
async_filter_entities,
@@ -67,6 +69,23 @@ async def _async_wait_for_recorder_sync(hass: HomeAssistant) -> None:
)
@callback
def _async_send_empty_response(
connection: ActiveConnection, msg_id: int, start_time: dt, end_time: dt | None
) -> None:
"""Send an empty response.
The current case for this is when they ask for entity_ids
that will all be filtered away because they have UOMs or
state_class.
"""
connection.send_result(msg_id)
stream_end_time = end_time or dt_util.utcnow()
empty_stream_message = _generate_stream_message([], start_time, stream_end_time)
empty_response = messages.event_message(msg_id, empty_stream_message)
connection.send_message(JSON_DUMP(empty_response))
async def _async_send_historical_events(
hass: HomeAssistant,
connection: ActiveConnection,
@@ -171,6 +190,17 @@ async def _async_get_ws_stream_events(
)
def _generate_stream_message(
events: list[dict[str, Any]], start_day: dt, end_day: dt
) -> dict[str, Any]:
"""Generate a logbook stream message response."""
return {
"events": events,
"start_time": dt_util.utc_to_timestamp(start_day),
"end_time": dt_util.utc_to_timestamp(end_day),
}
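
Given the helper above, the empty response sent by _async_send_empty_response is just the normal stream envelope with no events. A rough sketch of the resulting message shape (stand-in datetimes, plain .timestamp() in place of dt_util.utc_to_timestamp):

    from datetime import datetime, timezone

    # illustrative stand-ins for start_day / end_day
    start_day = datetime(2022, 6, 7, tzinfo=timezone.utc)
    end_day = datetime(2022, 6, 7, 1, tzinfo=timezone.utc)

    message = {
        "events": [],
        "start_time": start_day.timestamp(),
        "end_time": end_day.timestamp(),
    }
    print(message)  # {'events': [], 'start_time': 1654560000.0, 'end_time': 1654563600.0}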
def _ws_stream_get_events(
msg_id: int,
start_day: dt,
@@ -184,11 +214,7 @@ def _ws_stream_get_events(
last_time = None
if events:
last_time = dt_util.utc_from_timestamp(events[-1]["when"])
message = {
"events": events,
"start_time": dt_util.utc_to_timestamp(start_day),
"end_time": dt_util.utc_to_timestamp(end_day),
}
message = _generate_stream_message(events, start_day, end_day)
if partial:
# This is a hint to consumers of the api that
# we are about to send a another block of historical
@@ -275,6 +301,10 @@ async def ws_event_stream(
entity_ids = msg.get("entity_ids")
if entity_ids:
entity_ids = async_filter_entities(hass, entity_ids)
if not entity_ids:
_async_send_empty_response(connection, msg_id, start_time, end_time)
return
event_types = async_determine_event_types(hass, entity_ids, device_ids)
event_processor = EventProcessor(
hass,
@@ -337,8 +367,18 @@ async def ws_event_stream(
)
_unsub()
entities_filter: EntityFilter | None = None
if not event_processor.limited_select:
entities_filter = hass.data[LOGBOOK_ENTITIES_FILTER]
async_subscribe_events(
hass, subscriptions, _queue_or_cancel, event_types, entity_ids, device_ids
hass,
subscriptions,
_queue_or_cancel,
event_types,
entities_filter,
entity_ids,
device_ids,
)
subscriptions_setup_complete_time = dt_util.utcnow()
connection.subscriptions[msg_id] = _unsub

View File

@@ -3,7 +3,7 @@
   "name": "LOOKin",
   "documentation": "https://www.home-assistant.io/integrations/lookin/",
   "codeowners": ["@ANMalko", "@bdraco"],
-  "requirements": ["aiolookin==0.1.0"],
+  "requirements": ["aiolookin==0.1.1"],
   "zeroconf": ["_lookin._tcp.local."],
   "config_flow": true,
   "iot_class": "local_push",

View File

@@ -139,8 +139,11 @@ async def async_setup_entry(
                 entry, coordinator, controller, description
             )
             for description in BINARY_SENSOR_DESCRIPTIONS
-            if (coordinator := coordinators[description.api_category]) is not None
-            and key_exists(coordinator.data, description.data_key)
+            if (
+                (coordinator := coordinators[description.api_category]) is not None
+                and coordinator.data
+                and key_exists(coordinator.data, description.data_key)
+            )
         ]
     )
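
The added coordinator.data truthiness check is what keeps key_exists from being handed None. A condensed sketch of the guard, with hypothetical stand-in data:

    from __future__ import annotations

    from dataclasses import dataclass

    @dataclass
    class Coordinator:  # hypothetical stand-in for DataUpdateCoordinator
        data: dict | None

    coordinators = {
        "zones": Coordinator(data=None),
        "restrictions": Coordinator(data={"freeze": True}),
    }

    usable = [
        category
        for category in coordinators
        if (coordinator := coordinators[category]) is not None and coordinator.data
    ]
    print(usable)  # ['restrictions'] -- the None-data coordinator is skipped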

View File

@@ -3,7 +3,7 @@
   "name": "RainMachine",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/rainmachine",
-  "requirements": ["regenmaschine==2022.05.1"],
+  "requirements": ["regenmaschine==2022.06.0"],
   "codeowners": ["@bachya"],
   "iot_class": "local_polling",
   "homekit": {

View File

@@ -4,6 +4,8 @@ from __future__ import annotations
 from dataclasses import dataclass
 from datetime import datetime, timedelta

+from regenmaschine.controller import Controller
+
 from homeassistant.components.sensor import (
     SensorDeviceClass,
     SensorEntity,
@@ -13,8 +15,9 @@ from homeassistant.components.sensor import (
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import TEMP_CELSIUS, VOLUME_CUBIC_METERS
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers.entity import EntityCategory
+from homeassistant.helpers.entity import EntityCategory, EntityDescription
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
 from homeassistant.util.dt import utcnow

 from . import RainMachineEntity
@@ -133,8 +136,11 @@ async def async_setup_entry(
                 entry, coordinator, controller, description
             )
             for description in SENSOR_DESCRIPTIONS
-            if (coordinator := coordinators[description.api_category]) is not None
-            and key_exists(coordinator.data, description.data_key)
+            if (
+                (coordinator := coordinators[description.api_category]) is not None
+                and coordinator.data
+                and key_exists(coordinator.data, description.data_key)
+            )
         ]

     zone_coordinator = coordinators[DATA_ZONES]
@@ -202,16 +208,33 @@ class ZoneTimeRemainingSensor(RainMachineEntity, SensorEntity):
     entity_description: RainMachineSensorDescriptionUid

+    def __init__(
+        self,
+        entry: ConfigEntry,
+        coordinator: DataUpdateCoordinator,
+        controller: Controller,
+        description: EntityDescription,
+    ) -> None:
+        """Initialize."""
+        super().__init__(entry, coordinator, controller, description)
+
+        self._running_or_queued: bool = False
+
     @callback
     def update_from_latest_data(self) -> None:
         """Update the state."""
         data = self.coordinator.data[self.entity_description.uid]
         now = utcnow()

-        if RUN_STATE_MAP.get(data["state"]) != RunStates.RUNNING:
+        # If the zone isn't actively running, return immediately:
+        if RUN_STATE_MAP.get(data["state"]) == RunStates.NOT_RUNNING:
+            if self._running_or_queued:
+                # If we go from running to not running, update the state to be right
+                # now (i.e., the time the zone stopped running):
+                self._attr_native_value = now
+                self._running_or_queued = False
             return

+        self._running_or_queued = True
         new_timestamp = now + timedelta(seconds=data["remaining"])

         if self._attr_native_value:
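
The new _running_or_queued flag implements a small state machine: while a zone runs, the sensor projects a stop time from "remaining"; on the running-to-stopped transition it freezes the value at the moment the zone stopped. A standalone sketch of that logic (simplified, not the integration's actual class):

    from __future__ import annotations

    from datetime import datetime, timedelta, timezone

    class ZoneTimer:  # simplified model of ZoneTimeRemainingSensor's update logic
        def __init__(self) -> None:
            self.running_or_queued = False
            self.native_value: datetime | None = None

        def update(self, state: str, remaining: int) -> None:
            now = datetime.now(timezone.utc)
            if state == "not_running":
                if self.running_or_queued:
                    # running -> stopped: freeze the value at the stop time
                    self.native_value = now
                    self.running_or_queued = False
                return
            self.running_or_queued = True
            self.native_value = now + timedelta(seconds=remaining)

    timer = ZoneTimer()
    timer.update("running", 120)      # value = now + 2 minutes
    timer.update("not_running", 0)    # value frozen at the moment it stopped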

View File

@@ -248,8 +248,13 @@ def _domain_matcher(
     domains: Iterable[str], columns: Iterable[Column], encoder: Callable[[Any], Any]
 ) -> ClauseList:
     matchers = [
-        (column.is_not(None) & cast(column, Text()).like(encoder(f"{domain}.%")))
-        for domain in domains
+        (column.is_not(None) & cast(column, Text()).like(encoder(domain_matcher)))
+        for domain_matcher in like_domain_matchers(domains)
         for column in columns
     ]
     return or_(*matchers) if matchers else or_(False)


+def like_domain_matchers(domains: Iterable[str]) -> list[str]:
+    """Convert a list of domains to sql LIKE matchers."""
+    return [f"{domain}.%" for domain in domains]

View File

@@ -9,12 +9,11 @@ import logging
import time
from typing import Any, cast
from sqlalchemy import Column, Text, and_, func, lambda_stmt, or_, select
from sqlalchemy import Column, Text, and_, func, or_, select
from sqlalchemy.engine.row import Row
from sqlalchemy.orm.query import Query
from sqlalchemy.orm.session import Session
from sqlalchemy.sql.expression import literal
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.sql.selectable import Select, Subquery
from homeassistant.components import recorder
from homeassistant.components.websocket_api.const import (
@@ -35,7 +34,7 @@ from .models import (
process_timestamp_to_utc_isoformat,
row_to_compressed_state,
)
from .util import execute_stmt_lambda_element, session_scope
from .util import execute_stmt, session_scope
# mypy: allow-untyped-defs, no-check-untyped-defs
@@ -115,22 +114,18 @@ def _schema_version(hass: HomeAssistant) -> int:
return recorder.get_instance(hass).schema_version
def lambda_stmt_and_join_attributes(
def stmt_and_join_attributes(
schema_version: int, no_attributes: bool, include_last_changed: bool = True
) -> tuple[StatementLambdaElement, bool]:
"""Return the lambda_stmt and if StateAttributes should be joined.
Because these are lambda_stmt the values inside the lambdas need
to be explicitly written out to avoid caching the wrong values.
"""
) -> tuple[Select, bool]:
"""Return the stmt and if StateAttributes should be joined."""
# If no_attributes was requested we do the query
# without the attributes fields and do not join the
# state_attributes table
if no_attributes:
if include_last_changed:
return lambda_stmt(lambda: select(*QUERY_STATE_NO_ATTR)), False
return select(*QUERY_STATE_NO_ATTR), False
return (
lambda_stmt(lambda: select(*QUERY_STATE_NO_ATTR_NO_LAST_CHANGED)),
select(*QUERY_STATE_NO_ATTR_NO_LAST_CHANGED),
False,
)
# If we in the process of migrating schema we do
@@ -139,19 +134,19 @@ def lambda_stmt_and_join_attributes(
if schema_version < 25:
if include_last_changed:
return (
lambda_stmt(lambda: select(*QUERY_STATES_PRE_SCHEMA_25)),
select(*QUERY_STATES_PRE_SCHEMA_25),
False,
)
return (
lambda_stmt(lambda: select(*QUERY_STATES_PRE_SCHEMA_25_NO_LAST_CHANGED)),
select(*QUERY_STATES_PRE_SCHEMA_25_NO_LAST_CHANGED),
False,
)
# Finally if no migration is in progress and no_attributes
# was not requested, we query both attributes columns and
# join state_attributes
if include_last_changed:
return lambda_stmt(lambda: select(*QUERY_STATES)), True
return lambda_stmt(lambda: select(*QUERY_STATES_NO_LAST_CHANGED)), True
return select(*QUERY_STATES), True
return select(*QUERY_STATES_NO_LAST_CHANGED), True
def get_significant_states(
@@ -183,7 +178,7 @@ def get_significant_states(
)
def _ignore_domains_filter(query: Query) -> Query:
def _ignore_domains_filter(query: Select) -> Select:
"""Add a filter to ignore domains we do not fetch history for."""
return query.filter(
and_(
@@ -203,9 +198,9 @@ def _significant_states_stmt(
filters: Filters | None,
significant_changes_only: bool,
no_attributes: bool,
) -> StatementLambdaElement:
) -> Select:
"""Query the database for significant state changes."""
stmt, join_attributes = lambda_stmt_and_join_attributes(
stmt, join_attributes = stmt_and_join_attributes(
schema_version, no_attributes, include_last_changed=not significant_changes_only
)
if (
@@ -214,11 +209,11 @@ def _significant_states_stmt(
and significant_changes_only
and split_entity_id(entity_ids[0])[0] not in SIGNIFICANT_DOMAINS
):
stmt += lambda q: q.filter(
stmt = stmt.filter(
(States.last_changed == States.last_updated) | States.last_changed.is_(None)
)
elif significant_changes_only:
stmt += lambda q: q.filter(
stmt = stmt.filter(
or_(
*[
States.entity_id.like(entity_domain)
@@ -232,25 +227,22 @@ def _significant_states_stmt(
)
if entity_ids:
stmt += lambda q: q.filter(States.entity_id.in_(entity_ids))
stmt = stmt.filter(States.entity_id.in_(entity_ids))
else:
stmt += _ignore_domains_filter
stmt = _ignore_domains_filter(stmt)
if filters and filters.has_config:
entity_filter = filters.states_entity_filter()
stmt = stmt.add_criteria(
lambda q: q.filter(entity_filter), track_on=[filters]
)
stmt = stmt.filter(entity_filter)
stmt += lambda q: q.filter(States.last_updated > start_time)
stmt = stmt.filter(States.last_updated > start_time)
if end_time:
stmt += lambda q: q.filter(States.last_updated < end_time)
stmt = stmt.filter(States.last_updated < end_time)
if join_attributes:
stmt += lambda q: q.outerjoin(
stmt = stmt.outerjoin(
StateAttributes, States.attributes_id == StateAttributes.attributes_id
)
stmt += lambda q: q.order_by(States.entity_id, States.last_updated)
return stmt
return stmt.order_by(States.entity_id, States.last_updated)
def get_significant_states_with_session(
@@ -287,9 +279,7 @@ def get_significant_states_with_session(
significant_changes_only,
no_attributes,
)
states = execute_stmt_lambda_element(
session, stmt, None if entity_ids else start_time, end_time
)
states = execute_stmt(session, stmt, None if entity_ids else start_time, end_time)
return _sorted_states_to_dict(
hass,
session,
@@ -341,27 +331,28 @@ def _state_changed_during_period_stmt(
no_attributes: bool,
descending: bool,
limit: int | None,
) -> StatementLambdaElement:
stmt, join_attributes = lambda_stmt_and_join_attributes(
) -> Select:
stmt, join_attributes = stmt_and_join_attributes(
schema_version, no_attributes, include_last_changed=False
)
stmt += lambda q: q.filter(
stmt = stmt.filter(
((States.last_changed == States.last_updated) | States.last_changed.is_(None))
& (States.last_updated > start_time)
)
if end_time:
stmt += lambda q: q.filter(States.last_updated < end_time)
stmt += lambda q: q.filter(States.entity_id == entity_id)
stmt = stmt.filter(States.last_updated < end_time)
if entity_id:
stmt = stmt.filter(States.entity_id == entity_id)
if join_attributes:
stmt += lambda q: q.outerjoin(
stmt = stmt.outerjoin(
StateAttributes, States.attributes_id == StateAttributes.attributes_id
)
if descending:
stmt += lambda q: q.order_by(States.entity_id, States.last_updated.desc())
stmt = stmt.order_by(States.entity_id, States.last_updated.desc())
else:
stmt += lambda q: q.order_by(States.entity_id, States.last_updated)
stmt = stmt.order_by(States.entity_id, States.last_updated)
if limit:
stmt += lambda q: q.limit(limit)
stmt = stmt.limit(limit)
return stmt
@@ -377,6 +368,7 @@ def state_changes_during_period(
) -> MutableMapping[str, list[State]]:
"""Return states changes during UTC period start_time - end_time."""
entity_id = entity_id.lower() if entity_id is not None else None
entity_ids = [entity_id] if entity_id is not None else None
with session_scope(hass=hass) as session:
stmt = _state_changed_during_period_stmt(
@@ -388,11 +380,9 @@ def state_changes_during_period(
descending,
limit,
)
states = execute_stmt_lambda_element(
states = execute_stmt(
session, stmt, None if entity_id else start_time, end_time
)
entity_ids = [entity_id] if entity_id is not None else None
return cast(
MutableMapping[str, list[State]],
_sorted_states_to_dict(
@@ -407,38 +397,38 @@ def state_changes_during_period(
def _get_last_state_changes_stmt(
schema_version: int, number_of_states: int, entity_id: str
) -> StatementLambdaElement:
stmt, join_attributes = lambda_stmt_and_join_attributes(
schema_version: int, number_of_states: int, entity_id: str | None
) -> Select:
stmt, join_attributes = stmt_and_join_attributes(
schema_version, False, include_last_changed=False
)
stmt += lambda q: q.filter(
stmt = stmt.filter(
(States.last_changed == States.last_updated) | States.last_changed.is_(None)
).filter(States.entity_id == entity_id)
)
if entity_id:
stmt = stmt.filter(States.entity_id == entity_id)
if join_attributes:
stmt += lambda q: q.outerjoin(
stmt = stmt.outerjoin(
StateAttributes, States.attributes_id == StateAttributes.attributes_id
)
stmt += lambda q: q.order_by(States.entity_id, States.last_updated.desc()).limit(
return stmt.order_by(States.entity_id, States.last_updated.desc()).limit(
number_of_states
)
return stmt
def get_last_state_changes(
hass: HomeAssistant, number_of_states: int, entity_id: str
hass: HomeAssistant, number_of_states: int, entity_id: str | None
) -> MutableMapping[str, list[State]]:
"""Return the last number_of_states."""
start_time = dt_util.utcnow()
entity_id = entity_id.lower() if entity_id is not None else None
entity_ids = [entity_id] if entity_id is not None else None
with session_scope(hass=hass) as session:
stmt = _get_last_state_changes_stmt(
_schema_version(hass), number_of_states, entity_id
)
states = list(execute_stmt_lambda_element(session, stmt))
entity_ids = [entity_id] if entity_id is not None else None
states = list(execute_stmt(session, stmt))
return cast(
MutableMapping[str, list[State]],
_sorted_states_to_dict(
@@ -458,14 +448,14 @@ def _get_states_for_entites_stmt(
utc_point_in_time: datetime,
entity_ids: list[str],
no_attributes: bool,
) -> StatementLambdaElement:
) -> Select:
"""Baked query to get states for specific entities."""
stmt, join_attributes = lambda_stmt_and_join_attributes(
stmt, join_attributes = stmt_and_join_attributes(
schema_version, no_attributes, include_last_changed=True
)
# We got an include-list of entities, accelerate the query by filtering already
# in the inner query.
stmt += lambda q: q.where(
stmt = stmt.where(
States.state_id
== (
select(func.max(States.state_id).label("max_state_id"))
@@ -479,28 +469,18 @@ def _get_states_for_entites_stmt(
).c.max_state_id
)
if join_attributes:
stmt += lambda q: q.outerjoin(
stmt = stmt.outerjoin(
StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
)
return stmt
def _get_states_for_all_stmt(
schema_version: int,
def _generate_most_recent_states_by_date(
run_start: datetime,
utc_point_in_time: datetime,
filters: Filters | None,
no_attributes: bool,
) -> StatementLambdaElement:
"""Baked query to get states for all entities."""
stmt, join_attributes = lambda_stmt_and_join_attributes(
schema_version, no_attributes, include_last_changed=True
)
# We did not get an include-list of entities, query all states in the inner
# query, then filter out unwanted domains as well as applying the custom filter.
# This filtering can't be done in the inner query because the domain column is
# not indexed and we can't control what's in the custom filter.
most_recent_states_by_date = (
) -> Subquery:
"""Generate the sub query for the most recent states by data."""
return (
select(
States.entity_id.label("max_entity_id"),
func.max(States.last_updated).label("max_last_updated"),
@@ -512,7 +492,27 @@ def _get_states_for_all_stmt(
.group_by(States.entity_id)
.subquery()
)
stmt += lambda q: q.where(
def _get_states_for_all_stmt(
schema_version: int,
run_start: datetime,
utc_point_in_time: datetime,
filters: Filters | None,
no_attributes: bool,
) -> Select:
"""Baked query to get states for all entities."""
stmt, join_attributes = stmt_and_join_attributes(
schema_version, no_attributes, include_last_changed=True
)
# We did not get an include-list of entities, query all states in the inner
# query, then filter out unwanted domains as well as applying the custom filter.
# This filtering can't be done in the inner query because the domain column is
# not indexed and we can't control what's in the custom filter.
most_recent_states_by_date = _generate_most_recent_states_by_date(
run_start, utc_point_in_time
)
stmt = stmt.where(
States.state_id
== (
select(func.max(States.state_id).label("max_state_id"))
@@ -528,12 +528,12 @@ def _get_states_for_all_stmt(
.subquery()
).c.max_state_id,
)
stmt += _ignore_domains_filter
stmt = _ignore_domains_filter(stmt)
if filters and filters.has_config:
entity_filter = filters.states_entity_filter()
stmt = stmt.add_criteria(lambda q: q.filter(entity_filter), track_on=[filters])
stmt = stmt.filter(entity_filter)
if join_attributes:
stmt += lambda q: q.outerjoin(
stmt = stmt.outerjoin(
StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
)
return stmt
@@ -551,7 +551,7 @@ def _get_rows_with_session(
"""Return the states at a specific point in time."""
schema_version = _schema_version(hass)
if entity_ids and len(entity_ids) == 1:
return execute_stmt_lambda_element(
return execute_stmt(
session,
_get_single_entity_states_stmt(
schema_version, utc_point_in_time, entity_ids[0], no_attributes
@@ -576,7 +576,7 @@ def _get_rows_with_session(
schema_version, run.start, utc_point_in_time, filters, no_attributes
)
return execute_stmt_lambda_element(session, stmt)
return execute_stmt(session, stmt)
def _get_single_entity_states_stmt(
@@ -584,14 +584,14 @@ def _get_single_entity_states_stmt(
utc_point_in_time: datetime,
entity_id: str,
no_attributes: bool = False,
) -> StatementLambdaElement:
) -> Select:
# Use an entirely different (and extremely fast) query if we only
# have a single entity id
stmt, join_attributes = lambda_stmt_and_join_attributes(
stmt, join_attributes = stmt_and_join_attributes(
schema_version, no_attributes, include_last_changed=True
)
stmt += (
lambda q: q.filter(
stmt = (
stmt.filter(
States.last_updated < utc_point_in_time,
States.entity_id == entity_id,
)
@@ -599,7 +599,7 @@ def _get_single_entity_states_stmt(
.limit(1)
)
if join_attributes:
stmt += lambda q: q.outerjoin(
stmt = stmt.outerjoin(
StateAttributes, States.attributes_id == StateAttributes.attributes_id
)
return stmt
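
The history.py hunks above all apply one refactor: drop SQLAlchemy's lambda_stmt caching layer (statements grown with `stmt += lambda q: ...`) in favor of plain Select objects composed by ordinary method chaining, with reusable subqueries pulled out into named helpers such as _generate_most_recent_states_by_date. A minimal sketch contrasting the two styles, using a stand-in table rather than the recorder's real models (assumes SQLAlchemy 1.4+):

from sqlalchemy import Column, Integer, String, create_engine, lambda_stmt, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class States(Base):  # stand-in for the recorder's States model
    __tablename__ = "states"
    state_id = Column(Integer, primary_key=True)
    entity_id = Column(String)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

def old_style(entity_id: str):
    # lambda_stmt builds a cached statement; each `+=` lambda extends it
    # and becomes part of the statement cache key.
    stmt = lambda_stmt(lambda: select(States))
    stmt += lambda q: q.where(States.entity_id == entity_id)
    return stmt

def new_style(entity_id: str):
    # Plain Select: each .where() returns a new immutable Select, with no
    # lambda cache-key subtleties to reason about.
    return select(States).where(States.entity_id == entity_id)

with Session(engine) as session:
    assert session.execute(new_style("sensor.demo")).all() == []
    assert session.execute(old_style("sensor.demo")).all() == []
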

View File

@@ -14,12 +14,12 @@ import re
from statistics import mean
from typing import TYPE_CHECKING, Any, Literal, overload
from sqlalchemy import bindparam, func, lambda_stmt, select
from sqlalchemy import bindparam, func, select
from sqlalchemy.engine.row import Row
from sqlalchemy.exc import SQLAlchemyError, StatementError
from sqlalchemy.orm.session import Session
from sqlalchemy.sql.expression import literal_column, true
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.sql.selectable import Select, Subquery
import voluptuous as vol
from homeassistant.const import (
@@ -52,12 +52,7 @@ from .models import (
process_timestamp,
process_timestamp_to_utc_isoformat,
)
from .util import (
execute,
execute_stmt_lambda_element,
retryable_database_job,
session_scope,
)
from .util import execute, execute_stmt, retryable_database_job, session_scope
if TYPE_CHECKING:
from . import Recorder
@@ -482,16 +477,15 @@ def delete_statistics_meta_duplicates(session: Session) -> None:
def _compile_hourly_statistics_summary_mean_stmt(
start_time: datetime, end_time: datetime
) -> StatementLambdaElement:
) -> Select:
"""Generate the summary mean statement for hourly statistics."""
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SUMMARY_MEAN))
stmt += (
lambda q: q.filter(StatisticsShortTerm.start >= start_time)
return (
select(*QUERY_STATISTICS_SUMMARY_MEAN)
.filter(StatisticsShortTerm.start >= start_time)
.filter(StatisticsShortTerm.start < end_time)
.group_by(StatisticsShortTerm.metadata_id)
.order_by(StatisticsShortTerm.metadata_id)
)
return stmt
def compile_hourly_statistics(
@@ -509,7 +503,7 @@ def compile_hourly_statistics(
# Compute last hour's average, min, max
summary: dict[str, StatisticData] = {}
stmt = _compile_hourly_statistics_summary_mean_stmt(start_time, end_time)
stats = execute_stmt_lambda_element(session, stmt)
stats = execute_stmt(session, stmt)
if stats:
for stat in stats:
@@ -691,17 +685,17 @@ def _generate_get_metadata_stmt(
statistic_ids: list[str] | tuple[str] | None = None,
statistic_type: Literal["mean"] | Literal["sum"] | None = None,
statistic_source: str | None = None,
) -> StatementLambdaElement:
) -> Select:
"""Generate a statement to fetch metadata."""
stmt = lambda_stmt(lambda: select(*QUERY_STATISTIC_META))
stmt = select(*QUERY_STATISTIC_META)
if statistic_ids is not None:
stmt += lambda q: q.where(StatisticsMeta.statistic_id.in_(statistic_ids))
stmt = stmt.where(StatisticsMeta.statistic_id.in_(statistic_ids))
if statistic_source is not None:
stmt += lambda q: q.where(StatisticsMeta.source == statistic_source)
stmt = stmt.where(StatisticsMeta.source == statistic_source)
if statistic_type == "mean":
stmt += lambda q: q.where(StatisticsMeta.has_mean == true())
stmt = stmt.where(StatisticsMeta.has_mean == true())
elif statistic_type == "sum":
stmt += lambda q: q.where(StatisticsMeta.has_sum == true())
stmt = stmt.where(StatisticsMeta.has_sum == true())
return stmt
@@ -723,7 +717,7 @@ def get_metadata_with_session(
# Fetch metadata from the database
stmt = _generate_get_metadata_stmt(statistic_ids, statistic_type, statistic_source)
result = execute_stmt_lambda_element(session, stmt)
result = execute_stmt(session, stmt)
if not result:
return {}
@@ -985,27 +979,30 @@ def _statistics_during_period_stmt(
start_time: datetime,
end_time: datetime | None,
metadata_ids: list[int] | None,
table: type[Statistics | StatisticsShortTerm],
) -> StatementLambdaElement:
"""Prepare a database query for statistics during a given period.
This prepares a lambda_stmt query, so we don't insert the parameters yet.
"""
if table == StatisticsShortTerm:
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SHORT_TERM))
else:
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS))
stmt += lambda q: q.filter(table.start >= start_time)
) -> Select:
"""Prepare a database query for statistics during a given period."""
stmt = select(*QUERY_STATISTICS).filter(Statistics.start >= start_time)
if end_time is not None:
stmt += lambda q: q.filter(table.start < end_time)
stmt = stmt.filter(Statistics.start < end_time)
if metadata_ids:
stmt += lambda q: q.filter(table.metadata_id.in_(metadata_ids))
stmt = stmt.filter(Statistics.metadata_id.in_(metadata_ids))
return stmt.order_by(Statistics.metadata_id, Statistics.start)
stmt += lambda q: q.order_by(table.metadata_id, table.start)
return stmt
def _statistics_during_period_stmt_short_term(
start_time: datetime,
end_time: datetime | None,
metadata_ids: list[int] | None,
) -> Select:
"""Prepare a database query for short term statistics during a given period."""
stmt = select(*QUERY_STATISTICS_SHORT_TERM).filter(
StatisticsShortTerm.start >= start_time
)
if end_time is not None:
stmt = stmt.filter(StatisticsShortTerm.start < end_time)
if metadata_ids:
stmt = stmt.filter(StatisticsShortTerm.metadata_id.in_(metadata_ids))
return stmt.order_by(StatisticsShortTerm.metadata_id, StatisticsShortTerm.start)
def statistics_during_period(
@@ -1034,11 +1031,13 @@ def statistics_during_period(
if period == "5minute":
table = StatisticsShortTerm
stmt = _statistics_during_period_stmt_short_term(
start_time, end_time, metadata_ids
)
else:
table = Statistics
stmt = _statistics_during_period_stmt(start_time, end_time, metadata_ids, table)
stats = execute_stmt_lambda_element(session, stmt)
stmt = _statistics_during_period_stmt(start_time, end_time, metadata_ids)
stats = execute_stmt(session, stmt)
if not stats:
return {}
@@ -1069,19 +1068,27 @@ def statistics_during_period(
def _get_last_statistics_stmt(
metadata_id: int,
number_of_stats: int,
table: type[Statistics | StatisticsShortTerm],
) -> StatementLambdaElement:
) -> Select:
"""Generate a statement for number_of_stats statistics for a given statistic_id."""
if table == StatisticsShortTerm:
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SHORT_TERM))
else:
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS))
stmt += (
lambda q: q.filter_by(metadata_id=metadata_id)
.order_by(table.metadata_id, table.start.desc())
return (
select(*QUERY_STATISTICS)
.filter_by(metadata_id=metadata_id)
.order_by(Statistics.metadata_id, Statistics.start.desc())
.limit(number_of_stats)
)
def _get_last_statistics_short_term_stmt(
metadata_id: int,
number_of_stats: int,
) -> Select:
"""Generate a statement for number_of_stats short term statistics for a given statistic_id."""
return (
select(*QUERY_STATISTICS_SHORT_TERM)
.filter_by(metadata_id=metadata_id)
.order_by(StatisticsShortTerm.metadata_id, StatisticsShortTerm.start.desc())
.limit(number_of_stats)
)
return stmt
def _get_last_statistics(
@@ -1099,8 +1106,11 @@ def _get_last_statistics(
if not metadata:
return {}
metadata_id = metadata[statistic_id][0]
stmt = _get_last_statistics_stmt(metadata_id, number_of_stats, table)
stats = execute_stmt_lambda_element(session, stmt)
if table == Statistics:
stmt = _get_last_statistics_stmt(metadata_id, number_of_stats)
else:
stmt = _get_last_statistics_short_term_stmt(metadata_id, number_of_stats)
stats = execute_stmt(session, stmt)
if not stats:
return {}
@@ -1136,12 +1146,9 @@ def get_last_short_term_statistics(
)
def _latest_short_term_statistics_stmt(
metadata_ids: list[int],
) -> StatementLambdaElement:
"""Create the statement for finding the latest short term stat rows."""
stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SHORT_TERM))
most_recent_statistic_row = (
def _generate_most_recent_statistic_row(metadata_ids: list[int]) -> Subquery:
"""Generate the subquery to find the most recent statistic row."""
return (
select(
StatisticsShortTerm.metadata_id,
func.max(StatisticsShortTerm.start).label("start_max"),
@@ -1149,7 +1156,15 @@ def _latest_short_term_statistics_stmt(
.where(StatisticsShortTerm.metadata_id.in_(metadata_ids))
.group_by(StatisticsShortTerm.metadata_id)
).subquery()
stmt += lambda s: s.join(
def _latest_short_term_statistics_stmt(
metadata_ids: list[int],
) -> Select:
"""Create the statement for finding the latest short term stat rows."""
stmt = select(*QUERY_STATISTICS_SHORT_TERM)
most_recent_statistic_row = _generate_most_recent_statistic_row(metadata_ids)
return stmt.join(
most_recent_statistic_row,
(
StatisticsShortTerm.metadata_id # pylint: disable=comparison-with-callable
@@ -1157,7 +1172,6 @@ def _latest_short_term_statistics_stmt(
)
& (StatisticsShortTerm.start == most_recent_statistic_row.c.start_max),
)
return stmt
def get_latest_short_term_statistics(
@@ -1180,7 +1194,7 @@ def get_latest_short_term_statistics(
if statistic_id in metadata
]
stmt = _latest_short_term_statistics_stmt(metadata_ids)
stats = execute_stmt_lambda_element(session, stmt)
stats = execute_stmt(session, stmt)
if not stats:
return {}
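
Two patterns in the statistics.py hunks above are worth calling out. First, builders that took a `table` argument were split into per-table functions (_statistics_during_period_stmt vs _statistics_during_period_stmt_short_term, and likewise for the last-statistics builders), so each plain Select references concrete columns instead of branching on the table type. Second, _generate_most_recent_statistic_row is the classic greatest-row-per-group shape: a MAX() subquery joined back on the grouping key. A self-contained sketch of that shape, with an illustrative table rather than the real StatisticsShortTerm model:

from datetime import datetime
from sqlalchemy import Column, DateTime, Integer, create_engine, func, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Stat(Base):  # illustrative stand-in, not the recorder schema
    __tablename__ = "stats"
    id = Column(Integer, primary_key=True)
    metadata_id = Column(Integer)
    start = Column(DateTime)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all(
        [
            Stat(metadata_id=1, start=datetime(2022, 6, 6, 11)),
            Stat(metadata_id=1, start=datetime(2022, 6, 6, 12)),  # latest for 1
            Stat(metadata_id=2, start=datetime(2022, 6, 6, 9)),   # latest for 2
        ]
    )
    session.commit()
    # Subquery: the newest `start` per metadata_id...
    most_recent = (
        select(Stat.metadata_id, func.max(Stat.start).label("start_max"))
        .group_by(Stat.metadata_id)
        .subquery()
    )
    # ...joined back so only the newest row of each group survives.
    stmt = select(Stat).join(
        most_recent,
        (Stat.metadata_id == most_recent.c.metadata_id)
        & (Stat.start == most_recent.c.start_max),
    )
    rows = session.execute(stmt).scalars().all()
    assert {(r.metadata_id, r.start.hour) for r in rows} == {(1, 12), (2, 9)}
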

View File

@@ -22,7 +22,6 @@ from sqlalchemy.engine.row import Row
from sqlalchemy.exc import OperationalError, SQLAlchemyError
from sqlalchemy.orm.query import Query
from sqlalchemy.orm.session import Session
from sqlalchemy.sql.lambdas import StatementLambdaElement
from typing_extensions import Concatenate, ParamSpec
from homeassistant.core import HomeAssistant
@@ -167,9 +166,9 @@ def execute(
assert False # unreachable # pragma: no cover
def execute_stmt_lambda_element(
def execute_stmt(
session: Session,
stmt: StatementLambdaElement,
query: Query,
start_time: datetime | None = None,
end_time: datetime | None = None,
yield_per: int | None = DEFAULT_YIELD_STATES_ROWS,
@@ -185,11 +184,12 @@ def execute_stmt_lambda_element(
specific entities) since they are usually faster
with .all().
"""
executed = session.execute(stmt)
use_all = not start_time or ((end_time or dt_util.utcnow()) - start_time).days <= 1
for tryno in range(0, RETRIES):
try:
return executed.all() if use_all else executed.yield_per(yield_per) # type: ignore[no-any-return]
if use_all:
return session.execute(query).all() # type: ignore[no-any-return]
return session.execute(query).yield_per(yield_per) # type: ignore[no-any-return]
except SQLAlchemyError as err:
_LOGGER.error("Error executing query: %s", err)
if tryno == RETRIES - 1:
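
The reworked helper above combines two concerns: re-executing the statement on a transient SQLAlchemyError (note that session.execute now sits inside the retry loop, so each attempt is a fresh execution) and choosing .all() for short time windows versus .yield_per() streaming for long ones. A simplified sketch of that shape; the constants and the bare session parameter are this sketch's assumptions, not Home Assistant's:

import time
from sqlalchemy.exc import SQLAlchemyError

RETRIES = 3            # assumed values for the sketch
QUERY_RETRY_WAIT = 0.1

def execute_with_retry(session, stmt, stream=False, yield_per=1000):
    """Execute stmt, retrying transient errors; stream large result sets."""
    for tryno in range(RETRIES):
        try:
            result = session.execute(stmt)
            # .all() pulls every row into memory at once; .yield_per()
            # fetches in chunks, which is what long multi-day windows want.
            return result.yield_per(yield_per) if stream else result.all()
        except SQLAlchemyError:
            if tryno == RETRIES - 1:
                raise
            time.sleep(QUERY_RETRY_WAIT)
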

View File

@@ -7,7 +7,7 @@
"samsungctl[websocket]==0.7.1",
"samsungtvws[async,encrypted]==2.5.0",
"wakeonlan==2.0.1",
"async-upnp-client==0.30.1"
"async-upnp-client==0.31.1"
],
"ssdp": [
{

View File

@@ -3,7 +3,7 @@
"name": "SimpliSafe",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/simplisafe",
"requirements": ["simplisafe-python==2022.05.2"],
"requirements": ["simplisafe-python==2022.06.0"],
"codeowners": ["@bachya"],
"iot_class": "cloud_polling",
"dhcp": [

View File

@@ -751,17 +751,23 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
media_content_type,
)
def join_players(self, group_members):
async def async_join_players(self, group_members):
"""Join `group_members` as a player group with the current player."""
speakers = []
for entity_id in group_members:
if speaker := self.hass.data[DATA_SONOS].entity_id_mappings.get(entity_id):
speakers.append(speaker)
else:
raise HomeAssistantError(f"Not a known Sonos entity_id: {entity_id}")
async with self.hass.data[DATA_SONOS].topology_condition:
speakers = []
for entity_id in group_members:
if speaker := self.hass.data[DATA_SONOS].entity_id_mappings.get(
entity_id
):
speakers.append(speaker)
else:
raise HomeAssistantError(
f"Not a known Sonos entity_id: {entity_id}"
)
self.speaker.join(speakers)
await self.hass.async_add_executor_job(self.speaker.join, speakers)
def unjoin_player(self):
async def async_unjoin_player(self):
"""Remove this player from any group."""
self.speaker.unjoin()
async with self.hass.data[DATA_SONOS].topology_condition:
await self.hass.async_add_executor_job(self.speaker.unjoin)
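
The Sonos change converts join/unjoin from sync service handlers into async ones: group topology mutations are serialized behind the integration's asyncio.Condition, and the blocking SoCo calls move to the executor. A toy sketch of the same shape, with a stand-in blocking call rather than the real SoCo API:

import asyncio

async def main() -> None:
    topology_condition = asyncio.Condition()

    def blocking_join(members: list[str]) -> None:
        # Stand-in for the blocking speaker.join(...) library call.
        print("joined:", ", ".join(members))

    # Serialize topology changes, then keep the blocking call off the
    # event loop -- the role hass.async_add_executor_job plays above.
    async with topology_condition:
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, blocking_join, ["kitchen", "den"])

asyncio.run(main())
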

View File

@@ -2,7 +2,7 @@
"domain": "ssdp",
"name": "Simple Service Discovery Protocol (SSDP)",
"documentation": "https://www.home-assistant.io/integrations/ssdp",
"requirements": ["async-upnp-client==0.30.1"],
"requirements": ["async-upnp-client==0.31.1"],
"dependencies": ["network"],
"after_dependencies": ["zeroconf"],
"codeowners": [],

View File

@@ -198,13 +198,16 @@ class TomorrowioWeatherEntity(TomorrowioEntity, WeatherEntity):
max_forecasts = MAX_FORECASTS[self.forecast_type]
forecast_count = 0
# Convert utcnow to local to be compatible with tests
today = dt_util.as_local(dt_util.utcnow()).date()
# Set default values (in cases where keys don't exist), None will be
# returned. Override properties per forecast type as needed
for forecast in raw_forecasts:
forecast_dt = dt_util.parse_datetime(forecast[TMRW_ATTR_TIMESTAMP])
# Throw out past data
if forecast_dt.date() < dt_util.utcnow().date():
if dt_util.as_local(forecast_dt).date() < today:
continue
values = forecast["values"]

View File

@@ -3,7 +3,7 @@
"name": "UPnP/IGD",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/upnp",
"requirements": ["async-upnp-client==0.30.1", "getmac==0.8.2"],
"requirements": ["async-upnp-client==0.31.1", "getmac==0.8.2"],
"dependencies": ["network", "ssdp"],
"codeowners": ["@StevenLooman", "@ehendrix23"],
"ssdp": [

View File

@@ -17,6 +17,8 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from . import DATA_VELUX, VeluxEntity
PARALLEL_UPDATES = 1
async def async_setup_platform(
hass: HomeAssistant,
@@ -97,12 +99,11 @@ class VeluxCover(VeluxEntity, CoverEntity):
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
if ATTR_POSITION in kwargs:
position_percent = 100 - kwargs[ATTR_POSITION]
position_percent = 100 - kwargs[ATTR_POSITION]
await self.node.set_position(
Position(position_percent=position_percent), wait_for_completion=False
)
await self.node.set_position(
Position(position_percent=position_percent), wait_for_completion=False
)
async def async_stop_cover(self, **kwargs):
"""Stop the cover."""

View File

@@ -10,6 +10,8 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from . import DATA_VELUX, VeluxEntity
PARALLEL_UPDATES = 1
async def async_setup_platform(
hass: HomeAssistant,

View File

@@ -10,6 +10,8 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from . import _LOGGER, DATA_VELUX
PARALLEL_UPDATES = 1
async def async_setup_platform(
hass: HomeAssistant,

View File

@@ -3,7 +3,7 @@
"name": "Wallbox",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/wallbox",
"requirements": ["wallbox==0.4.4"],
"requirements": ["wallbox==0.4.9"],
"ssdp": [],
"zeroconf": [],
"homekit": {},

View File

@@ -3,7 +3,7 @@
"name": "Belkin WeMo",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/wemo",
"requirements": ["pywemo==0.8.1"],
"requirements": ["pywemo==0.9.1"],
"ssdp": [
{
"manufacturer": "Belkin International Inc."

View File

@@ -2,7 +2,7 @@
"domain": "yeelight",
"name": "Yeelight",
"documentation": "https://www.home-assistant.io/integrations/yeelight",
"requirements": ["yeelight==0.7.10", "async-upnp-client==0.30.1"],
"requirements": ["yeelight==0.7.10", "async-upnp-client==0.31.1"],
"codeowners": ["@zewelor", "@shenxn", "@starkillerOG", "@alexyao2015"],
"config_flow": true,
"dependencies": ["network"],

View File

@@ -7,7 +7,7 @@ from .backports.enum import StrEnum
MAJOR_VERSION: Final = 2022
MINOR_VERSION: Final = 6
PATCH_VERSION: Final = "2"
PATCH_VERSION: Final = "4"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)

View File

@@ -4,7 +4,7 @@ aiodiscover==1.4.11
aiohttp==3.8.1
aiohttp_cors==0.7.0
astral==2.2
async-upnp-client==0.30.1
async-upnp-client==0.31.1
async_timeout==4.0.2
atomicwrites==1.4.0
attrs==21.2.0

View File

@@ -187,7 +187,7 @@ aiolifx==0.7.1
aiolifx_effects==0.2.2
# homeassistant.components.lookin
aiolookin==0.1.0
aiolookin==0.1.1
# homeassistant.components.lyric
aiolyric==1.0.8
@@ -336,7 +336,7 @@ asterisk_mbox==0.5.0
# homeassistant.components.ssdp
# homeassistant.components.upnp
# homeassistant.components.yeelight
async-upnp-client==0.30.1
async-upnp-client==0.31.1
# homeassistant.components.supla
asyncpysupla==0.0.5
@@ -1550,7 +1550,7 @@ pyhomeworks==0.0.6
pyialarm==1.9.0
# homeassistant.components.ialarm_xr
pyialarmxr==1.0.18
pyialarmxr-homeassistant==1.0.18
# homeassistant.components.icloud
pyicloud==1.0.0
@@ -2023,7 +2023,7 @@ pyvolumio==0.1.5
pywebpush==1.9.2
# homeassistant.components.wemo
pywemo==0.8.1
pywemo==0.9.1
# homeassistant.components.wilight
pywilight==0.0.70
@@ -2065,7 +2065,7 @@ raincloudy==0.0.7
raspyrfm-client==1.2.8
# homeassistant.components.rainmachine
regenmaschine==2022.05.1
regenmaschine==2022.06.0
# homeassistant.components.renault
renault-api==0.1.11
@@ -2168,7 +2168,7 @@ simplehound==0.3
simplepush==1.1.4
# homeassistant.components.simplisafe
simplisafe-python==2022.05.2
simplisafe-python==2022.06.0
# homeassistant.components.sisyphus
sisyphus-control==3.1.2
@@ -2418,7 +2418,7 @@ vultr==0.1.2
wakeonlan==2.0.1
# homeassistant.components.wallbox
wallbox==0.4.4
wallbox==0.4.9
# homeassistant.components.waqi
waqiasync==1.0.0

View File

@@ -159,7 +159,7 @@ aiohue==4.4.1
aiokafka==0.6.0
# homeassistant.components.lookin
aiolookin==0.1.0
aiolookin==0.1.1
# homeassistant.components.lyric
aiolyric==1.0.8
@@ -278,7 +278,7 @@ arcam-fmj==0.12.0
# homeassistant.components.ssdp
# homeassistant.components.upnp
# homeassistant.components.yeelight
async-upnp-client==0.30.1
async-upnp-client==0.31.1
# homeassistant.components.sleepiq
asyncsleepiq==1.2.3
@@ -1038,7 +1038,7 @@ pyhomematic==0.1.77
pyialarm==1.9.0
# homeassistant.components.ialarm_xr
pyialarmxr==1.0.18
pyialarmxr-homeassistant==1.0.18
# homeassistant.components.icloud
pyicloud==1.0.0
@@ -1343,7 +1343,7 @@ pyvolumio==0.1.5
pywebpush==1.9.2
# homeassistant.components.wemo
pywemo==0.8.1
pywemo==0.9.1
# homeassistant.components.wilight
pywilight==0.0.70
@@ -1364,7 +1364,7 @@ rachiopy==1.0.3
radios==0.1.1
# homeassistant.components.rainmachine
regenmaschine==2022.05.1
regenmaschine==2022.06.0
# homeassistant.components.renault
renault-api==0.1.11
@@ -1425,7 +1425,7 @@ sharkiq==0.0.1
simplehound==0.3
# homeassistant.components.simplisafe
simplisafe-python==2022.05.2
simplisafe-python==2022.06.0
# homeassistant.components.slack
slackclient==2.5.0
@@ -1591,7 +1591,7 @@ vultr==0.1.2
wakeonlan==2.0.1
# homeassistant.components.wallbox
wallbox==0.4.4
wallbox==0.4.9
# homeassistant.components.folder_watcher
watchdog==2.1.8

View File

@@ -1,5 +1,5 @@
[metadata]
version = 2022.6.2
version = 2022.6.4
url = https://www.home-assistant.io/
[options]

View File

@@ -1,4 +1,4 @@
"""Common stuff for AVM Fritz!Box tests."""
"""Common stuff for Fritz!Tools tests."""
import logging
from unittest.mock import MagicMock, patch
@@ -73,13 +73,19 @@ class FritzHostMock(FritzHosts):
return MOCK_MESH_DATA
@pytest.fixture(name="fc_data")
def fc_data_mock():
"""Fixture for default fc_data."""
return MOCK_FB_SERVICES
@pytest.fixture()
def fc_class_mock():
def fc_class_mock(fc_data):
"""Fixture that sets up a mocked FritzConnection class."""
with patch(
"homeassistant.components.fritz.common.FritzConnection", autospec=True
) as result:
result.return_value = FritzConnectionMock(MOCK_FB_SERVICES)
result.return_value = FritzConnectionMock(fc_data)
yield result
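
The new fc_data fixture exists so individual tests can swap the mocked FritzConnection payload: pytest lets a test override a fixture by parametrizing its name directly, and dependent fixtures such as fc_class_mock then receive the parametrized value (which is how the switch tests further down inject their WLAN configurations). A minimal sketch of the mechanism with illustrative data:

import pytest

@pytest.fixture(name="fc_data")
def fc_data_mock():
    """Default payload, used when a test does not parametrize fc_data."""
    return {"profile": "default"}

@pytest.fixture()
def fc_class_mock(fc_data):
    """Depends on fc_data, so it sees any parametrized override."""
    return {"connection": fc_data}

@pytest.mark.parametrize("fc_data", [{"profile": "same_ssid"}])
def test_override(fc_class_mock, fc_data):
    assert fc_class_mock["connection"]["profile"] == "same_ssid"

def test_default(fc_class_mock):
    assert fc_class_mock["connection"]["profile"] == "default"
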

View File

@@ -1,4 +1,4 @@
"""Common stuff for AVM Fritz!Box tests."""
"""Common stuff for Fritz!Tools tests."""
from homeassistant.components import ssdp
from homeassistant.components.fritz.const import DOMAIN
from homeassistant.components.ssdp import ATTR_UPNP_FRIENDLY_NAME, ATTR_UPNP_UDN
@@ -194,6 +194,7 @@ MOCK_FB_SERVICES: dict[str, dict] = {
},
}
MOCK_MESH_DATA = {
"schema_version": "1.9",
"nodes": [

View File

@@ -1,4 +1,4 @@
"""Tests for Shelly button platform."""
"""Tests for Fritz!Tools button platform."""
from unittest.mock import patch
import pytest

View File

@@ -1,4 +1,4 @@
"""Tests for AVM Fritz!Box config flow."""
"""Tests for Fritz!Tools config flow."""
import dataclasses
from unittest.mock import patch

View File

@@ -1,4 +1,4 @@
"""Tests for the AVM Fritz!Box integration."""
"""Tests for Fritz!Tools diagnostics platform."""
from __future__ import annotations
from aiohttp import ClientSession

View File

@@ -1,4 +1,4 @@
"""Tests for AVM Fritz!Box."""
"""Tests for Fritz!Tools."""
from unittest.mock import patch
from fritzconnection.core.exceptions import FritzSecurityError

View File

@@ -1,4 +1,4 @@
"""Tests for Shelly button platform."""
"""Tests for Fritz!Tools sensor platform."""
from __future__ import annotations
from datetime import timedelta

View File

@@ -0,0 +1,189 @@
"""Tests for Fritz!Tools switch platform."""
from __future__ import annotations
import pytest
from homeassistant.components.fritz.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from .const import MOCK_FB_SERVICES, MOCK_USER_DATA
from tests.common import MockConfigEntry
MOCK_WLANCONFIGS_SAME_SSID: dict[str, dict] = {
"WLANConfiguration1": {
"GetInfo": {
"NewEnable": True,
"NewStatus": "Up",
"NewMaxBitRate": "Auto",
"NewChannel": 13,
"NewSSID": "WiFi",
"NewBeaconType": "11iandWPA3",
"NewX_AVM-DE_PossibleBeaconTypes": "None,11i,11iandWPA3",
"NewMACAddressControlEnabled": False,
"NewStandard": "ax",
"NewBSSID": "1C:ED:6F:12:34:12",
"NewBasicEncryptionModes": "None",
"NewBasicAuthenticationMode": "None",
"NewMaxCharsSSID": 32,
"NewMinCharsSSID": 1,
"NewAllowedCharsSSID": "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
"NewMinCharsPSK": 64,
"NewMaxCharsPSK": 64,
"NewAllowedCharsPSK": "0123456789ABCDEFabcdef",
}
},
"WLANConfiguration2": {
"GetInfo": {
"NewEnable": True,
"NewStatus": "Up",
"NewMaxBitRate": "Auto",
"NewChannel": 52,
"NewSSID": "WiFi",
"NewBeaconType": "11iandWPA3",
"NewX_AVM-DE_PossibleBeaconTypes": "None,11i,11iandWPA3",
"NewMACAddressControlEnabled": False,
"NewStandard": "ax",
"NewBSSID": "1C:ED:6F:12:34:13",
"NewBasicEncryptionModes": "None",
"NewBasicAuthenticationMode": "None",
"NewMaxCharsSSID": 32,
"NewMinCharsSSID": 1,
"NewAllowedCharsSSID": "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
"NewMinCharsPSK": 64,
"NewMaxCharsPSK": 64,
"NewAllowedCharsPSK": "0123456789ABCDEFabcdef",
}
},
}
MOCK_WLANCONFIGS_DIFF_SSID: dict[str, dict] = {
"WLANConfiguration1": {
"GetInfo": {
"NewEnable": True,
"NewStatus": "Up",
"NewMaxBitRate": "Auto",
"NewChannel": 13,
"NewSSID": "WiFi",
"NewBeaconType": "11iandWPA3",
"NewX_AVM-DE_PossibleBeaconTypes": "None,11i,11iandWPA3",
"NewMACAddressControlEnabled": False,
"NewStandard": "ax",
"NewBSSID": "1C:ED:6F:12:34:12",
"NewBasicEncryptionModes": "None",
"NewBasicAuthenticationMode": "None",
"NewMaxCharsSSID": 32,
"NewMinCharsSSID": 1,
"NewAllowedCharsSSID": "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
"NewMinCharsPSK": 64,
"NewMaxCharsPSK": 64,
"NewAllowedCharsPSK": "0123456789ABCDEFabcdef",
}
},
"WLANConfiguration2": {
"GetInfo": {
"NewEnable": True,
"NewStatus": "Up",
"NewMaxBitRate": "Auto",
"NewChannel": 52,
"NewSSID": "WiFi2",
"NewBeaconType": "11iandWPA3",
"NewX_AVM-DE_PossibleBeaconTypes": "None,11i,11iandWPA3",
"NewMACAddressControlEnabled": False,
"NewStandard": "ax",
"NewBSSID": "1C:ED:6F:12:34:13",
"NewBasicEncryptionModes": "None",
"NewBasicAuthenticationMode": "None",
"NewMaxCharsSSID": 32,
"NewMinCharsSSID": 1,
"NewAllowedCharsSSID": "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
"NewMinCharsPSK": 64,
"NewMaxCharsPSK": 64,
"NewAllowedCharsPSK": "0123456789ABCDEFabcdef",
}
},
}
MOCK_WLANCONFIGS_DIFF2_SSID: dict[str, dict] = {
"WLANConfiguration1": {
"GetInfo": {
"NewEnable": True,
"NewStatus": "Up",
"NewMaxBitRate": "Auto",
"NewChannel": 13,
"NewSSID": "WiFi",
"NewBeaconType": "11iandWPA3",
"NewX_AVM-DE_PossibleBeaconTypes": "None,11i,11iandWPA3",
"NewMACAddressControlEnabled": False,
"NewStandard": "ax",
"NewBSSID": "1C:ED:6F:12:34:12",
"NewBasicEncryptionModes": "None",
"NewBasicAuthenticationMode": "None",
"NewMaxCharsSSID": 32,
"NewMinCharsSSID": 1,
"NewAllowedCharsSSID": "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
"NewMinCharsPSK": 64,
"NewMaxCharsPSK": 64,
"NewAllowedCharsPSK": "0123456789ABCDEFabcdef",
}
},
"WLANConfiguration2": {
"GetInfo": {
"NewEnable": True,
"NewStatus": "Up",
"NewMaxBitRate": "Auto",
"NewChannel": 52,
"NewSSID": "WiFi+",
"NewBeaconType": "11iandWPA3",
"NewX_AVM-DE_PossibleBeaconTypes": "None,11i,11iandWPA3",
"NewMACAddressControlEnabled": False,
"NewStandard": "ax",
"NewBSSID": "1C:ED:6F:12:34:13",
"NewBasicEncryptionModes": "None",
"NewBasicAuthenticationMode": "None",
"NewMaxCharsSSID": 32,
"NewMinCharsSSID": 1,
"NewAllowedCharsSSID": "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
"NewMinCharsPSK": 64,
"NewMaxCharsPSK": 64,
"NewAllowedCharsPSK": "0123456789ABCDEFabcdef",
}
},
}
@pytest.mark.parametrize(
"fc_data, expected_wifi_names",
[
(
{**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_SAME_SSID},
["WiFi (2.4Ghz)", "WiFi (5Ghz)"],
),
({**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_DIFF_SSID}, ["WiFi", "WiFi2"]),
(
{**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_DIFF2_SSID},
["WiFi (2.4Ghz)", "WiFi+ (5Ghz)"],
),
],
)
async def test_switch_setup(
hass: HomeAssistant,
expected_wifi_names: list[str],
fc_class_mock,
fh_class_mock,
):
"""Test setup of Fritz!Tools switches."""
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA)
entry.add_to_hass(hass)
assert await async_setup_component(hass, DOMAIN, {})
await hass.async_block_till_done()
assert entry.state == ConfigEntryState.LOADED
switches = hass.states.async_all(Platform.SWITCH)
assert len(switches) == 3
assert switches[0].name == f"Mock Title Wi-Fi {expected_wifi_names[0]}"
assert switches[1].name == f"Mock Title Wi-Fi {expected_wifi_names[1]}"
assert switches[2].name == "printer Internet Access"

View File

@@ -1,4 +1,4 @@
"""The tests for the Fritzbox update entity."""
"""Tests for Fritz!Tools update platform."""
from unittest.mock import patch

View File

@@ -68,7 +68,6 @@ def mock_humanify(hass_, rows):
return list(
processor._humanify(
rows,
None,
ent_reg,
logbook_run,
context_augmenter,

View File

@@ -745,6 +745,12 @@ async def test_filter_continuous_sensor_values(
entity_id_third = "light.bla"
hass.states.async_set(entity_id_third, STATE_OFF, {"unit_of_measurement": "foo"})
hass.states.async_set(entity_id_third, STATE_ON, {"unit_of_measurement": "foo"})
entity_id_proximity = "proximity.bla"
hass.states.async_set(entity_id_proximity, STATE_OFF)
hass.states.async_set(entity_id_proximity, STATE_ON)
entity_id_counter = "counter.bla"
hass.states.async_set(entity_id_counter, STATE_OFF)
hass.states.async_set(entity_id_counter, STATE_ON)
await async_wait_recording_done(hass)

View File

@@ -27,8 +27,8 @@ from homeassistant.const import (
STATE_OFF,
STATE_ON,
)
from homeassistant.core import Event, HomeAssistant, State
from homeassistant.helpers import device_registry
from homeassistant.core import Event, HomeAssistant, State, callback
from homeassistant.helpers import device_registry, entity_registry
from homeassistant.helpers.entityfilter import CONF_ENTITY_GLOBS
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
@@ -51,22 +51,8 @@ def set_utc(hass):
hass.config.set_time_zone("UTC")
async def _async_mock_device_with_logbook_platform(hass):
"""Mock an integration that provides a device that are described by the logbook."""
entry = MockConfigEntry(domain="test", data={"first": True}, options=None)
entry.add_to_hass(hass)
dev_reg = device_registry.async_get(hass)
device = dev_reg.async_get_or_create(
config_entry_id=entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
sw_version="sw-version",
name="device name",
manufacturer="manufacturer",
model="model",
suggested_area="Game Room",
)
@callback
async def _async_mock_logbook_platform(hass: HomeAssistant) -> None:
class MockLogbookPlatform:
"""Mock a logbook platform."""
@@ -90,6 +76,40 @@ async def _async_mock_device_with_logbook_platform(hass):
async_describe_event("test", "mock_event", async_describe_test_event)
await logbook._process_logbook_platform(hass, "test", MockLogbookPlatform)
async def _async_mock_entity_with_logbook_platform(hass):
"""Mock an integration that provides an entity that are described by the logbook."""
entry = MockConfigEntry(domain="test", data={"first": True}, options=None)
entry.add_to_hass(hass)
ent_reg = entity_registry.async_get(hass)
entry = ent_reg.async_get_or_create(
platform="test",
domain="sensor",
config_entry=entry,
unique_id="1234",
suggested_object_id="test",
)
await _async_mock_logbook_platform(hass)
return entry
async def _async_mock_device_with_logbook_platform(hass):
"""Mock an integration that provides a device that are described by the logbook."""
entry = MockConfigEntry(domain="test", data={"first": True}, options=None)
entry.add_to_hass(hass)
dev_reg = device_registry.async_get(hass)
device = dev_reg.async_get_or_create(
config_entry_id=entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
sw_version="sw-version",
name="device name",
manufacturer="manufacturer",
model="model",
suggested_area="Game Room",
)
await _async_mock_logbook_platform(hass)
return device
@@ -1786,6 +1806,103 @@ async def test_event_stream_bad_start_time(hass, hass_ws_client, recorder_mock):
assert response["error"]["code"] == "invalid_start_time"
@patch("homeassistant.components.logbook.websocket_api.EVENT_COALESCE_TIME", 0)
async def test_logbook_stream_match_multiple_entities(
hass, recorder_mock, hass_ws_client
):
"""Test logbook stream with a described integration that uses multiple entities."""
now = dt_util.utcnow()
await asyncio.gather(
*[
async_setup_component(hass, comp, {})
for comp in ("homeassistant", "logbook", "automation", "script")
]
)
entry = await _async_mock_entity_with_logbook_platform(hass)
entity_id = entry.entity_id
hass.states.async_set(entity_id, STATE_ON)
await hass.async_block_till_done()
init_count = sum(hass.bus.async_listeners().values())
await async_wait_recording_done(hass)
websocket_client = await hass_ws_client()
await websocket_client.send_json(
{
"id": 7,
"type": "logbook/event_stream",
"start_time": now.isoformat(),
"entity_ids": [entity_id],
}
)
msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
assert msg["id"] == 7
assert msg["type"] == TYPE_RESULT
assert msg["success"]
# There are no answers to our initial query
# so we get an empty reply. This is to ensure
# consumers of the api know there are no results
# and it's not a failure case. This is useful
# in the frontend so we can tell the user there
# are no results vs waiting for them to appear
msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
assert msg["id"] == 7
assert msg["type"] == "event"
assert msg["event"]["events"] == []
await async_wait_recording_done(hass)
hass.states.async_set("binary_sensor.should_not_appear", STATE_ON)
hass.states.async_set("binary_sensor.should_not_appear", STATE_OFF)
context = core.Context(
id="ac5bd62de45711eaaeb351041eec8dd9",
user_id="b400facee45711eaa9308bfd3d19e474",
)
hass.bus.async_fire(
"mock_event", {"entity_id": ["sensor.any", entity_id]}, context=context
)
hass.bus.async_fire("mock_event", {"entity_id": [f"sensor.any,{entity_id}"]})
hass.bus.async_fire("mock_event", {"entity_id": ["sensor.no_match", "light.off"]})
hass.states.async_set(entity_id, STATE_OFF, context=context)
await hass.async_block_till_done()
msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
assert msg["id"] == 7
assert msg["type"] == "event"
assert msg["event"]["events"] == [
{
"context_user_id": "b400facee45711eaa9308bfd3d19e474",
"domain": "test",
"message": "is on fire",
"name": "device name",
"when": ANY,
},
{
"context_domain": "test",
"context_event_type": "mock_event",
"context_message": "is on fire",
"context_name": "device name",
"context_user_id": "b400facee45711eaa9308bfd3d19e474",
"entity_id": "sensor.test",
"state": "off",
"when": ANY,
},
]
await websocket_client.send_json(
{"id": 8, "type": "unsubscribe_events", "subscription": 7}
)
msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
assert msg["id"] == 8
assert msg["type"] == TYPE_RESULT
assert msg["success"]
# Check our listener got unsubscribed
assert sum(hass.bus.async_listeners().values()) == init_count
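
The comments in this test spell out the stream contract: after the subscription result, the server always sends one event batch, even an empty one, so a client can distinguish "no entries" from "still waiting". A toy consumer illustrating that contract over a queue, not the real websocket plumbing:

import asyncio

async def server(queue: asyncio.Queue) -> None:
    await queue.put({"id": 7, "type": "result", "success": True})
    # Always follow the result with a batch, even when it is empty.
    await queue.put({"id": 7, "type": "event", "event": {"events": []}})

async def client(queue: asyncio.Queue) -> None:
    assert (await queue.get())["success"]  # subscription accepted
    batch = await queue.get()
    if not batch["event"]["events"]:
        print("no logbook entries yet")    # a definitive answer, not a stall

async def main() -> None:
    queue: asyncio.Queue = asyncio.Queue()
    await asyncio.gather(server(queue), client(queue))

asyncio.run(main())
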
async def test_event_stream_bad_end_time(hass, hass_ws_client, recorder_mock):
"""Test event_stream bad end time."""
await async_setup_component(hass, "logbook", {})
@@ -2092,7 +2209,9 @@ async def test_recorder_is_far_behind(hass, recorder_mock, hass_ws_client, caplo
@patch("homeassistant.components.logbook.websocket_api.EVENT_COALESCE_TIME", 0)
async def test_subscribe_all_entities_have_uom(hass, recorder_mock, hass_ws_client):
async def test_subscribe_all_entities_are_continuous(
hass, recorder_mock, hass_ws_client
):
"""Test subscribe/unsubscribe logbook stream with entities that are always filtered."""
now = dt_util.utcnow()
await asyncio.gather(
@@ -2102,11 +2221,19 @@ async def test_subscribe_all_entities_have_uom(hass, recorder_mock, hass_ws_clie
]
)
await async_wait_recording_done(hass)
entity_ids = ("sensor.uom", "sensor.uom_two")
def _cycle_entities():
for entity_id in entity_ids:
for state in ("1", "2", "3"):
hass.states.async_set(
entity_id, state, {ATTR_UNIT_OF_MEASUREMENT: "any"}
)
hass.states.async_set("counter.any", state)
hass.states.async_set("proximity.any", state)
init_count = sum(hass.bus.async_listeners().values())
hass.states.async_set("sensor.uom", "1", {ATTR_UNIT_OF_MEASUREMENT: "any"})
hass.states.async_set("sensor.uom", "2", {ATTR_UNIT_OF_MEASUREMENT: "any"})
hass.states.async_set("sensor.uom", "3", {ATTR_UNIT_OF_MEASUREMENT: "any"})
_cycle_entities()
await async_wait_recording_done(hass)
websocket_client = await hass_ws_client()
@@ -2115,7 +2242,7 @@ async def test_subscribe_all_entities_have_uom(hass, recorder_mock, hass_ws_clie
"id": 7,
"type": "logbook/event_stream",
"start_time": now.isoformat(),
"entity_ids": ["sensor.uom"],
"entity_ids": ["sensor.uom", "counter.any", "proximity.any"],
}
)
@@ -2124,9 +2251,61 @@ async def test_subscribe_all_entities_have_uom(hass, recorder_mock, hass_ws_clie
assert msg["type"] == TYPE_RESULT
assert msg["success"]
hass.states.async_set("sensor.uom", "1", {ATTR_UNIT_OF_MEASUREMENT: "any"})
hass.states.async_set("sensor.uom", "2", {ATTR_UNIT_OF_MEASUREMENT: "any"})
hass.states.async_set("sensor.uom", "3", {ATTR_UNIT_OF_MEASUREMENT: "any"})
_cycle_entities()
msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
assert msg["id"] == 7
assert msg["type"] == "event"
assert msg["event"]["events"] == []
await websocket_client.close()
await hass.async_block_till_done()
# Check our listener got unsubscribed
assert sum(hass.bus.async_listeners().values()) == init_count
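
What these streaming tests pin down is the "continuous entity" rule: counter and proximity states, like sensor states carrying a unit_of_measurement, never produce logbook entries. A tiny predicate mirroring the behavior the tests assert; the constants are illustrative, not the logbook implementation:

CONTINUOUS_DOMAINS = {"counter", "proximity"}

def is_continuous(entity_id: str, attributes: dict) -> bool:
    # Continuous entities change constantly and would flood the logbook.
    domain = entity_id.split(".", 1)[0]
    if domain in CONTINUOUS_DOMAINS:
        return True
    return domain == "sensor" and "unit_of_measurement" in attributes

assert is_continuous("counter.any", {})
assert is_continuous("sensor.uom", {"unit_of_measurement": "any"})
assert not is_continuous("sensor.keep", {})
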
@patch("homeassistant.components.logbook.websocket_api.EVENT_COALESCE_TIME", 0)
async def test_subscribe_all_entities_have_uom_multiple(
hass, recorder_mock, hass_ws_client
):
"""Test logbook stream with specific request for multiple entities that are always filtered."""
now = dt_util.utcnow()
await asyncio.gather(
*[
async_setup_component(hass, comp, {})
for comp in ("homeassistant", "logbook", "automation", "script")
]
)
await async_wait_recording_done(hass)
entity_ids = ("sensor.uom", "sensor.uom_two")
def _cycle_entities():
for entity_id in entity_ids:
for state in ("1", "2", "3"):
hass.states.async_set(
entity_id, state, {ATTR_UNIT_OF_MEASUREMENT: "any"}
)
init_count = sum(hass.bus.async_listeners().values())
_cycle_entities()
await async_wait_recording_done(hass)
websocket_client = await hass_ws_client()
await websocket_client.send_json(
{
"id": 7,
"type": "logbook/event_stream",
"start_time": now.isoformat(),
"entity_ids": [*entity_ids],
}
)
msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
assert msg["id"] == 7
assert msg["type"] == TYPE_RESULT
assert msg["success"]
_cycle_entities()
msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
assert msg["id"] == 7
@@ -2138,3 +2317,90 @@ async def test_subscribe_all_entities_have_uom(hass, recorder_mock, hass_ws_clie
# Check our listener got unsubscribed
assert sum(hass.bus.async_listeners().values()) == init_count
@patch("homeassistant.components.logbook.websocket_api.EVENT_COALESCE_TIME", 0)
async def test_subscribe_entities_some_have_uom_multiple(
hass, recorder_mock, hass_ws_client
):
"""Test logbook stream with uom filtered entities and non-fitlered entities."""
now = dt_util.utcnow()
await asyncio.gather(
*[
async_setup_component(hass, comp, {})
for comp in ("homeassistant", "logbook", "automation", "script")
]
)
await async_wait_recording_done(hass)
filtered_entity_ids = ("sensor.uom", "sensor.uom_two")
non_filtered_entity_ids = ("sensor.keep", "sensor.keep_two")
def _cycle_entities():
for entity_id in filtered_entity_ids:
for state in ("1", "2", "3"):
hass.states.async_set(
entity_id, state, {ATTR_UNIT_OF_MEASUREMENT: "any"}
)
for entity_id in non_filtered_entity_ids:
for state in (STATE_ON, STATE_OFF):
hass.states.async_set(entity_id, state)
init_count = sum(hass.bus.async_listeners().values())
_cycle_entities()
await async_wait_recording_done(hass)
websocket_client = await hass_ws_client()
await websocket_client.send_json(
{
"id": 7,
"type": "logbook/event_stream",
"start_time": now.isoformat(),
"entity_ids": [*filtered_entity_ids, *non_filtered_entity_ids],
}
)
msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
assert msg["id"] == 7
assert msg["type"] == TYPE_RESULT
assert msg["success"]
_cycle_entities()
msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
assert msg["id"] == 7
assert msg["type"] == "event"
assert msg["event"]["partial"] is True
assert msg["event"]["events"] == [
{"entity_id": "sensor.keep", "state": "off", "when": ANY},
{"entity_id": "sensor.keep_two", "state": "off", "when": ANY},
]
_cycle_entities()
await hass.async_block_till_done()
msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
assert msg["id"] == 7
assert msg["type"] == "event"
assert msg["event"]["events"] == []
assert "partial" not in msg["event"]
msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
assert msg["id"] == 7
assert msg["type"] == "event"
assert msg["event"]["events"] == [
{"entity_id": "sensor.keep", "state": "on", "when": ANY},
{"entity_id": "sensor.keep", "state": "off", "when": ANY},
{"entity_id": "sensor.keep_two", "state": "on", "when": ANY},
{"entity_id": "sensor.keep_two", "state": "off", "when": ANY},
{"entity_id": "sensor.keep", "state": "on", "when": ANY},
{"entity_id": "sensor.keep", "state": "off", "when": ANY},
{"entity_id": "sensor.keep_two", "state": "on", "when": ANY},
{"entity_id": "sensor.keep_two", "state": "off", "when": ANY},
]
assert "partial" not in msg["event"]
await websocket_client.close()
await hass.async_block_till_done()
# Check our listener got unsubscribed
assert sum(hass.bus.async_listeners().values()) == init_count

View File

@@ -878,3 +878,32 @@ async def test_get_full_significant_states_handles_empty_last_changed(
assert db_sensor_one_states[0].last_updated is not None
assert db_sensor_one_states[1].last_updated is not None
assert db_sensor_one_states[0].last_updated != db_sensor_one_states[1].last_updated
def test_state_changes_during_period_multiple_entities_single_test(hass_recorder):
"""Test state change during period with multiple entities in the same test.
This test ensures the sqlalchemy query cache does not
generate incorrect results.
"""
hass = hass_recorder()
start = dt_util.utcnow()
test_entites = {f"sensor.{i}": str(i) for i in range(30)}
for entity_id, value in test_entites.items():
hass.states.set(entity_id, value)
wait_recording_done(hass)
end = dt_util.utcnow()
hist = history.state_changes_during_period(hass, start, end, None)
for entity_id, value in test_entites.items():
assert hist[entity_id][0].state == value
for entity_id, value in test_entites.items():
hist = history.state_changes_during_period(hass, start, end, entity_id)
assert len(hist) == 1
assert hist[entity_id][0].state == value
hist = history.state_changes_during_period(hass, start, end, None)
for entity_id, value in test_entites.items():
assert hist[entity_id][0].state == value

View File

@@ -100,6 +100,15 @@ def test_compile_hourly_statistics(hass_recorder):
stats = statistics_during_period(hass, zero, period="5minute")
assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
# Test statistics_during_period with a far future start and end date
future = dt_util.as_utc(dt_util.parse_datetime("2221-11-01 00:00:00"))
stats = statistics_during_period(hass, future, end_time=future, period="5minute")
assert stats == {}
# Test statistics_during_period with a far future end date
stats = statistics_during_period(hass, zero, end_time=future, period="5minute")
assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
stats = statistics_during_period(
hass, zero, statistic_ids=["sensor.test2"], period="5minute"
)
@@ -814,6 +823,59 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
]
}
stats = statistics_during_period(
hass,
start_time=zero,
statistic_ids=["not", "the", "same", "test:total_energy_import"],
period="month",
)
sep_start = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
sep_end = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
oct_start = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
oct_end = dt_util.as_utc(dt_util.parse_datetime("2021-11-01 00:00:00"))
assert stats == {
"test:total_energy_import": [
{
"statistic_id": "test:total_energy_import",
"start": sep_start.isoformat(),
"end": sep_end.isoformat(),
"max": None,
"mean": None,
"min": None,
"last_reset": None,
"state": approx(1.0),
"sum": approx(3.0),
},
{
"statistic_id": "test:total_energy_import",
"start": oct_start.isoformat(),
"end": oct_end.isoformat(),
"max": None,
"mean": None,
"min": None,
"last_reset": None,
"state": approx(3.0),
"sum": approx(5.0),
},
]
}
# Use 5minute to ensure table switch works
stats = statistics_during_period(
hass,
start_time=zero,
statistic_ids=["test:total_energy_import", "with_other"],
period="5minute",
)
assert stats == {}
# Ensure a future date has no data
future = dt_util.as_utc(dt_util.parse_datetime("2221-11-01 00:00:00"))
stats = statistics_during_period(
hass, start_time=future, end_time=future, period="month"
)
assert stats == {}
dt_util.set_default_time_zone(dt_util.get_time_zone("UTC"))

View File

@@ -9,7 +9,6 @@ from sqlalchemy import text
from sqlalchemy.engine.result import ChunkedIteratorResult
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.sql.elements import TextClause
from sqlalchemy.sql.lambdas import StatementLambdaElement
from homeassistant.components import recorder
from homeassistant.components.recorder import history, util
@@ -712,8 +711,8 @@ def test_build_mysqldb_conv():
@patch("homeassistant.components.recorder.util.QUERY_RETRY_WAIT", 0)
def test_execute_stmt_lambda_element(hass_recorder):
"""Test executing with execute_stmt_lambda_element."""
def test_execute_stmt(hass_recorder):
"""Test executing with execute_stmt."""
hass = hass_recorder()
instance = recorder.get_instance(hass)
hass.states.set("sensor.on", "on")
@@ -724,13 +723,15 @@ def test_execute_stmt_lambda_element(hass_recorder):
one_week_from_now = now + timedelta(days=7)
class MockExecutor:
_calls = 0
def __init__(self, stmt):
assert isinstance(stmt, StatementLambdaElement)
self.calls = 0
"""Init the mock."""
def all(self):
self.calls += 1
if self.calls == 2:
MockExecutor._calls += 1
if MockExecutor._calls == 2:
return ["mock_row"]
raise SQLAlchemyError
@@ -739,24 +740,24 @@ def test_execute_stmt_lambda_element(hass_recorder):
stmt = history._get_single_entity_states_stmt(
instance.schema_version, dt_util.utcnow(), "sensor.on", False
)
rows = util.execute_stmt_lambda_element(session, stmt)
rows = util.execute_stmt(session, stmt)
assert isinstance(rows, list)
assert rows[0].state == new_state.state
assert rows[0].entity_id == new_state.entity_id
# Time window >= 2 days, we get a ChunkedIteratorResult
rows = util.execute_stmt_lambda_element(session, stmt, now, one_week_from_now)
rows = util.execute_stmt(session, stmt, now, one_week_from_now)
assert isinstance(rows, ChunkedIteratorResult)
row = next(rows)
assert row.state == new_state.state
assert row.entity_id == new_state.entity_id
# Time window < 2 days, we get a list
rows = util.execute_stmt_lambda_element(session, stmt, now, tomorrow)
rows = util.execute_stmt(session, stmt, now, tomorrow)
assert isinstance(rows, list)
assert rows[0].state == new_state.state
assert rows[0].entity_id == new_state.entity_id
with patch.object(session, "execute", MockExecutor):
rows = util.execute_stmt_lambda_element(session, stmt, now, tomorrow)
rows = util.execute_stmt(session, stmt, now, tomorrow)
assert rows == ["mock_row"]
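
The MockExecutor rewrite above follows from the execute_stmt change: session.execute is now called on every retry, and since the test patches the class itself in as session.execute, each call constructs a fresh instance, so the failure counter must live on the class. A stand-alone sketch of why an instance-level counter would silently reset:

class MockExecutor:
    _calls = 0  # class attribute: shared by every freshly built instance

    def __init__(self, stmt):
        """Init the mock; a new instance is built per execute() call."""

    def all(self):
        MockExecutor._calls += 1
        if MockExecutor._calls == 2:
            return ["mock_row"]
        raise RuntimeError("transient failure")  # stand-in for SQLAlchemyError

rows = None
for _ in range(2):  # retry loop, as in execute_stmt
    try:
        rows = MockExecutor(None).all()  # fresh instance on each attempt
    except RuntimeError:
        continue
assert rows == ["mock_row"]  # an instance counter would never reach 2
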

View File

@@ -26,7 +26,7 @@ from homeassistant.components.wallbox.const import (
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from .const import ERROR, JWT, STATUS, TTL, USER_ID
from .const import ERROR, STATUS, TTL, USER_ID
from tests.common import MockConfigEntry
@@ -54,11 +54,32 @@ test_response = json.loads(
authorisation_response = json.loads(
json.dumps(
{
JWT: "fakekeyhere",
USER_ID: 12345,
TTL: 145656758,
ERROR: "false",
STATUS: 200,
"data": {
"attributes": {
"token": "fakekeyhere",
USER_ID: 12345,
TTL: 145656758,
ERROR: "false",
STATUS: 200,
}
}
}
)
)
authorisation_response_unauthorised = json.loads(
json.dumps(
{
"data": {
"attributes": {
"token": "fakekeyhere",
USER_ID: 12345,
TTL: 145656758,
ERROR: "false",
STATUS: 404,
}
}
}
)
)
@@ -81,7 +102,7 @@ async def setup_integration(hass: HomeAssistant) -> None:
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
json=authorisation_response,
status_code=HTTPStatus.OK,
)
@@ -107,7 +128,7 @@ async def setup_integration_connection_error(hass: HomeAssistant) -> None:
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
json=authorisation_response,
status_code=HTTPStatus.FORBIDDEN,
)
@@ -133,7 +154,7 @@ async def setup_integration_read_only(hass: HomeAssistant) -> None:
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
json=authorisation_response,
status_code=HTTPStatus.OK,
)
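
Every Wallbox test now points its mock at the new signin endpoint. The pattern is plain requests_mock usage: register the URL with a JSON payload and status code, then exercise code that calls it. A self-contained sketch; the token-extraction helper is this sketch's own, not the wallbox library:

from http import HTTPStatus

import requests
import requests_mock

def fetch_token() -> str:
    resp = requests.get("https://user-api.wall-box.com/users/signin")
    resp.raise_for_status()
    return resp.json()["data"]["attributes"]["token"]

with requests_mock.Mocker() as mock_request:
    # Register the mocked endpoint with a payload shaped like the ones above.
    mock_request.get(
        "https://user-api.wall-box.com/users/signin",
        json={"data": {"attributes": {"token": "fakekeyhere"}}},
        status_code=HTTPStatus.OK,
    )
    assert fetch_token() == "fakekeyhere"
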

View File

@@ -18,8 +18,12 @@ from homeassistant.components.wallbox.const import (
)
from homeassistant.core import HomeAssistant
from tests.components.wallbox import entry, setup_integration
from tests.components.wallbox.const import ERROR, JWT, STATUS, TTL, USER_ID
from tests.components.wallbox import (
authorisation_response,
authorisation_response_unauthorised,
entry,
setup_integration,
)
test_response = json.loads(
json.dumps(
@@ -34,30 +38,6 @@ test_response = json.loads(
)
)
authorisation_response = json.loads(
json.dumps(
{
JWT: "fakekeyhere",
USER_ID: 12345,
TTL: 145656758,
ERROR: "false",
STATUS: 200,
}
)
)
authorisation_response_unauthorised = json.loads(
json.dumps(
{
JWT: "fakekeyhere",
USER_ID: 12345,
TTL: 145656758,
ERROR: "false",
STATUS: 404,
}
)
)
async def test_show_set_form(hass: HomeAssistant) -> None:
"""Test that the setup form is served."""
@@ -77,7 +57,7 @@ async def test_form_cannot_authenticate(hass: HomeAssistant) -> None:
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
json=authorisation_response,
status_code=HTTPStatus.FORBIDDEN,
)
@@ -107,7 +87,7 @@ async def test_form_cannot_connect(hass: HomeAssistant) -> None:
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
json=authorisation_response_unauthorised,
status_code=HTTPStatus.NOT_FOUND,
)
@@ -137,7 +117,7 @@ async def test_form_validate_input(hass: HomeAssistant) -> None:
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
json=authorisation_response,
status_code=HTTPStatus.OK,
)
@@ -166,8 +146,8 @@ async def test_form_reauth(hass: HomeAssistant) -> None:
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
text='{"jwt":"fakekeyhere","user_id":12345,"ttl":145656758,"error":false,"status":200}',
"https://user-api.wall-box.com/users/signin",
json=authorisation_response,
status_code=200,
)
mock_request.get(
@@ -206,7 +186,7 @@ async def test_form_reauth_invalid(hass: HomeAssistant) -> None:
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
text='{"jwt":"fakekeyhere","user_id":12345,"ttl":145656758,"error":false,"status":200}',
status_code=200,
)

View File

@@ -11,24 +11,12 @@ from . import test_response
from tests.components.wallbox import (
DOMAIN,
authorisation_response,
entry,
setup_integration,
setup_integration_connection_error,
setup_integration_read_only,
)
from tests.components.wallbox.const import ERROR, JWT, STATUS, TTL, USER_ID
authorisation_response = json.loads(
json.dumps(
{
JWT: "fakekeyhere",
USER_ID: 12345,
TTL: 145656758,
ERROR: "false",
STATUS: 200,
}
)
)
async def test_wallbox_setup_unload_entry(hass: HomeAssistant) -> None:
@@ -59,7 +47,7 @@ async def test_wallbox_refresh_failed_invalid_auth(hass: HomeAssistant) -> None:
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
json=authorisation_response,
status_code=403,
)
@@ -85,7 +73,7 @@ async def test_wallbox_refresh_failed_connection_error(hass: HomeAssistant) -> N
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
json=authorisation_response,
status_code=200,
)

View File

@@ -10,30 +10,12 @@ from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant
from tests.components.wallbox import (
authorisation_response,
entry,
setup_integration,
setup_integration_read_only,
)
from tests.components.wallbox.const import (
ERROR,
JWT,
MOCK_LOCK_ENTITY_ID,
STATUS,
TTL,
USER_ID,
)
authorisation_response = json.loads(
json.dumps(
{
JWT: "fakekeyhere",
USER_ID: 12345,
TTL: 145656758,
ERROR: "false",
STATUS: 200,
}
)
)
from tests.components.wallbox.const import MOCK_LOCK_ENTITY_ID
async def test_wallbox_lock_class(hass: HomeAssistant) -> None:
@@ -47,7 +29,7 @@ async def test_wallbox_lock_class(hass: HomeAssistant) -> None:
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
json=authorisation_response,
status_code=200,
)
@@ -85,7 +67,7 @@ async def test_wallbox_lock_class_connection_error(hass: HomeAssistant) -> None:
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
json=authorisation_response,
status_code=200,
)

View File

@@ -9,27 +9,8 @@ from homeassistant.components.wallbox import CHARGER_MAX_CHARGING_CURRENT_KEY
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant
from tests.components.wallbox import entry, setup_integration
from tests.components.wallbox.const import (
ERROR,
JWT,
MOCK_NUMBER_ENTITY_ID,
STATUS,
TTL,
USER_ID,
)
authorisation_response = json.loads(
json.dumps(
{
JWT: "fakekeyhere",
USER_ID: 12345,
TTL: 145656758,
ERROR: "false",
STATUS: 200,
}
)
)
from tests.components.wallbox import authorisation_response, entry, setup_integration
from tests.components.wallbox.const import MOCK_NUMBER_ENTITY_ID
async def test_wallbox_number_class(hass: HomeAssistant) -> None:
@@ -39,7 +20,7 @@ async def test_wallbox_number_class(hass: HomeAssistant) -> None:
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
json=authorisation_response,
status_code=200,
)
@@ -68,7 +49,7 @@ async def test_wallbox_number_class_connection_error(hass: HomeAssistant) -> Non
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
json=authorisation_response,
status_code=200,
)

View File

@@ -10,27 +10,8 @@ from homeassistant.components.wallbox.const import CHARGER_STATUS_ID_KEY
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant
from tests.components.wallbox import entry, setup_integration
from tests.components.wallbox.const import (
ERROR,
JWT,
MOCK_SWITCH_ENTITY_ID,
STATUS,
TTL,
USER_ID,
)
authorisation_response = json.loads(
json.dumps(
{
JWT: "fakekeyhere",
USER_ID: 12345,
TTL: 145656758,
ERROR: "false",
STATUS: 200,
}
)
)
from tests.components.wallbox import authorisation_response, entry, setup_integration
from tests.components.wallbox.const import MOCK_SWITCH_ENTITY_ID
async def test_wallbox_switch_class(hass: HomeAssistant) -> None:
@@ -44,7 +25,7 @@ async def test_wallbox_switch_class(hass: HomeAssistant) -> None:
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
json=authorisation_response,
status_code=200,
)
@@ -82,7 +63,7 @@ async def test_wallbox_switch_class_connection_error(hass: HomeAssistant) -> Non
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
json=authorisation_response,
status_code=200,
)
@@ -121,7 +102,7 @@ async def test_wallbox_switch_class_authentication_error(hass: HomeAssistant) ->
with requests_mock.Mocker() as mock_request:
mock_request.get(
"https://api.wall-box.com/auth/token/user",
"https://user-api.wall-box.com/users/signin",
json=authorisation_response,
status_code=200,
)